def update_key(self, session, stat_key, day):
    """Fold the pending Redis counter for (stat_key, day) into the Stat table.

    Reads the accumulated counter value from Redis, inserts or updates the
    matching database row, and queues a decrement of the Redis counter on
    ``self.pipe`` so the consumed amount is removed.
    """
    # Most recent stat value recorded before *day*; used as the base
    # for a newly inserted row.
    prior = (session.query(Stat)
             .filter((Stat.key == stat_key), (Stat.time < day))
             .order_by(Stat.time.desc())
             .first())
    base = prior.value if prior else 0

    # Pending amount accumulated in Redis for the day in question.
    counter = StatCounter(stat_key, day)
    pending = counter.get(self.task.redis_client)

    # Update the existing row for the day, or insert a fresh one.
    existing = (session.query(Stat)
                .filter((Stat.key == stat_key), (Stat.time == day))
                .first())
    if existing is None:
        stmt = Stat.__table__.insert(
            mysql_on_duplicate='value = value + %s' % pending
        ).values(key=stat_key, time=day, value=base + pending)
        session.execute(stmt)
    else:
        existing.value += pending

    # Queue the consumed amount to be removed from Redis.
    counter.decr(self.pipe, pending)
def update_key(self, session, pipe, stat_key, day):
    """Move the pending Redis counter for (stat_key, day) into the Stat table.

    The pending Redis amount is added to the day's database row (the row is
    created from the most recent earlier value when missing) and a matching
    decrement is queued on *pipe*.
    """
    # Latest value recorded before *day*, used when inserting a new row.
    previous = (
        session.query(Stat)
        .filter((Stat.key == stat_key), (Stat.time < day))
        .order_by(Stat.time.desc())
        .first()
    )
    carried = previous.value if previous is not None else 0

    # Amount accumulated in Redis for the day in question.
    counter = StatCounter(stat_key, day)
    amount = counter.get(self.task.redis_client)

    # Insert or update the day's stat row.
    row = (
        session.query(Stat)
        .filter((Stat.key == stat_key), (Stat.time == day))
        .first()
    )
    if row is not None:
        row.value += amount
    else:
        session.execute(
            Stat.__table__.insert(
                mysql_on_duplicate='value = value + %s' % amount
            ).values(key=stat_key, time=day, value=carried + amount)
        )

    # Queue the consumed amount to be removed from Redis.
    counter.decr(pipe, amount)
def emit_stats(self, pipe, stats_counter):
    """Report batch observation/station counts to Redis and statsd."""
    today = self.today
    StatCounter(self.stat_obs_key, today).incr(pipe, stats_counter["obs"])
    StatCounter(self.stat_station_key, today).incr(pipe, stats_counter["new"])
    # Map each statsd metric to its counter key.
    for name, action, key in (
        ("observation", "insert", "obs"),
        ("station", "blocklist", "block"),
        ("station", "confirm", "confirm"),
        ("station", "new", "new"),
    ):
        self.stat_count(name, action, stats_counter[key])
def emit_stats(self, pipe, stats_counter):
    """Push the batch counters to the Redis stat keys and the stats client."""
    obs = stats_counter['obs']
    new = stats_counter['new']
    StatCounter(self.stat_obs_key, self.today).incr(pipe, obs)
    StatCounter(self.stat_station_key, self.today).incr(pipe, new)
    self.stat_count('observation', 'insert', obs)
    self.stat_count('station', 'blocklist', stats_counter['block'])
    self.stat_count('station', 'confirm', stats_counter['confirm'])
    self.stat_count('station', 'new', new)
def emit_stats(self, pipe, stats_counter, drop_counter):
    """Report insert, drop and blocklist counters for the processed batch."""
    day = self.today
    StatCounter(self.stat_obs_key, day).incr(pipe, stats_counter['obs'])
    StatCounter(self.stat_station_key, day).incr(
        pipe, stats_counter['new_station'])
    self.stat_count('insert', stats_counter['obs'])
    # One drop metric per drop reason.
    for reason, count in drop_counter.items():
        self.stat_count('drop', count, reason=reason)
    blocked = stats_counter['block']
    if blocked:
        self.task.stats_client.incr(
            'data.station.blocklist', blocked,
            tags=['type:%s' % self.station_type,
                  'action:add',
                  'reason:moving'])
def update_key(self, session, pipe, stat_key, day):
    """Fold the pending Redis counter for (stat_key, day) into the Stat
    table and queue the matching Redis decrement on *pipe*.
    """
    # Get value for the given day from Redis.
    stat_counter = StatCounter(stat_key, day)
    value = stat_counter.get(self.task.redis_client)
    # Get value for the given day from the database.
    columns = Stat.__table__.c
    stat = session.execute(
        select([columns.value]).where(columns.key == stat_key).where(
            columns.time == day)).fetchone()
    if stat is not None:
        # If the day already has an entry, update it.
        # NOTE(review): indentation reconstructed from a collapsed source
        # line — the decr is assumed to sit inside the `if value:` guard,
        # mirroring the insert branch below; confirm against history.
        if value:
            session.execute(Stat.__table__.update().where(
                columns.key == stat_key).where(columns.time == day).values(
                    value=value + columns.value))
            stat_counter.decr(pipe, value)
    else:
        # Get the most recent value for the stat from the database.
        before = session.execute(
            select([columns.value]).where(columns.key == stat_key).where(
                columns.time < day).order_by(
                    columns.time.desc()).limit(1)).fetchone()
        old_value = before.value if before else 0
        # Insert a new stat value.
        stmt = Stat.__table__.insert(
            mysql_on_duplicate='value = value + %s' % value).values(
                key=stat_key, time=day, value=old_value + value)
        session.execute(stmt)
        stat_counter.decr(pipe, value)
def update_key(self, stat_key, day):
    """Merge the pending Redis counter for (stat_key, day) into Stat.

    Uses ``Stat.incr`` to insert-or-increment the row, then queues the
    consumed amount for removal from Redis via ``self.pipe``.
    """
    # Latest recorded value before *day*, used as the base for a new row.
    previous = (self.session.query(Stat)
                .filter(Stat.key == stat_key)
                .filter(Stat.time < day)
                .order_by(Stat.time.desc())
                .first())
    base = previous.value if previous else 0

    # Pending amount accumulated in Redis for the day.
    counter = StatCounter(stat_key, day)
    amount = counter.get(self.redis_client)

    # Insert or update the stat row for the day.
    hashkey = Stat.to_hashkey(key=stat_key, time=day)
    Stat.incr(self.session, hashkey, amount, old=base)

    # Queue the consumed amount to be removed from Redis.
    counter.decr(self.pipe, amount)
def update_key(self, session, pipe, stat_key, day):
    """Transfer the pending Redis counter for (stat_key, day) into the
    Stat table and queue a matching decrement on *pipe*.
    """
    counter = StatCounter(stat_key, day)
    # Pending amount accumulated in Redis for the day.
    amount = counter.get(self.task.redis_client)

    # Existing database row for the day, if any.
    row = (session.query(Stat)
           .filter((Stat.key == stat_key), (Stat.time == day))).first()
    if row is None:
        # No row yet: seed it from the most recent earlier value.
        previous = (session.query(Stat)
                    .filter((Stat.key == stat_key), (Stat.time < day))
                    .order_by(Stat.time.desc())
                    .limit(1)).first()
        base = previous.value if previous else 0
        session.execute(
            Stat.__table__.insert(
                mysql_on_duplicate='value = value + %s' % amount
            ).values(key=stat_key, time=day, value=base + amount))
        counter.decr(pipe, amount)
    elif amount:
        # Row exists: only touch it when there is something to add.
        row.value += amount
        counter.decr(pipe, amount)
def commit_batch(rows):
    """Insert/update one batch of shard rows and record how many were
    newly inserted on the daily Redis stat counter.

    ``rows`` maps a shard id to the list of row dicts destined for that
    shard's table.
    """
    all_inserted_rows = 0
    for shard_id, shard_rows in rows.items():
        table_insert = shards[shard_id].__table__.insert(
            mysql_on_duplicate=on_duplicate)
        result = session.execute(table_insert, shard_rows)
        count = result.rowcount
        # Trick to avoid querying for existing rows: MySQL reports
        # rowcount 1 for an inserted row and 2 for an updated row, so
        # len(shard_rows) <= count <= 2 * len(shard_rows) must hold.
        inserted_rows = 2 * len(shard_rows) - count
        changed_rows = count - len(shard_rows)
        # The previous check (inserted_rows + changed_rows ==
        # len(shard_rows)) was algebraically always true and could never
        # fire; assert the actual rowcount bounds instead.
        assert 0 <= inserted_rows <= len(shard_rows)
        assert 0 <= changed_rows <= len(shard_rows)
        all_inserted_rows += inserted_rows
    StatCounter(self.stat_key, today).incr(pipe, all_inserted_rows)
def update_key(self, session, pipe, stat_key, day):
    """Fold the pending Redis counter for (stat_key, day) into the Stat
    table and queue the matching Redis decrement on *pipe*.
    """
    # Get value for the given day from Redis.
    stat_counter = StatCounter(stat_key, day)
    value = stat_counter.get(self.task.redis_client)
    # Get value for the given day from the database.
    columns = Stat.__table__.c
    stat = session.execute(
        select([columns.value])
        .where(columns.key == stat_key)
        .where(columns.time == day)
    ).fetchone()
    if stat is not None:
        # If the day already has an entry, update it.
        # NOTE(review): indentation reconstructed from a collapsed source
        # line — the decr is assumed to sit inside the `if value:` guard,
        # mirroring the insert branch below; confirm against history.
        if value:
            session.execute(
                Stat.__table__.update()
                .where(columns.key == stat_key)
                .where(columns.time == day)
                .values(value=value + columns.value)
            )
            stat_counter.decr(pipe, value)
    else:
        # Get the most recent value for the stat from the database.
        before = session.execute(
            select([columns.value])
            .where(columns.key == stat_key)
            .where(columns.time < day)
            .order_by(columns.time.desc())
            .limit(1)
        ).fetchone()
        old_value = before.value if before else 0
        # Insert a new stat value.
        stmt = Stat.__table__.insert(
            mysql_on_duplicate='value = value + %s' % value
        ).values(key=stat_key, time=day, value=old_value + value)
        session.execute(stmt)
        stat_counter.decr(pipe, value)
def check_statcounter(self, redis, stat_key, value):
    """Assert that today's StatCounter for *stat_key* equals *value*."""
    counter = StatCounter(stat_key, util.utcnow())
    assert counter.get(redis) == value
def add_counter(self, stat_key, time, value):
    """Increment the (stat_key, time) StatCounter in Redis by *value*."""
    counter = StatCounter(stat_key, time)
    with redis_pipeline(self.redis_client) as pipe:
        counter.incr(pipe, value)
def add_counter(self, redis, stat_key, time, value):
    """Increment the (stat_key, time) StatCounter on *redis* by *value*."""
    counter = StatCounter(stat_key, time)
    with redis_pipeline(redis) as pipe:
        counter.incr(pipe, value)
def check_statcounter(self, stat_key, value):
    """Assert that today's StatCounter for *stat_key* equals *value*."""
    counter = StatCounter(stat_key, util.utcnow())
    self.assertEqual(counter.get(self.redis_client), value)