def remove_duplicates():
    """Delete duplicate Event rows sharing the same non-NULL hash.

    For each group of rows with an equal hash, the row with the lowest id is
    kept and all others are deleted. Deletions run inside one transaction.
    """
    Event2 = Event.alias()
    # Self-join on hash; select every row that has a same-hash sibling with a
    # smaller id (i.e. every row except the oldest of each duplicate group).
    # BUG FIX: the original combined the two conditions with Python `and`,
    # which in peewee evaluates to only the right-hand expression — the
    # `hash IS NOT NULL` filter was silently dropped. Use `&` to build SQL AND.
    query = (Event
             .select(Event.id)
             .join(Event2, on=(Event.hash == Event2.hash))
             .where(Event.hash.is_null(False) & (Event.id > Event2.id)))
    with db.atomic():
        # py3 print function (original used the py2 print statement, which is
        # a SyntaxError under the Python 3 used by the rest of this file).
        print("Deleting " + str(len(query)) + " duplicates...")
        for item in query:
            Event.delete_by_id(item.id)
    print("Done.")
def get_events():
    """Return up to 100 events adjacent to a client-supplied timestamp as JSON.

    Query params:
      time    -- millisecond epoch timestamp anchoring the window
      before  -- 'true' to page backwards (events strictly before `time`)
      tags    -- JSON list of tag names to filter by
      include -- 'true' to keep tagged events, otherwise exclude them

    The response list is always in ascending time order.
    """
    # Renamed from `time` to avoid shadowing the stdlib `time` module.
    time_arg = request.args.get('time')
    before = request.args.get('before') == 'true'
    try:
        date = datetime.datetime.fromtimestamp(float(time_arg) / 1000.0)
    except (OSError, OverflowError, ValueError, TypeError):
        # Missing, malformed, or out-of-range timestamp: anchor at the floor.
        # The original caught only OSError; float(None)/float('x') raise
        # TypeError/ValueError and out-of-range raises OverflowError.
        date = datetime.datetime.min
    filter_include = request.args.get('include') == "true"
    tag_ids = [Tag.get_tag(tag).id
               for tag in json.loads(request.args.get('tags'))]
    filter_query = fn.EXISTS(TagToEvent.select().where(
        TagToEvent.tag.in_(tag_ids) & (TagToEvent.event == Event.id)))
    if not filter_include:
        filter_query = ~filter_query
    n = 100
    # Build the predicate once instead of duplicating four query branches.
    condition = (Event.time < date) if before else (Event.time > date)
    # BUG FIX: was `any(tag_ids)`, which treats a list containing id 0 as
    # empty; the intent is "were any tags supplied at all".
    if tag_ids:
        condition = condition & filter_query
    ordering = Event.time.desc() if before else Event.time
    query = Event.select().where(condition).order_by(ordering)[:n]
    result = [to_dict(event) for event in query]
    if before:
        # Backward pages were fetched newest-first; restore ascending order.
        result = reversed(result)
    return jsonify(list(result))
def sync_media_file():
    """Background coroutine: sync pending media files during the allowed window.

    Loops forever. Inside the configured [start_at, stop_at) hour window it
    picks the oldest Event whose sync is unfinished, uploads its media ids via
    wxutil, records progress on the row, and marks it finished. Outside the
    window, or when no work exists, it sleeps 10 minutes. Any unexpected
    exception terminates the loop.
    """
    while True:
        cur_hour = int(time.strftime('%H'))
        if syncconfig.start_at <= cur_hour < syncconfig.stop_at:
            try:
                # Oldest event not yet marked finished (syncfinish != 1 or NULL).
                event = yield dbutil.do(
                    Event.select()
                    .where((Event.syncfinish != 1) | (Event.syncfinish >> None))
                    .order_by(+Event.createtime)
                    .get)
                logging.info('开始处理活动[#{0}] 的media文件'.format(event.code))
                if event.mediaids:
                    # Renamed from `all`, which shadowed the builtin.
                    media_ids = json.loads(event.mediaids)
                    logging.debug('all ' + str(media_ids))
                    uploaded = json.loads(event.syncstatus) if event.syncstatus else []
                    img_urls = json.loads(event.imgurls) if event.imgurls else {}
                    to_upload = list(set(media_ids) - set(uploaded))
                    logging.info('待处理media文件清单' + str(to_upload))
                    # Process all pending media concurrently.
                    results = yield [
                        wxutil.process_temp_resource(media_id)
                        for media_id in to_upload
                    ]
                    # Record every successful upload (None means failure).
                    for media_id, url in zip(to_upload, results):
                        if url is not None:
                            uploaded.append(media_id)
                            img_urls[media_id] = url
                    event.syncstatus = json.dumps(uploaded, ensure_ascii=False)
                    event.imgurls = json.dumps(img_urls, ensure_ascii=False)
                    logging.info('已完成media文件清单:' + str(uploaded))
                    if set(media_ids) == set(uploaded):
                        logging.info('活动[#{0}] 所有media文件已完成'.format(
                            event.code))
                else:
                    logging.info('mediaids不存在,没有media文件需要同步')
                # NOTE(review): the event is marked finished even when some
                # uploads failed above, so failed items are never retried —
                # confirm this best-effort behavior is intended.
                event.syncfinish = 1
                yield dbutil.do(event.save)
            except DoesNotExist:
                # Nothing pending: back off before polling again.
                logging.info('无待同步media文件, sleep...')
                yield gen.sleep(60 * 10)
            except Exception as e:
                logging.exception('同步media文件发生异常:{0}, 退出...'.format(str(e)))
                break
        else:
            # Outside the allowed window: sleep 10 minutes.
            yield gen.sleep(60 * 10)
def query_fulltext_code(code):
    """Full-text search events by code, matching pinyin and pinyin initials.

    Builds a search string of the code's full pinyin plus each alphabetic
    initial, then runs a MySQL MATCH...AGAINST against the `codepinyin` /
    `codepinyininitials` columns, ordered columns intact. Returns at most 4
    rows as dicts (each including a `similarity` relevance score).
    """
    pinyin = Pinyin()
    # Each initial is repeated 4 times — presumably to clear MySQL's full-text
    # minimum word length so single-letter initials are indexed; confirm.
    initials = [
        letter * 4
        for letter in pinyin.get_initials(code, splitter=' ').lower().split(' ')
        if letter.isalpha()
    ]
    logging.debug(initials)
    analysed_code = pinyin.get_pinyin(code, splitter=u' ') + ' ' + ' '.join(initials)
    logging.debug(analysed_code)
    # The search text is bound via the %s placeholder (parameterized), not
    # interpolated into the SQL string.
    clause = "MATCH(`codepinyin`, `codepinyininitials`) AGAINST (%s)"
    query = yield dbutil.do(
        Event.select(SQL('*, ' + clause + ' AS similarity', analysed_code))
        .where(SQL(clause, analysed_code))
        .limit(4)
        .dicts)
    # list(query) instead of a pass-through comprehension.
    events = list(query)
    logging.debug(events)
    return events
def post(self):
    """Return one page of events, newest first, as a JSON Response.

    Request body (JSON): optional `page_number` (default 1) and
    `items_per_page` (default 4). On any failure, writes a generic error
    response and logs the exception.
    """
    try:
        data = json.loads(self.request.body.decode())
        logging.debug(data)
        # Default to the first page with 4 items when unspecified.
        page_number = data.get('page_number', 1)
        items_per_page = data.get('items_per_page', 4)
        query = yield dbutil.do(
            Event.select()
            .order_by(-Event.createtime)
            .paginate(page_number, items_per_page)
            .dicts)
        # Materialize directly instead of a pass-through comprehension.
        result = list(query)
        self.write(Response(status=1, msg='ok', result=result).json())
    except Exception as e:
        # Handler boundary: hide details from the client, log the full trace.
        self.write(Response(msg='sorry,亲,活动查询失败').json())
        logging.exception('CreateEventHandler error: {0}'.format(str(e)))
def post(self):
    """Star (follow) an event for a user.

    Request body (JSON): `eventcode`, `openid`. First-time follows create a
    Star row; a previously cancelled follow is re-activated; an already-active
    follow is rejected with an error response.
    """
    try:
        data = json.loads(self.request.body.decode())
        code = data['eventcode']
        openid = data['openid']
        star = None
        try:
            star = yield dbutil.do(
                Star.select()
                .where((Star.eventcode == code) & (Star.openid == openid))
                .get)
        except DoesNotExist:
            # BUG FIX: was a bare `except: pass`, which also swallowed real
            # DB errors and could create duplicate Star rows. Only "no
            # existing star" falls through to the first-follow path.
            pass
        if star:
            if star.status == 1:
                # Already following — handled by the RuntimeError branch below.
                raise RuntimeError
            # Previously unfollowed: re-activate the existing record.
            star.status = 1
            star.updatetime = datetime.datetime.now()
        else:
            # First-time follow: fetch the event and the user concurrently.
            results = yield [
                dbutil.do(Event.select(Event.id, Event.code)
                          .where(Event.code == code).get),
                dbutil.do(User.select(User.id, User.openid)
                          .where(User.openid == openid).get),
            ]
            event, user = results
            star = Star(eventid=event.get_id(),
                        eventcode=event.code,
                        userid=user.get_id(),
                        openid=user.openid,
                        createtime=datetime.datetime.now(),
                        status=1)
        # Persist either the re-activated or the newly created star.
        yield dbutil.do(star.save)
        self.write(Response(status=1, msg='ok', result={}).json())
    except RuntimeError as e:
        self.write(Response(msg='sorry,亲,用户已经关注该活动').json())
        logging.exception('StarEventHandler error: {0}'.format(str(e)))
    except Exception as e:
        self.write(Response(msg='sorry,亲,关注活动失败').json())
        logging.exception('StarEventHandler error: {0}'.format(str(e)))
def sync_media_file():
    """Background coroutine: sync pending media files during the allowed window.

    Loops forever. Inside the configured [start_at, stop_at) hour window it
    picks the oldest Event whose sync is unfinished, uploads its media ids via
    wxutil, records progress on the row, and marks it finished. Outside the
    window, or when there is no work, it sleeps 10 minutes. Any unexpected
    exception terminates the loop.
    """
    while True:
        cur_hour = int(time.strftime('%H'))
        # Only work inside the configured daily window.
        if cur_hour >= syncconfig.start_at and cur_hour < syncconfig.stop_at:
            try:
                # Oldest event not yet marked finished (syncfinish != 1 or NULL).
                event = yield dbutil.do(Event.select().where(
                    (Event.syncfinish != 1) | (Event.syncfinish >> None)).order_by(+Event.createtime).get)
                # "Start processing media files of event [#code]"
                logging.info('开始处理活动[#{0}] 的media文件'.format(event.code))
                if event.mediaids:
                    # NOTE(review): `all` shadows the builtin of the same name.
                    all = json.loads(event.mediaids)
                    logging.debug('all ' + str(all))
                    # Previously uploaded media ids recorded on the row, if any.
                    if event.syncstatus:
                        uploaded = json.loads(event.syncstatus)
                    else:
                        uploaded = []
                    # media_id -> resulting image URL mapping, if any.
                    if event.imgurls:
                        img_urls = json.loads(event.imgurls)
                    else:
                        img_urls = {}
                    # Only process ids not uploaded in a previous pass.
                    to_upload = list(set(all) - set(uploaded))
                    logging.info('待处理media文件清单' + str(to_upload))
                    # Upload all pending media concurrently.
                    results = yield [wxutil.process_temp_resource(media_id) for media_id in to_upload]
                    # Record each success; a None result means that upload failed.
                    for i in range(0, len(results)):
                        if results[i] is not None:
                            uploaded.append(to_upload[i])
                            img_urls[to_upload[i]] = results[i]
                    event.syncstatus = json.dumps(uploaded, ensure_ascii=False)
                    event.imgurls = json.dumps(img_urls, ensure_ascii=False)
                    logging.info('已完成media文件清单:' + str(uploaded))
                    if set(all) == set(uploaded):
                        # "All media files of event [#code] are done"
                        logging.info('活动[#{0}] 所有media文件已完成'.format(event.code))
                else:
                    # "mediaids absent, nothing to sync"
                    logging.info('mediaids不存在,没有media文件需要同步')
                # NOTE(review): marked finished even when some uploads failed,
                # so failed items are never retried — confirm this is intended.
                event.syncfinish = 1
                yield dbutil.do(event.save)
            except DoesNotExist:
                # Nothing pending: back off 10 minutes before polling again.
                logging.info('无待同步media文件, sleep...')
                yield gen.sleep(60 * 10)
            except Exception as e:
                # Unexpected failure: log and stop the worker loop entirely.
                logging.exception('同步media文件发生异常:{0}, 退出...'.format(str(e)))
                break
        else:
            # Outside the allowed window: sleep 10 minutes.
            yield gen.sleep(60 * 10)
def post(self):
    """Star (follow) an event for a user.

    Request body (JSON): `eventcode`, `openid`. First-time follows create a
    Star row; a previously cancelled follow is re-activated; an already-active
    follow is rejected with an error response.
    """
    try:
        data = json.loads(self.request.body.decode())
        code = data['eventcode']
        openid = data['openid']
        star = None
        try:
            # Look up an existing star record for this (event, user) pair.
            star = yield dbutil.do(Star.select().where(
                (Star.eventcode == code) & (Star.openid == openid)
            ).get)
        # NOTE(review): bare except swallows all errors here, not just
        # DoesNotExist — a DB failure would fall through as "no star" and
        # could create a duplicate row; consider `except DoesNotExist`.
        except:
            pass
        if star:
            if star.status == 1:
                # 已经关注 (already following) — handled by the RuntimeError
                # branch below, which reports the duplicate to the client.
                raise RuntimeError
            else:
                # 已经取消关注 (previously unfollowed): re-activate the record.
                star.status = 1
                star.updatetime = datetime.datetime.now()
        else:
            # 首次关注 (first-time follow): fetch event and user concurrently.
            results = yield [
                dbutil.do(Event.select(Event.id, Event.code).where(Event.code == code).get),
                dbutil.do(User.select(User.id, User.openid).where(User.openid == openid).get)
            ]
            event = results[0]
            user = results[1]
            star = Star(
                eventid=event.get_id(),
                eventcode=event.code,
                userid=user.get_id(),
                openid=user.openid,
                createtime=datetime.datetime.now(),
                status=1
            )
        # Persist either the re-activated or the newly created star.
        yield dbutil.do(star.save)
        self.write(Response(status=1, msg='ok', result={}).json())
    except RuntimeError as e:
        # Duplicate follow: "sorry, you already follow this event".
        self.write(Response(msg='sorry,亲,用户已经关注该活动').json())
        logging.exception('StarEventHandler error: {0}'.format(str(e)))
    except Exception as e:
        # Any other failure: "sorry, following the event failed".
        self.write(Response(msg='sorry,亲,关注活动失败').json())
        logging.exception('StarEventHandler error: {0}'.format(str(e)))
def display_latest():
    """Print the first 100 events ordered by ascending time to stdout.

    NOTE(review): `order_by(Event.time)` ascending takes the *earliest* 100
    rows; despite the function's name this is not the latest — confirm intent
    (a descending order would be `Event.time.desc()`).
    """
    query = Event.select().order_by(Event.time)[:100]
    for event in query:
        # py3 print function (original used the py2 print statement, which is
        # a SyntaxError under the Python 3 used by the rest of this file).
        print(event.to_string())