def getNewWechatInfo():
    '''
    modify by bigzhu at 15/07/20 00:00:12 auto-convert expiry times to python datetime

    Build a fresh WechatBasic client and return it together with its access
    token, jsapi ticket, and the datetime at which each of them expires.
    '''
    wechat = WechatBasic(token=token, appid=appid, appsecret=appsecret)
    token_info = wechat.get_access_token()
    ticket_info = wechat.get_jsapi_ticket()
    # both expiry fields arrive as unix timestamps; convert before returning
    token_expires_at = time_bz.timestampToDateTime(token_info['access_token_expires_at'])
    ticket_expires_at = time_bz.timestampToDateTime(ticket_info['jsapi_ticket_expires_at'])
    return (wechat,
            token_info['access_token'],
            token_expires_at,
            ticket_info['jsapi_ticket'],
            ticket_expires_at)
def saveBlogs(user_name, blogs, offset): if blogs: pass else: return for blog in blogs: m = public_bz.storage() m.id_str = blog['id'] m.m_type = 'tumblr' m.m_user_id = user_name m.created_at = time_bz.timestampToDateTime(blog['timestamp']) m.extended_entities = json.dumps(blog.get('photos')) m.content = None m.text = blog.get('caption') m.href = blog.get('short_url') m.type = blog.get('type') result = pg.insertIfNotExist(pg, 'm', m, "id_str='%s' and m_type='tumblr' " % m.id_str) if result is None: # 有重复记录了,就不再继续了 print 'have same data' return else: print 'new ', m.id_str, m.type, 'offset:', offset, 'name:', user_name # 继续取 new_offset = offset + 20 new_blogs = callGetMeidaApi(user_name, offset=new_offset)['response']['posts'] saveBlogs(user_name, new_blogs, new_offset)
def saveMessage(god_name, twitter_name, god_id, blog): m = public_bz.storage() m.god_id = god_id m.god_name = god_name m.name = twitter_name m.id_str = blog['id'] m.m_type = 'tumblr' m.created_at = time_bz.timestampToDateTime(blog['timestamp']) type = blog.get('type') m.href = blog.get('short_url') m.type = type if type == 'text': m.title = blog.get('title') m.text = blog.get('body') elif type == 'photo': m.text = blog.get('caption') m.extended_entities = json.dumps(blog.get('photos')) elif type == 'video': m.extended_entities = json.dumps({'video_url': blog.get('video_url')}) m.content = None id = pg.insertIfNotExist('message', m, "id_str='%s' and m_type='tumblr' " % m.id_str) if id is None: pass else: print '%s new tumblr message %s' % (m.name, m.id_str)
def saveBlogs(user_name, blogs, offset): if blogs: pass else: return for blog in blogs: blog['created_date'] = time_bz.timestampToDateTime(blog['timestamp']) del blog['timestamp'] blog['id_str'] = blog['id'] del blog['id'] del blog['date'] del blog['recommended_source'] del blog['recommended_color'] del blog['highlighted'] blog['user_name'] = user_name blog['tags'] = json.dumps(blog.get('tags')) blog['reblog'] = json.dumps(blog.get('reblog')) blog['trail'] = json.dumps(blog.get('trail')) blog['photos'] = json.dumps(blog.get('photos')) blog['post_author'] = json.dumps(blog.get('post_author')) blog['player'] = json.dumps(blog.get('player')) blog['dialogue'] = json.dumps(blog.get('dialogue')) result = pg.insertIfNotExist(pg, 'tumblr_blog', blog, "id_str='%s'" % blog['id_str']) if result is None: # 有重复记录了,就不再继续了 print 'have some data' return else: print 'new ', blog['id_str'], blog['type'], 'offset:', offset # 继续取 new_offset = offset + 20 new_blogs = callGetMeidaApi(user_name, offset=new_offset)['response']['posts'] saveBlogs(user_name, new_blogs, new_offset)
def getNewWechatInfo():
    '''
    modify by bigzhu at 15/07/20 00:00:12 auto-convert expiry times to python datetime
    modify by bigzhu at 15/09/13 17:25:43 moving to WechatExt so the
        subscription account can message specific users

    Return a WechatBasic client plus access token / jsapi ticket and the
    datetime each expires at.
    '''
    wechat = WechatBasic(token=token, appid=appid, appsecret=appsecret)
    token_info = wechat.get_access_token()
    ticket_info = wechat.get_jsapi_ticket()
    # expiry values are unix timestamps; convert before returning
    token_expires_at = time_bz.timestampToDateTime(token_info['access_token_expires_at'])
    ticket_expires_at = time_bz.timestampToDateTime(ticket_info['jsapi_ticket_expires_at'])
    return (wechat,
            token_info['access_token'],
            token_expires_at,
            ticket_info['jsapi_ticket'],
            ticket_expires_at)
def saveLast(last_time, last_message_id, user_id):
    '''
    create by bigzhu at 15/08/16 16:22:39 save the user's last seen message

    Insert a `last` row for the user or, when one already exists, advance it
    -- but only forward: rows whose stored last_time is newer are untouched.
    Returns the inserted row id, or the update count when the row existed.
    '''
    millis = int(last_time)
    last_dt = time_bz.timestampToDateTime(millis, millisecond=True)
    row = {
        'user_id': user_id,
        'last_time': last_dt,
        'last_message_id': last_message_id,
    }
    new_id = db_bz.insertIfNotExist(pg, 'last', row, "user_id=%s" % user_id)
    if new_id is not None:
        return new_id
    # row already present: only move the pointer forward, never backward
    count = pg.update(
        'last',
        where='last_time< to_timestamp(%s/1000) and user_id=%s' % (millis, user_id),
        last_message_id=last_message_id,
        last_time=last_dt)
    return count
def getNewWechatInfo():
    '''
    modify by bigzhu at 15/07/20 00:00:12 auto-convert expiry times to python datetime
    modify by bigzhu at 15/09/13 17:25:43 moving to WechatExt so the
        subscription account can message specific users

    Build a WechatBasic client from a WechatConf (compatible encrypt mode)
    and return it with its access token, jsapi ticket, and their expiry
    datetimes.
    '''
    conf = WechatConf(
        token=token,
        appid=appid,
        appsecret=appsecret,
        encrypt_mode='compatible',
        encoding_aes_key=encoding_aes_key
    )
    wechat = WechatBasic(conf=conf)
    token_info = wechat.get_access_token()
    ticket_info = wechat.get_jsapi_ticket()
    # expiry values are unix timestamps; convert before returning
    token_expires_at = time_bz.timestampToDateTime(token_info['access_token_expires_at'])
    ticket_expires_at = time_bz.timestampToDateTime(ticket_info['jsapi_ticket_expires_at'])
    return (wechat,
            token_info['access_token'],
            token_expires_at,
            ticket_info['jsapi_ticket'],
            ticket_expires_at)
def saveGraphqlMessage(ins_name, user_name, god_id, message): ''' 用 Graphql 取到的数据 ''' message = storage(message) m = public_bz.storage() m.god_name = user_name m.name = ins_name m.m_type = 'instagram' m.id_str = message.id m.created_at = time_bz.timestampToDateTime(message.taken_at_timestamp) if message.get('edge_media_to_caption').get('edges'): m.text = message.get('edge_media_to_caption').get('edges')[0].get( 'node').get('text') else: m.text = None m.extended_entities = json.dumps({'url': message.display_url}) m.href = 'https://www.instagram.com/p/%s/' % message.shortcode if message.__typename == 'GraphSidecar': # mutiple image edges = getMutipleImage(message.shortcode) images = [] for edge in edges: url = edge['node']['display_url'] images.append({'url': url}) m.extended_entities = json.dumps(images) m.type = 'images' elif message.is_video: m.type = 'video' video_url = getVideoUrl(m.href) m.extended_entities = json.dumps({ 'url': message.display_url, 'video_url': video_url }) else: m.type = 'image' id = pg.insertIfNotExist('message', m, "id_str='%s' and m_type='instagram'" % m.id_str) if id is not None: print '%s new instagram message %s' % (m.name, m.id_str) # 肯定会有一条重复 # else: # print '%s 重复记录 %s' % (m.user_name, m.id_str) return id
def put(self):
    '''
    Record the client-supplied last-seen time for the current user and
    reply with the remaining unread message count as JSON.
    '''
    self.set_header("Content-Type", "application/json")
    data = json.loads(self.request.body)
    last_time = int(data.get('last_time'))
    # millisecond timestamp from the client -> datetime
    last_time = time_bz.timestampToDateTime(last_time, True)
    # last_message_id = data.get('message_id')
    user_id = self.current_user
    if user_id is None:
        pass
    else:
        # NOTE(review): saveLast is called here with two args and an
        # already-converted datetime, but the saveLast definition visible
        # elsewhere takes (last_time, last_message_id, user_id) and does the
        # int()/datetime conversion itself -- confirm the signature matches.
        last_oper.saveLast(last_time, user_id)
    data = storage()
    data.error = OK
    data.unread_message_count = oper.getUnreadCount(user_id)
    self.write(json.dumps(data, cls=json_bz.ExtEncoder))
def get(self, parm=None):
    '''
    Return up to `limit` (default 10) messages older than `before`,
    optionally filtered by god_name / search_key, as JSON.
    '''
    self.set_header("Content-Type", "application/json")
    parm = json.loads(parm)
    # 'before' is required; filters are optional
    before = time_bz.timestampToDateTime(parm['before'], True)
    god_name = parm.get('god_name')
    search_key = parm.get('search_key')
    limit = parm.get('limit')
    if limit is None:
        limit = 10
    messages = public_db.getOldMessages(before=before,
                                        search_key=search_key,
                                        god_name=god_name,
                                        limit=limit,
                                        user_id=self.current_user)
    payload = {'error': '0', 'messages': messages}
    self.write(json.dumps(payload, cls=json_bz.ExtEncoder))
def get(self, parm=None):
    '''
    Return messages newer than `after` (or, for plain polling, newer than
    the user's last-seen time) plus the unread count; when nothing is new,
    also report how many gods the user follows. Prints elapsed seconds.
    '''
    starttime = datetime.datetime.now()
    self.set_header("Content-Type", "application/json")
    after = limit = search_key = god_name = None
    if parm:
        parm = json.loads(parm)
        after = parm.get('after')  # only messages later than this time
        limit = parm.get('limit')
        search_key = parm.get('search_key')
        god_name = parm.get('god_name')  # restrict to this god only
    user_id = self.current_user
    if after:
        after = time_bz.timestampToDateTime(after, True)
    elif search_key is None and god_name is None:
        # plain polling: these filters must not pin to the last-seen time
        after = last_oper.getLastTime(user_id)
    messages = public_db.getNewMessages(user_id=user_id,
                                        after=after,
                                        limit=limit,
                                        god_name=god_name,
                                        search_key=search_key)
    data = storage()
    data.error = OK
    data.messages = messages
    data.unread_message_count = oper.getUnreadCount(user_id)
    if len(messages) == 0:
        if user_id:
            data.followed_god_count = god_oper.getFollowedGodCount(user_id)
        else:
            data.followed_god_count = 0
    endtime = datetime.datetime.now()
    print (endtime - starttime).seconds
    self.write(json.dumps(data, cls=json_bz.ExtEncoder))
def get(self, parm):
    '''
    Return the public god list as JSON, optionally filtered by category
    and paged with `before` / `limit`.
    '''
    self.set_header("Content-Type", "application/json")
    query = json.loads(parm)
    before = query.get('before')
    if before:
        before = time_bz.timestampToDateTime(before, True)
    gods = oper.getGods(self.current_user,
                        cat=query.get('cat'),
                        is_public=True,
                        limit=query.get('limit'),
                        before=before)
    payload = {'error': '0', 'gods': gods}
    self.write(json.dumps(payload, cls=json_bz.ExtEncoder))
def saveMedias(user, medias):
    '''
    create by bigzhu at 15/09/04 20:58:54 save medias

    Persist one page of instagram media for `user` into `instagram_media`,
    push a wechat notification to followers for small batches, then recurse
    into the next page while pagination continues. Raises on the first
    duplicate record.

    Raw media item shape (instagram API):
        "attribution":null, "tags":[ ], "type":"image", "location":{ },
        "comments":{ }, "filter":"Normal", "created_time":"1441362020",
        "link":"https:\/\/instagram.com\/p\/7NIHiLJJs3\/", "likes":{ },
        "images":{ }, "users_in_photo":[ ], "caption":{ },
        "user_has_liked":false, "id":"1066544388859271991_262341",
        "user":{ }
    '''
    for media_d in medias['data']:
        media = storage(media_d)
        db_media = storage()
        #db_media.attribution = media.attribution
        #db_media.tags = json.dumps(media.tags, cls=public_bz.ExtEncoder)
        db_media.type = media.type
        #db_media.location = json.dumps(media.location, cls=public_bz.ExtEncoder)
        db_media.comments = json.dumps(media.comments, cls=public_bz.ExtEncoder)
        db_media.filter = media.filter
        #db_media.created_time = time_bz.timestampToDateTime(media.created_time) + timedelta(hours=8)
        db_media.created_time = time_bz.timestampToDateTime(media.created_time)
        db_media.link = media.link
        #db_media.likes = json.dumps(media.likes, cls=public_bz.ExtEncoder)
        # the three image sizes are stored as separate JSON columns
        db_media.low_resolution = json.dumps(media.images['low_resolution'])
        db_media.standard_resolution = json.dumps(media.images['standard_resolution'])
        db_media.thumbnail = json.dumps(media.images['thumbnail'])
        #db_media.users_in_photo = json.dumps(media.users_in_photo, cls=public_bz.ExtEncoder)
        if media.caption:
            # flatten caption['from'] down to just the author's user_id
            caption = media.caption
            caption['user_id'] = caption['from']['id']
            del caption['from']
        else:
            caption = ''
        db_media.caption = json.dumps(caption, cls=public_bz.ExtEncoder)
        db_media.id_str = media.id
        db_media.user_id = user.id
        id = pg.insertIfNotExist(pg, 'instagram_media', db_media, "id_str='%s'" % db_media.id_str)
        if id is None:
            raise Exception('重复记录 id=%s, name=%s' % (media.id, user.username))
        else:
            print 'new=', media.id, user.username
        # new message: wechat-notify only when the batch holds at most 2 items
        # NOTE(review): len(medias) counts top-level dict keys ('data',
        # 'pagination', ...), not the number of media items -- confirm intent.
        if id is not None and len(medias) <= 2:
            openids = public_db.getOpenidsByName('instagram', user.username)
            for data in openids:
                if caption != '':
                    text = caption.get('text')
                else:
                    text = ''
                wechat_oper.sendInstagram(data.openid, text, media.images['low_resolution']['url'], user.username, id)
    # follow pagination to the next page, if any
    if medias['pagination']:
        next_url = medias['pagination']['next_url']
        medias = callGetMeidaApi(next_url=next_url)
        saveMedias(user, medias)
def saveMedias(user, medias):
    '''
    create by bigzhu at 15/09/04 20:58:54 save medias

    Persist one page of instagram media for `user` into the unified `m`
    table (the old `instagram_media` insert and the wechat notification are
    kept below, commented out), then recurse into the next page while
    pagination continues. Duplicates are logged, not raised.

    Raw media item shape (instagram API):
        "attribution":null, "tags":[ ], "type":"image", "location":{ },
        "comments":{ }, "filter":"Normal", "created_time":"1441362020",
        "link":"https:\/\/instagram.com\/p\/7NIHiLJJs3\/", "likes":{ },
        "images":{ }, "users_in_photo":[ ], "caption":{ },
        "user_has_liked":false, "id":"1066544388859271991_262341",
        "user":{ }
    '''
    for media_d in medias['data']:
        media = storage(media_d)
        db_media = storage()
        #db_media.attribution = media.attribution
        #db_media.tags = json.dumps(media.tags, cls=public_bz.ExtEncoder)
        db_media.type = media.type
        #db_media.location = json.dumps(media.location, cls=public_bz.ExtEncoder)
        db_media.comments = json.dumps(media.comments, cls=public_bz.ExtEncoder)
        db_media.filter = media.filter
        #db_media.created_time = time_bz.timestampToDateTime(media.created_time) + timedelta(hours=8)
        db_media.created_time = time_bz.timestampToDateTime(media.created_time)
        db_media.link = media.link
        #db_media.likes = json.dumps(media.likes, cls=public_bz.ExtEncoder)
        # the three image sizes are stored as separate JSON columns
        db_media.low_resolution = json.dumps(media.images['low_resolution'])
        db_media.standard_resolution = json.dumps(
            media.images['standard_resolution'])
        db_media.thumbnail = json.dumps(media.images['thumbnail'])
        #db_media.users_in_photo = json.dumps(media.users_in_photo, cls=public_bz.ExtEncoder)
        if media.caption:
            # flatten caption['from'] down to just the author's user_id
            caption = media.caption
            caption['user_id'] = caption['from']['id']
            del caption['from']
        else:
            caption = ''
        db_media.caption = json.dumps(caption, cls=public_bz.ExtEncoder)
        db_media.id_str = media.id
        db_media.user_id_str = user.id
        #id = pg.insertIfNotExist(pg, 'instagram_media', db_media, "id_str='%s'" % db_media.id_str)
        # map the normalized media onto the unified `m` row
        m = public_bz.storage()
        m.id_str = db_media.id_str
        m.m_type = 'instagram'
        m.m_user_id = db_media.user_id_str
        m.created_at = db_media.created_time
        m.extended_entities = db_media.standard_resolution
        m.content = db_media.comments
        if media.caption:
            m.text = media.caption['text']
        else:
            m.text = None
        m.href = db_media.link
        m.type = db_media.type
        id = pg.insertIfNotExist(pg, 'm', m, "id_str='%s'" % db_media.id_str)
        if id is None:
            # duplicates seem to occur routinely, so just log and carry on
            error = '重复记录 id=%s, name=%s' % (media.id, user.username)
            print error
            #raise Exception(error)
        else:
            print 'new=', media.id, user.username
        # if id is not None and len(medias) <= 2:  # new message: wechat-notify only when at most 2 items
        #     openids = public_db.getOpenidsByName('instagram', user.username)
        #     for data in openids:
        #         if caption != '':
        #             text = caption.get('text')
        #         else:
        #             text = ''
        #         wechat_oper.sendInstagram(data.openid, text, media.images['low_resolution']['url'], user.username, id)
    # follow pagination to the next page, if any
    if medias['pagination']:
        next_url = medias['pagination']['next_url']
        medias = callGetMeidaApi(next_url=next_url)
        saveMedias(user, medias)