class TestParseWorker(unittest.TestCase):
    """Parsing task handler test cases"""

    def setUp(self):
        self.application = webapp.WSGIApplication([
            (WORKER['parser'] + ".*", ParseWorker)
        ], debug=True)
        self.channel = Channel(title="Test Channel",
                               topic="http://monica-ping.tumblr.com/rss",
                               status="subscribed")
        self.channel.put()

    def tearDown(self):
        pass

    def testParseAtom(self):
        """The datastore should have been updated if everything went well"""
        app = TestApp(self.application)
        atom = open("test/atom.xml", "r").read()
        doc = feedparser.parse(atom)
        response = app.post(WORKER["parser"] + str(self.channel.key()),
                            params=atom,
                            content_type="application/atom+xml")
        channel = Channel.get(self.channel.key())
        self.assertEqual(doc.feed.title, channel.title)
        self.assertEqual(doc.feed.id, channel.uid)
        self.assertEqual(len(doc.entries), channel.entry_set.count())
        for e in doc.entries:
            entry = channel.entry_set.filter("uid =", e.id).get()
            self.assertEqual(e.title, entry.title)
            self.assertEqual(e.id, entry.uid)

    def testParseRecurrentAtom(self):
        """Update the entity when an entry is parsed more than once

        An entry that already exists in the datastore should be updated
        instead of inserted. We simply assert that the entry count does
        not change.
        """
        app = TestApp(self.application)
        atom = open("test/atom.xml", "r").read()
        doc = feedparser.parse(atom)
        key = self.channel.key()
        response = app.post(WORKER["parser"] + str(key),
                            params=atom,
                            content_type="application/atom+xml")
        oldcount = Channel.get(key).entry_set.count()
        # Rerun the parsing task
        response = app.post(WORKER["parser"] + str(key),
                            params=atom,
                            content_type="application/atom+xml")
        newcount = Channel.get(key).entry_set.count()
        self.assertEqual(oldcount, newcount)
def play_game(input, channel, user):
    """Uses input from the Slack channel to process game play"""
    if input[0] == 'play':
        if len(input) == 1:
            message = """You need to tag someone to play! \n TYPE: '/ttt play @an_awesome_person' """
            return send_message(channel, message)
        # Query the channel to see if a game is in play.
        # NOTE: the original compared the unbound method itself to True
        # (`Channel.query_channel_game is True`), which is always False;
        # calling it with the channel is assumed to be the intent.
        elif Channel.query_channel_game(channel) is True:
            message = """Sorry, game in play! TYPE: '/ttt board' to show the board! """
            return send_message(channel, message)
        else:
            Channel.link_game_channel(channel, user, input[1])
            message = """Time to play! \n From left to right, top to bottom the spaces are numbers 1-9 TYPE: '/ttt move (then your number)' to make a move!"""
            return send_message(channel, message)
    elif input[0] == 'board':
        return send_message(channel, display_board(channel))
    elif input[0] == 'move':
        if len(input) < 2:  # guard against a missing argument (input[1] would raise IndexError)
            message = """Please specify space!"""
            return send_message(channel, message)
        else:
            if Move.whose_turn(channel) != user:
                message = "Not your turn!"
                return send_message(channel, message)
            elif Move.move_made(input[1], channel):
                message = "Move already made!"
                return send_message(channel, message)
            else:
                Move.create_move(channel, user, input[1])
                is_game_over = Move.game_over(channel, user)
                if is_game_over[0] is True:
                    Move.clear_game(channel)
                    return send_message(channel, "Yay, you won!")
                else:
                    if Move.board_full(channel) is True:
                        message = "Cat's game! Try again."
                        Move.clear_game(channel)
                        return send_message(channel, message)
                    else:
                        message = "Your turn: " + Move.whose_turn(channel)
                        send_message(channel, display_board(channel))
                        return send_message(channel, message)
    return
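# --- Illustrative sketch (not from the original source) ---
# play_game() expects a pre-tokenized command plus channel/user ids. A
# minimal Flask endpoint wiring a Slack slash command to it might look
# like this; the '/ttt' route and the use of Flask here are assumptions,
# while the form fields (text, channel_id, user_name) follow Slack's
# documented slash-command payload.
from flask import Flask, request

app = Flask(__name__)

@app.route('/ttt', methods=['POST'])
def ttt_command():
    words = request.form.get('text', '').split()  # e.g. ['play', '@friend']
    return play_game(words,
                     request.form.get('channel_id'),
                     request.form.get('user_name'))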
def get(self):
    logging.info('STARTING PROGRAMMING BACKEND')
    self.queue = taskqueue.Queue(name='programming')
    self.queue.purge()
    self.queue = taskqueue.Queue(name='twitter')
    self.queue.purge()
    channels = Channel.get_public()
    if not len(channels):
        no_media = len(Media.all().fetch(10)) == 0
        Programming(no_media)  # Do fetch if no media
        channels = Channel.get_public()
    for c in channels:
        Programming.set_programming(c.key().name(), queue='programming',
                                    fetch_twitter=(not constants.DEVELOPMENT))
def get_articles(userid):
    user = User.objects(id=userid).first()
    # Default to the first page of ten if the client omits the params,
    # so int() does not blow up on None.
    page = int(request.args.get('page', 1))
    per_page = int(request.args.get('per_page', 10))
    kws = {'user': user}
    if request.args.get("status") is not None:
        kws['status'] = int(request.args.get("status"))
    if request.args.get("channel_id") is not None:
        channel = Channel.objects(id=request.args.get("channel_id")).first()
        kws['channel'] = channel
    if request.args.get("begin_pubdate") is not None:
        kws['created__gte'] = request.args.get("begin_pubdate")
    if request.args.get("end_pubdate") is not None:
        kws['created__lte'] = request.args.get("end_pubdate")
    articles = Article.objects(**kws)
    paginated_articles = articles.skip((page - 1) * per_page).limit(per_page)
    return jsonify({
        "message": 'OK',
        "data": {
            "total_count": articles.count(),
            "page": page,
            "per_page": per_page,
            # Return the paginated slice, not the full query set.
            "results": paginated_articles.to_public_json()
        }
    })
def get(self):
    task = self.request.get("task")
    if task == "cleanup":
        # Remove obsolete entries
        Entry.cleanup()
    elif task == "subscribe":
        # Periodically make a subscribe request.
        #
        # Prefer subscribing to a newly added channel (i.e. status == None);
        # if there aren't any, confirm the least recently checked subscription.
        ch = Channel.all().filter("status =", None).get()
        if not ch:
            ch = Channel.all().filter("status =", "subscribed").order("lastcheck").get()
        if ch:  # guard added: both queries may come back empty
            ch.subscribe()
    else:
        self.error(404)
def post(self, client_id):
    # TODO: we should use memcached for performance.
    for ch in Channel.query():
        id = ch.key.id()
        if id == client_id:
            continue
        channel.send_message(id, self.request.body)
def post(self):
    client_id = self.request.get('from')
    logging.error('disconnected!: {}'.format(client_id))
    ch = Channel.get_by_id(client_id)
    if ch:
        ch.key.delete()
def handle_def(self, bot, update):
    text = update.message.text
    logger.debug("/def received: %s", text)
    channel_telegram_id = update.message.chat.id
    channel_telegram_name = update.message.chat.username
    user_telegram_id = update.message.from_user.id
    user_telegram_name = (update.message.from_user.username
                          or update.message.from_user.first_name
                          or "Unknown")
    reply = update.message.reply_text
    parts = text.split(None, 2)
    if len(parts) == 3:
        term = parts[1].lower()
        term_content = parts[2]
        user = User.find_create(user_telegram_id, user_telegram_name)
        channel = Channel.find_create(channel_telegram_id, channel_telegram_name)
        Definition.insert_update(user, channel, term, term_content)
        logger.debug("Saved definition for %s", term)
        reply("Your definition for term '{}' has been saved".format(term))
    else:
        reply("Please provide a term and its content to create or update.")
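# Illustration only: text.split(None, 2) caps the split at two, so a
# multi-word definition body survives as a single string in parts[2].
parts = "/def tps Totally Pointless Summary".split(None, 2)
assert parts == ['/def', 'tps', 'Totally Pointless Summary']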
def render_sec_featured(self):
    """Render featured section"""
    query = Channel.all().filter("featured =", True)
    featured_channels = query.fetch(6)
    entries = [ch.latest_entry for ch in featured_channels if ch.latest_entry]
    path = templatepath("_featured.html")
    return template.render(path, {"entries": entries})
def get_channels():
    channels = Channel.objects()
    return jsonify({
        'message': 'ok',
        'data': {
            'channels': channels.to_public_json()
        }
    })
@classmethod
def clear_programs(cls, cid=None, all=False):
    if cid:
        channel = Channel.get_by_key_name(cid)
        Programming.clear_channel(channel)
    elif all:
        Programming.clear()
    else:
        Programming.clear_channels()
def client_get_channels(userid):
    channels = Channel.objects()
    return jsonify({
        "message": 'OK',
        "data": {
            "channels": channels.to_public_json()
        }
    })
def delete_user_channel(userid, channelid):
    user = User.objects(id=userid).first()
    channel_del = Channel.objects(id=channelid).first()
    user.channels.remove(channel_del)
    user.save()
    return jsonify({
        "message": 'OK',
        "data": {}
    })
def parseChannels(self, song):
    for port in range(4):
        for channelno in range(16):
            instrument = self.readSignedInt()
            channel = Channel(instrument)
            channel.port = port
            channel.number = channelno
            channel.volume = self.readByte()
            channel.balance = self.readByte()
            channel.chorus = self.readByte()
            channel.reverb = self.readByte()
            channel.phaser = self.readByte()
            channel.tremolo = self.readByte()
            # Two padding bytes, read and discarded
            blank1 = self.readByte()
            blank2 = self.readByte()
            song.channels.append(channel)
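# --- Illustrative sketch (not from the original source) ---
# The reader primitives used by parseChannels() are not shown in this
# snippet. A minimal stand-in, assuming a little-endian binary stream as
# in Guitar Pro-style files, could be:
import struct

class StreamReader(object):
    def __init__(self, stream):
        self.stream = stream

    def readSignedInt(self):
        # 4-byte little-endian signed integer
        return struct.unpack('<i', self.stream.read(4))[0]

    def readByte(self):
        # one unsigned byte
        return struct.unpack('<B', self.stream.read(1))[0]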
@classmethod
def add_youtube_feeds(cls):
    channels = Channel.all().fetch(100)
    for channel in channels:
        for keyword in channel.keywords:
            yt_service = gdata.youtube.service.YouTubeService()
            gdata.alt.appengine.run_on_appengine(yt_service)
            uri = Programming.YOUTUBE_FEED % ('most_popular', keyword)
            # NOTE: the original called GetRecentlyFeaturedVideoFeed() and
            # ignored `uri`; fetching the keyword feed it just built is
            # assumed to be the intent.
            feed = yt_service.GetYouTubeVideoFeed(uri)
            medias = Media.add_from_entry(feed.entry)
            for media in medias:
                Program.add_program(channel, media)
def user_add_channel(userid):
    user = User.objects(id=userid).first()
    body = request.json
    channels = body.get('channels')
    channel_id = channels[0]['id']
    channel_add = Channel.objects(id=channel_id).first()
    user.channels.append(channel_add)
    user.save()
    return jsonify({
        "message": 'OK',
        "data": {}
    })
def set_new_selected(self, image):
    # Set the previously selected channel's status to False
    self.channel_master.selected = False
    # Fetch the channel that was clicked and set the frame coordinates
    new_channel = Channel.get(Channel.id == int(image.id))
    new_channel.selected = True
    new_channel.x = image.x
    new_channel.y = image.y
    new_channel.width = image.width
    new_channel.height = image.height
    self.channel_master = new_channel
    self.selected_box = image.parent
def save_channel_in_database(self, name, type, selected_image):
    mainwindow = App.get_running_app().root
    if Channel.select().count() > 0:
        selected_channel = Channel.get(Channel.selected == True)
        if selected_channel:
            selected_channel.selected = False
            # Persist the flag, otherwise two rows would stay selected
            selected_channel.save()
            print("a channel was previously selected")
    self.gridchannels.channel_master = Channel.create(
        name=name,
        type=type,
        pin=1,
        note=38,
        threshold=10,
        scan=20,
        mask=20,
        retrigger=7,
        gain=15,
        curve=0,
        curveform=100,
        xtalk=0,
        xtalkgroup=0,
        image="resources/images/PNG/" + selected_image + ".png",
        x=0.0,
        y=0.0,
        width=0.0,
        height=0.0,
        selected=True,
        active=True)
    self.dismiss()
    self.gridchannels.list_channels = Channel.select().order_by(Channel.id)
    # Add the channel to the grid
    self.gridchannels.insert_channel_in_grid(self.gridchannels.channel_master)
    self.gridchannels.set_values_selected()
def remove_channel_in_database(self, popup, btn):
    popup.dismiss()
    self.channel_master.delete_instance()
    if self.selected_box:
        self.ids.grid.remove_widget(self.selected_box)
    self.list_channels = Channel.select().order_by(Channel.id)
    if len(self.list_channels) > 0:
        self.channel_master = self.list_channels[len(self.list_channels) - 1]
        self.channel_master.selected = True
        self.set_values_selected()
        self.remove_frame()
        # self.set_background_channel()
    else:
        self.set_grid_channels()
@classmethod
def clear_channels(cls):
    channels = Channel.all().fetch(None)
    memcache.delete('channels')
    memcache.delete('programming')
    for c in channels:
        c.next_time = None
        c.put()
        chan_programs = c.programs.fetch(None)
        for cp in chan_programs:
            try:
                cp.program.delete()
            except:
                pass
            cp.delete()
def upload_article(userid, articleId):
    data = request.json
    print(data)
    channel = Channel.objects(id=data.get('channel_id')).first()
    article = Article.objects(id=articleId).first()
    d_cover = article.covers
    cover = Cover(type=data.get('cover')['type'],
                  images=data.get('cover')['images']).save()
    article.title = data.get('title')
    article.channel = channel
    article.content = data.get('content')
    article.covers = cover
    d_cover.delete()
    article.save()
    return jsonify({'message': 'ok'})
def silent_all_channels(self):
    self.make_thread(self.thread_progress())
    list_channels = Channel.select().order_by(Channel.id)
    hhc = 0
    for channel in list_channels:
        if channel.note == 4:
            hhc = channel.pin
        channel.active = False
        channel.save()
    for i in range(16):
        if self.ser.isOpen():
            self.sendGain(i, 0.0)
            if i == hhc:
                self.sendThreshold(i, 100.0)
    print("zeroed")
    self.selected.set_values(self.channel_master)
@classmethod
def clear_model(cls, model):
    channels = Channel.all().fetch(None)
    memcache.delete('channels')
    memcache.delete('programming')
    for c in channels:
        c.next_time = None
        c.programming = []
        c.put()
    try:
        while True:
            q = db.GqlQuery("SELECT __key__ FROM " + model)
            # The assert breaks the loop (via AssertionError) once the
            # kind has no entities left.
            assert q.count()
            db.delete(q.fetch(200))
            time.sleep(0.5)
    except Exception, e:
        pass
def silent_channel(self, image):
    print("muted ", image.id)
    channel = Channel.get(Channel.id == int(image.id))
    channel.active = not channel.active
    channel.save()
    if self.ser.isOpen():
        if not channel.active:
            self.sendGain(channel.pin, 0)
            if channel.note == 4:
                self.sendThreshold(channel.pin, 100)
            image.source = "resources/icons/icons_no_silence.png"
        else:
            self.sendGain(channel.pin, int(self.selected.dict_controls["Gain"].value))
            if channel.note == 4:
                self.sendThreshold(channel.pin, self.selected.dict_controls["Threshold"].value)
            image.source = "resources/icons/icons_silence.png"
def get(self): """Handling PuSH verification Response: 2xx hub.challenge We agree with the action, the token matched, the topic found. 404 Disagree with the action, please don't retry. 4xx / 5xx Temporary failure, please retry later. """ logging.info("Upon verification: %s from %s" % (self.request.url, self.request.remote_addr)) token = self.request.get("hub.verify_token") if token != HUB["token"]: # token not match self.error(404) logging.error("Token not match: %s from %s" % (self.request.url, self.request.remote_addr)) return # fail fast # PuSH verification will come at WORKER['subbub'] + `key` # path = WORKER['subbub'] + "key" key = self.request.path[len(WORKER["subbub"]) :] try: channel = Channel.get(key) except: logging.error("Broken key: %s from %s" % (self.request.path, self.request.remote_addr)) self.error(404) else: if channel: mode = self.request.get("hub.mode") topic = self.request.get("hub.topic") if mode and topic and channel.status == mode[:-1] + "ing" and channel.topic == topic: channel.status = mode + "d" channel.put() logging.info("Verify success: %s to %s" % (channel.status, channel.topic)) self.response.out.write(self.request.get("hub.challenge")) else: logging.error("Status or topic not match: %s" % self.request.url) self.error(404) else: # Topic not found logging.error("Channel key not found: %s" % key) self.error(412)
def seed_channel(token):
    print "seeding channel"
    url = 'https://slack.com/api/team.info?token={}&pretty=1'.format(token)
    response = requests.get(url)
    js = response.json()
    team_id = js['team']['id']
    team_name = js['team']['domain']
    channel = Channel(
        channel_id=team_id,
        cohort_name=team_name,
    )
    db.session.add(channel)
    db.session.commit()
def post(self):
    key = self.request.path[len(WORKER["subscriber"]):]
    try:
        channel = Channel.get(key)
    except:
        logging.error("Broken channel key: %s" % self.request.path)
        return
    action = self.request.get("hub.mode")
    if not action:
        logging.error("hub.mode not found in payload: %s from %s" % (self.request.body, self.request.url))
        self.error(204)
        return  # without this, a missing hub.mode would fall through to unsubscribe()
    if channel:
        if action == "subscribe":
            channel.subscribe()
        else:
            channel.unsubscribe()
    else:
        logging.error("Channel key not found: %s" % self.request.path)
        self.error(204)
def post(self): """Handle PuSH notifications Response: 2xx Notification received 3xx / 4xx / 5xx Fail, please retry the notification later Atom/rss feed is queued to `ParseWorker` for later parsing """ type = self.request.headers["Content-Type"] # Content-Type not match, respond fast if type not in ["application/atom+xml", "application/rss+xml"]: self.response.headers.__delitem__("Content-Type") self.error(204) return try: key = self.request.path[len(WORKER["subbub"]) :] ch = Channel.get(key) except (db.KindError, db.BadKeyError): logging.error("Broken Key at notification: %s" % self.request.url) self.response.headers.__delitem__("Content-Type") self.response.set_status(204) except: # Datastore Error, please retry notification self.error(500) else: body = self.request.body.decode("utf-8") if not (ch and body): if not ch: logging.error("Key Not Found at notification: %s" % self.request.url) self.response.headers.__delitem__("Content-Type") self.response.set_status(204) else: taskqueue.Task(body, url=WORKER["parser"] + key, headers={"Content-Type": type}).add(queue_name="parse") logging.info("Upon notifications: %s from %s" % (self.request.url, self.request.remote_addr)) self.response.set_status(202)
@classmethod
def remove_program(cls, channel_id, media):
    from model import Channel
    cached_programming = memcache.get('programming') or {}
    new_schedule = []
    sched = 0
    if cached_programming.get(channel_id):
        for program in cached_programming[channel_id]:
            time = iso8601.parse_date(program['time']).replace(tzinfo=None)
            if program['media']['id'] == media.id:
                sched += 1
                continue
            if sched:
                # Pull later programs forward by the duration of each
                # removed program.
                new_time = time - datetime.timedelta(seconds=(sched * media.duration))
                program['time'] = new_time.isoformat()
            new_schedule.append(program)
        channel = Channel.get_by_key_name(channel_id)
        channel.next_time = channel.next_time - datetime.timedelta(seconds=(sched * media.duration))
        channel.put()
        cached_programming[channel_id] = new_schedule
        memcache.set('programming', cached_programming)
    return new_schedule
class TestVerification(unittest.TestCase):
    """Verification handling test cases

    PubSubHubbub 0.3 compliant, async mode only:
        2xx - Verify success, subscription confirmed
        404 - Disagree with the subscription; verify should not be retried
        xxx - Verify temporarily failed; please retry later
    """

    def setUp(self):
        self.application = webapp.WSGIApplication([
            (WORKER['subbub'] + ".*", PushCallback)
        ], debug=True)
        self.channel = Channel(title="Test Channel",
                               topic="http://dummychannel.dev/atom",
                               status="subscribing")
        self.channel.put()

    def tearDown(self):
        self.channel.delete()

    def get(self, key=None, mode=None, topic=None, challenge=None, token=None):
        """HTTP GET a verify request"""
        url = WORKER['subbub']
        if key:
            url += key + "?"
        if mode:
            url += "hub.mode=" + mode
        if topic:
            url += "&hub.topic=" + topic
        if challenge:
            url += "&hub.challenge=" + challenge
        if token:
            url += "&hub.verify_token=" + token
        app = TestApp(self.application)
        return app.get(url, expect_errors=True)

    def verify(self, key=None, topic=None, challenge="venus", mode="subscribe", token=HUB["token"]):
        """Simulate a PuSH verify request"""
        if not key:
            key = str(self.channel.key())
        if not topic:
            topic = self.channel.topic
        response = self.get(key=key, mode=mode, topic=topic,
                            challenge=challenge, token=token)
        return response

    def testAllParamsOK(self):
        """Expect 200 OK and hub.challenge echoed if all params match."""
        challenge = "venus"
        response = self.verify()
        self.assertEqual("200 OK", response.status)
        self.assertEqual(challenge, response.body)
        # Refetch the instance from the datastore,
        # so its attributes get updated.
        channel = Channel.get(self.channel.key())
        self.assertEqual(channel.status, "subscribed")

    def testVerifyTokenNotMatch(self):
        """Expect 404 Not Found if the verify token does not match.

        The (un)subscribe request must have been initiated by someone
        else, or the token is broken. The hub will not retry.
        """
        response = self.verify(token="brokentoken")
        self.assertEqual("404 Not Found", response.status)

    def testCallbackNotMatch(self):
        """Expect 404 Not Found if the callback is not found.

        The key associated with the callback URL could not be found in
        the datastore. The hub will not retry.
        """
        response = self.verify(key="randomekeystring")
        self.assertEqual("404 Not Found", response.status)

    def testTopicNotMatch(self):
        """Expect 404 Not Found if the topic does not match.

        The topic does not match the record in the datastore. The hub
        will not retry.
        """
        response = self.verify(topic="http://random.dev/atom")
        self.assertEqual("404 Not Found", response.status)

    def testModeNotMatch(self):
        """Expect 404 Not Found if hub.mode does not match."""
        response = self.verify(mode="unsubscribe")
        self.assertEqual("404 Not Found", response.status)
class TestNotification(unittest.TestCase):
    """Notification test cases

    PubSubHubbub 0.3:
        202 - Notification accepted, added to taskqueue
        204 - Ignored, payload not valid
        2xx - General notify success
        xxx - Fail; please retry the notification later

    "Subscribers SHOULD respond to notifications as quickly as possible;
    their success response code SHOULD only indicate receipt of the
    message, not acknowledgment that it was successfully processed by
    the subscriber."
                            -- Section 7.3, PubSubHubbub Core 0.3
    """

    def setUp(self):
        self.application = webapp.WSGIApplication([
            (WORKER['subbub'] + ".*", PushCallback)
        ], debug=True)
        self.channel = Channel(title="Test Channel",
                               topic="http://dummychannel.dev/atom",
                               status="subscribed")
        self.channel.put()
        self.atom = open("test/atom.xml", "r").read()

    def tearDown(self):
        self.channel.delete()

    def notify(self, key, type, body):
        """HTTP POST a notification"""
        app = TestApp(self.application)
        if type == "rss":
            ct = "application/rss+xml"
        elif type == "atom":
            ct = "application/atom+xml"
        else:
            ct = type
        response = app.post(WORKER["subbub"] + key, params=body,
                            content_type=ct, expect_errors=True)
        return response

    def testNotifyAtomAsAtom(self):
        """Expect 202 Accepted when notifying atom as atom"""
        response = self.notify(str(self.channel.key()), "atom", self.atom)
        self.assertEqual("202 Accepted", response.status)

    def testNotifyAtomAsRss(self):
        """Expect 202 Accepted when notifying atom as rss

        Succeeds even though the content type does not match.
        """
        response = self.notify(str(self.channel.key()), "rss", self.atom)
        self.assertEqual("202 Accepted", response.status)

    def testNotifyKeyBroken(self):
        """Expect 204 No Content if the notify key is broken

        We do not support aggregated atom feeds for now.
        """
        response = self.notify("brokenkeystring", "atom", self.atom)
        self.assertEqual("204 No Content", response.status)

    def testNotifyKeyMissing(self):
        """Expect 204 No Content if the notify key is missing"""
        response = self.notify("", "atom", self.atom)
        self.assertEqual("204 No Content", response.status)

    def testNotifyEmptyPayload(self):
        """Expect 204 No Content if the notify payload is empty"""
        response = self.notify(str(self.channel.key()), "atom", "")
        self.assertEqual("204 No Content", response.status)

    def testBadContentType(self):
        """Expect 204 No Content if the notify content type does not match"""
        response = self.notify(str(self.channel.key()),
                               "application/x-www-form-urlencoded", self.atom)
        self.assertEqual("204 No Content", response.status)

    def testMissingContentType(self):
        """Expect 204 No Content if the notify content type is missing"""
        response = self.notify(str(self.channel.key()), "", self.atom)
        self.assertEqual("204 No Content", response.status)
def set_grid_channels(self):
    self.canvas.remove(self.line_selection)  # clear the selection outline
    self.list_channels = Channel.select().order_by(Channel.id)  # fetch the channels
    self.ids.grid.clear_widgets()  # clear the grid
    for channel in self.list_channels:  # iterate over the channels
        self.insert_channel_in_grid(channel)
def add_channel(channel):
    """
    Suggest a channel
    :return: {ok: bool, msg: str}
    """
    if not matches_channel_name(channel):
        return Response(response=json.dumps({
            "ok": False,
            "msg": "invalid channel",
        }), status=400, content_type="application/json")

    j = loop.run_until_complete(telethon_api.get_channel_info(channel))

    if not j:
        return Response(response=json.dumps({
            "ok": False,
            "msg": "this channel does not exist",
        }), status=404, content_type="application/json")

    if "full_chat" not in j or "id" not in j["full_chat"]:
        return Response(response=json.dumps({
            "ok": False,
            "msg": "Telethon API returned invalid channel",
        }), status=500, content_type="application/json")

    existing = db.get_channel_by_id(j["full_chat"]["id"])
    if existing:
        return Response(response=json.dumps({
            "ok": False,
            "msg": "This channel is already being ingested",
        }), status=400, content_type="application/json")

    db.upsert_channel(Channel(
        channel_id=j["full_chat"]["id"],
        channel_name=channel,
        updated_utc=int(time.time()),
        retrieved_utc=int(time.time()),
        min_message_id=0,
        max_message_id=0,
        is_active=True,  # What is that?
        is_complete=False,
    ))

    return Response(response=json.dumps({
        "ok": True,
        "msg": "ok",
    }), status=200, content_type="application/json")
@classmethod
def set_programming(cls, channel_id, duration=3600, schedule_next=False,
                    fetch_twitter=True, queue='programming', target=None,
                    kickoff=False):
    import broadcast
    import constants
    from model import Channel
    from model import Program

    # Stored programming
    programming = memcache.get('programming') or {}
    onlineUsers = memcache.get('web_channels') or {}

    logging.info('programming: ' + channel_id)
    next_programs = Programming.next_programs(programming.get(channel_id, []),
                                              duration, prelude=300)
    gap = Programming.gap(programming.get(channel_id, []), duration)
    logging.info('GAP: ' + str(gap))

    if programming.get(channel_id) and len(programming[channel_id]) and \
            programming[channel_id][0]['media'].get('live') == True:
        logging.info('live tweets')
        # Update tweets for live events
        media = Media.get_by_key_name(programming[channel_id][0]['media']['id'])
        deferred.defer(Programming.fetch_related_tweets, [],
                       _name='twitter-' + channel_id + '-' + str(uuid.uuid1()),
                       _queue='twitter')

    programs = []
    if not programming.get(channel_id) or gap > 60:
        logging.info('PROGRAMMING')
        channel = Channel.get_by_key_name(channel_id)
        # channel.update_next_time()
        viewers = (memcache.get('channel_viewers') or {}).get(str(channel_id), [])
        cols = channel.get_collections()
        all_medias = []
        backup_medias = []
        limit = 100
        for col in cols:
            medias = []
            filtered_medias = []
            offset = 0
            while offset <= 400:
                medias = col.get_medias(limit=limit, offset=offset)
                logging.info('fetched medias: ' + str(len(medias)))
                if not len(medias):
                    break
                backup_medias += medias
                # Don't reprogram anything already scheduled
                filtered_medias = Programming.no_reprogram(programming.get(channel_id, []), medias)
                # Don't repeat the same program within the cutoff window
                # (two hours for ephemeral collections, twelve otherwise)
                cutoff = 7200 if col.lifespan else 43200
                filtered_medias = [c for c in filtered_medias if not c.last_programmed or
                                   (datetime.datetime.now() - c.last_programmed).seconds > cutoff]
                # At most, 30% of the audience has already "witnessed" this program
                # filtered_medias = [m for m in filtered_medias if not len(viewers) or
                #                    float(len(Programming.have_seen(m, viewers)))/len(viewers) < .3]
                all_medias += filtered_medias
                logging.info('all medias: ' + str(len(all_medias)))
                offset += limit
        all_medias = backup_medias if not len(all_medias) else all_medias

        # Don't repeat already programmed
        # all_medias = Programming.no_reprogram(next_programs, all_medias)

        # StorySort algorithm
        # all_medias = Programming.story_sort(all_medias)

        # Only one publisher per story
        all_medias = Programming.unique_publishers(all_medias)

        # Grab "duration" seconds of programming
        all_medias = Programming.timed_subset(all_medias, duration)

        if fetch_twitter:
            # Find related twitter posts
            deferred.defer(Programming.fetch_related_tweets, all_medias,
                           _name='twitter-' + channel.name.replace(' ', '') + '-' + str(uuid.uuid1()),
                           _queue='twitter',
                           _countdown=30)

        # Truncate old programs
        programming[channel_id] = Programming.cutoff_programs(programming.get(channel_id), 300)

        for media in all_medias:
            program = Program.add_program(channel, media,
                                          min_time=datetime.datetime.now(),
                                          max_time=(datetime.datetime.now() +
                                                    datetime.timedelta(seconds=duration)))
            logging.info(program)
            if program:
                if not programming.get(channel_id, None):
                    programming[channel_id] = []
                programming.get(channel_id).append(
                    program.toJson(fetch_channel=False, fetch_media=True,
                                   media_desc=False, pub_desc=False))
                programs.append(program)
                logging.info('ADDING: ' + media.name + ' at: ' + program.time.isoformat())
            if len(pickle.dumps(programming)) > 1000000:
                # We can only fit 1mb into memcache
                break

        if len(programs):
            broadcast.broadcastNewPrograms(channel, programs)
        memcache.set('programming', programming)

        channels = memcache.get('channels') or []
        updated = False
        for i, c in enumerate(channels):
            if c['id'] == channel_id:
                channels[i] = channel.toJson(get_programming=False)
                updated = True
        if not updated:
            channels.append(channel.toJson(get_programming=False))
        memcache.set('channels', channels)

    # Schedule our next programming selection
    if schedule_next and (not constants.SLEEP_PROGRAMMING or
                          (constants.SLEEP_PROGRAMMING and (kickoff or len(onlineUsers.keys())))):
        logging.info('NUMBER OF PROGRAMS: ' + str(len(programs)))
        if len(programs) > 1:
            next_gen = (programs[-2].time - datetime.datetime.now()).seconds / 2
        elif len(programs) == 1:
            next_gen = programs[0].media.duration / 2
        else:
            next_gen = 60
        next_gen = min(next_gen,
                       reduce(lambda x, y: x + y, [p.media.duration for p in programs], 0)
                       if len(programs) else 10)
        next_gen = min(next_gen, duration / 2)
        logging.info('COUNTDOWN FOR ' + channel_id + ': ' + str(next_gen))
        deferred.defer(Programming.set_programming, channel_id,
                       fetch_twitter=fetch_twitter,
                       _name=channel_id + '-' + str(uuid.uuid1()),
                       _countdown=next_gen,
                       _queue=queue)
    return programs
def post(self): """Parsing queued feeds""" doc = feedparser.parse(self.request.body) # Bozo feed handling # stealed from PubSubHubbub subscriber repo if doc.bozo: logging.error("Bozo feed data. %s: %r", doc.bozo_exception.__class__.__name__, doc.bozo_exception) if hasattr(doc.bozo_exception, "getLineNumber") and hasattr(doc.bozo_exception, "getMessage"): line = doc.bozo_exception.getLineNumber() logging.error("Line %d: %s", line, doc.bozo_exception.getMessage()) segment = self.request.body.split("\n")[line - 1] logging.info("Body segment with error: %r", segment.decode("utf-8")) return # fail fast # WORKER['parser'] + `key` key = self.request.path[len(WORKER["parser"]) :] # Try to get the channel by key; # fallback to feed id, if not found; # and at last we'll resort to entry source id, # to find out the associated channel channel = None uid = doc.feed.id try: channel = Channel.get(key) except: channel = Channel.all().filter("uid =", uid).get() else: # First time get the notification, # so update channel's properties if channel and not channel.uid: channel.title = doc.feed.title.split(" - ")[0] channel.uid = uid # Fallback to topic feed, if no link found channel.link = doc.feed.get("link", channel.topic) channel.put() updates = [] for e in doc.entries: author = e.author if e.get("author") else None content = e.content[0].value if e.get("content") else e.summary # Fallback to published if no updated field. t = e.updated_parsed if e.get("updated_parsed") else e.published_parsed updated = datetime(t[0], t[1], t[2], t[3], t[4], t[5]) # If we have this entry already in datastore, then the entry # should be updated instead of inserted. ent = Entry.all().filter("uid =", e.id).get() if not ent: if not channel: uid = e.source.id channel = Channel.all().filter("uid =", uid).get() ent = Entry( title=e.title, link=e.link, content=content, author=author, updated=updated, uid=e.id, channel=channel, ) logging.info("Get new entry: %s" % e.id) else: ent.title = e.title ent.link = e.link ent.content = content ent.author = author ent.updated = updated logging.info("Get updated entry: %s" % e.id) updates.append(ent) db.put(updates)
def get(self):
    context = {}
    context['channels'] = Channel.all().order("title").fetch(1000)
    path = templatepath("following.html")
    self.response.out.write(template.render(path, context))
def post(self):
    client_id = self.request.get('from')
    logging.error('connected!: {}'.format(client_id))
    Channel(id=client_id).put()
def ingest_channel(channel_name: str, channel_id: int, stop_point: int = None):
    BATCH_SIZE = 250
    current_message_id = None
    max_message_id = None
    min_message_id = None
    total_messages = 0
    seen_ids = set()
    stop_flag = False

    while True:
        es_records = []
        pg_records = []
        logger.debug("Fetching %d ids (in descending order) from %s starting at id %s" %
                     (BATCH_SIZE, channel_name, current_message_id))
        messages = telethon_api.fetch_messages(
            channel=channel_name,
            size=BATCH_SIZE,
            max_id=current_message_id,
        )
        retrieved_utc = int(time.time())

        # NOTE: guard added; without it an empty batch would loop forever
        # refetching the same window.
        if not messages:
            break

        for m in messages:
            message_id = m.id
            if stop_point and message_id <= stop_point:
                stop_flag = True
                break
            if message_id in seen_ids:
                logger.warning("Message id %d was already ingested" % (message_id,))
            seen_ids.add(message_id)
            total_messages += 1

            if current_message_id is None or message_id < current_message_id:
                current_message_id = message_id
            if min_message_id is None or message_id < min_message_id:
                min_message_id = message_id
            if max_message_id is None or message_id > max_message_id:
                max_message_id = message_id

            message_channel_id = m.to_id.channel_id
            if message_channel_id != channel_id:
                logger.warning("Message channel id for %s does not match "
                               "expected value. %d != %d" %
                               (channel_name, message_channel_id, channel_id))

            record_id = (message_channel_id << 32) + message_id
            data = m.to_json()
            updated_utc = retrieved_utc

            es_records.append(translate_message_for_es(m, channel_name, retrieved_utc))
            pg_records.append(Message(
                record_id=record_id,
                message_id=message_id,
                channel_id=channel_id,
                retrieved_utc=retrieved_utc,
                updated_utc=updated_utc,
                data=data,
            ))

        db.insert_messages(pg_records)
        es.bulk_insert(es_records)

        if stop_flag:
            break
        time.sleep(1)  # TODO: rate limit decorator

    logger.debug("A total of %d messages were ingested for channel %s" %
                 (total_messages, channel_name))

    # TODO: Should we update this at every iteration?
    # This way if this crashes halfway through it can resume
    if total_messages > 0:
        db.upsert_channel(Channel(
            channel_id=channel_id,
            channel_name=channel_name,
            updated_utc=int(time.time()),
            retrieved_utc=int(time.time()),
            min_message_id=min_message_id,
            max_message_id=max_message_id,
            is_active=True,
            is_complete=True,
        ))
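# Illustration only: record_id packs the channel id into the high 32 bits
# and the message id into the low 32 bits, so both can be recovered later.
channel_id, message_id = 1234567, 42
record_id = (channel_id << 32) + message_id
assert record_id >> 32 == channel_id
assert record_id & 0xFFFFFFFF == message_id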
def get_channel():
    from model import Channel
    try:
        return Channel.get(Channel.username == SELF_CHANNEL_USERNAME)
    except Channel.DoesNotExist:
        return False