def create_project_task(self, cohort_date=None):
    """Create and store a demo radio-select project Task.

    Args:
        cohort_date: optional datetime; accepted for interface
            compatibility but not read by this helper. The previous
            default, `datetime.datetime.today()` in the signature, was
            evaluated once at function definition, freezing the date for
            the life of the process (eager default-argument bug).

    Returns:
        The stored Task entity.
    """
    if cohort_date is None:
        cohort_date = datetime.datetime.today()
    program_label = 'demo-program'
    task_label = 'task_project_foo'
    project = Project.create(program_label=program_label,
                             organization_id='Organization_Foo')
    task_template = {
        'label': task_label,
        'data_type': 'radio',
        'select_options': [
            {'value': 'normal', 'label': 'true names'},
            {'value': 'alias', 'label': 'aliases'},
        ],
    }
    # Register a program config so the task definition can be resolved.
    Program.mock_program_config(
        program_label,
        {'project_tasklist_template': [{'tasks': [task_template]}]})
    t = Task.create(task_label, 1, 'checkpoint_foo', parent=project,
                    program_label=program_label)
    t.put()
    return t
def set_up(self): """Clear relevant tables from testing SQL database.""" # Let ConsistencyTestCase set up the datastore testing stub. super(TestGraphQLProjectCohort, self).set_up() with mysql_connection.connect() as sql: sql.reset({ 'checkpoint': Checkpoint.get_table_definition(), }) Program.mock_program_config( self.program_label, { 'name': self.program_name, 'default_portal_type': 'name_or_id', 'description': self.program_description, 'cohorts': { self.cohort_label: self.cohort } }) application = webapp2.WSGIApplication(api_routes, config={ 'webapp2_extras.sessions': { 'secret_key': self.cookie_key } }, debug=True) self.testapp = webtest.TestApp(application)
def test_project_task_body(self):
    """Tasks should get dynamic properties from their definition."""
    program = 'demo-program'
    label = 'task_foo'
    owner_project = Project.create(program_label=program,
                                   organization_id='Organization_Foo')
    # The task definition carries an HTML body in the program config.
    definition = {
        'label': label,
        'body': "<p>Demo body.</p>",
    }
    Program.mock_program_config(
        program, {'project_tasklist_template': [{'tasks': [definition]}]})
    task = Task.create(label, 1, 'checkpoint_foo', parent=owner_project,
                       program_label=program)
    client_dict = task.to_client_dict()
    # The dynamic `body` should come through as a non-empty string.
    self.assertIsInstance(client_dict['body'], basestring)
    self.assertGreater(len(client_dict['body']), 0)
def test_create_select(self):
    """Radio tasks should expose select_options in their client dict."""
    program = 'demo-program'
    label = 'task_foo'
    owner_project = Project.create(program_label=program,
                                   organization_id='Organization_Foo')
    options = [
        {'value': 'normal', 'label': 'true names'},
        {'value': 'alias', 'label': 'aliases'},
    ]
    definition = {
        'label': label,
        'data_type': 'radio',
        'select_options': options,
    }
    Program.mock_program_config(
        program, {'project_tasklist_template': [{'tasks': [definition]}]})
    task = Task.create(label, 1, 'checkpoint_foo', parent=owner_project,
                       program_label=program)
    # Options defined in the template should round-trip unchanged.
    self.assertEqual(task.to_client_dict()['select_options'],
                     definition['select_options'])
def test_initial_values(self): """Adopt initial values if specified, overriding defaults.""" # Background program_label = 'demo-program' task_label_default = 'task_default' task_label_init = 'task_init' # One task with default values, one with a non-default initial value. task_template_default = { 'label': task_label_default, # default value of disabled is False } task_template_initial_values = { 'label': task_label_init, 'initial_values': { 'disabled': True }, # override default } Program.mock_program_config( program_label, { 'project_tasklist_template': [{ 'tasks': [ task_template_default, task_template_initial_values, ] }] }) # Creating the project will generate a tasklist with the above tasks. project = Project.create(program_label=program_label, organization_id='Organization_Foo') self.assertFalse(project.tasklist.tasks[0].disabled) self.assertTrue(project.tasklist.tasks[1].disabled)
def set_up(self):
    """Build the test WSGI app, reset SQL tables, and seed a Program."""
    # Let ConsistencyTestCase set up the datastore testing stub.
    super(TestApiSurveys, self).set_up()
    application = webapp2.WSGIApplication(api_routes, config={
        'webapp2_extras.sessions': {
            'secret_key': self.cookie_key
        }
    }, debug=True)
    self.testapp = webtest.TestApp(application)
    # Start each test from empty copies of every table this suite touches.
    with mysql_connection.connect() as sql:
        sql.reset({
            'classroom': Classroom.get_table_definition(),
            'metric': Metric.get_table_definition(),
            'program': Program.get_table_definition(),
            'survey': Survey.get_table_definition(),
            'team': Team.get_table_definition(),
            'user': User.get_table_definition(),
        })
    # A stored, active program for surveys to reference.
    self.ep_program = Program.create(
        name="Engagement Project",
        label='ep18',
        active=True,
        preview_url='foo.com',
    )
    self.ep_program.put()
def create_checkpoints(self):
    """Checkpoints should gain additional properties as client dicts."""
    templates = [
        {
            'name': "Project Foo",
            'label': 'demo_project__foo',
            'body': "foo",
            'tasks': [],
        },
        {
            'name': "Project Bar",
            'label': 'demo_project__bar',
            'body': "bar",
            'tasks': [],
        },
    ]
    Program.mock_program_config(
        'demo-program', {'project_tasklist_template': templates})
    # One checkpoint per template, ordinals counted from 1.
    parent_ids = ('Project_foo', 'Project_bar')
    checkpoints = tuple(
        Checkpoint.create(parent_id=parent_id, ordinal=index + 1,
                          program_label='demo-program', **template)
        for index, (parent_id, template)
        in enumerate(zip(parent_ids, templates))
    )
    Checkpoint.put_multi(checkpoints)
    return checkpoints
def set_up(self):
    """Build the test app, mock cohort config, reset participation tables."""
    # Let ConsistencyTestCase set up the datastore testing stub.
    super(TestApiParticipation, self).set_up()
    application = webapp2.WSGIApplication(api_routes, config={
        'webapp2_extras.sessions': {
            'secret_key': self.cookie_key
        }
    }, debug=True)
    self.testapp = webtest.TestApp(application)
    # Successful download of completion ids triggers a notification, which
    # requires a cohort name.
    Program.mock_program_config(
        self.program_label,
        {'cohorts': {
            self.cohort_label: {
                'name': self.cohort_label
            }
        }},
    )
    # Start from empty participation tables.
    with mysql_connection.connect() as sql:
        sql.reset({
            'participant': Participant.get_table_definition(),
            'participant_data': ParticipantData.get_table_definition(),
        })
def __init__(self, optifs=True, postprocess=True, nobcs=False, slice=False):
    """Initialize parser state.

    Args:
        optifs: enable `if`-related optimization (presumably; confirm).
        postprocess: enable post-processing passes.
        nobcs: disable break/continue handling (presumably; confirm).
        slice: enable slicing mode. NOTE(review): shadows the builtin
            `slice`; kept because the name is caller-visible.
    """
    self.prog = Program()   # program model being built
    self.fncs = {}          # function lookup (name -> function, presumably)
    self.fncsl = []         # functions as a list (insertion order)
    self.fnc = None         # function currently being parsed
    self.loc = None         # location currently being parsed
    self.optifs = optifs
    self.postproc = postprocess
    self.slice = slice
    self.loops = []         # stack of enclosing loops (presumably)
    self.cnt = 0            # counter for generating fresh names
    self.warns = []         # accumulated warnings
    self.hasbcs = False     # set when break/continue is encountered?
    self.nobcs = nobcs
def set_up(self):
    """Build the test WSGI app, reset SQL tables, and seed a Program."""
    # Let ConsistencyTestCase set up the datastore testing stub.
    super(TestApiNetworks, self).set_up()
    application = webapp2.WSGIApplication(api_routes, config={
        'webapp2_extras.sessions': {
            'secret_key': self.cookie_key
        }
    }, debug=True)
    self.testapp = webtest.TestApp(application)
    # Start each test from empty copies of every table this suite touches.
    with mysql_connection.connect() as sql:
        sql.reset({
            'network': Network.get_table_definition(),
            'organization': Organization.get_table_definition(),
            'program': Program.get_table_definition(),
            'user': User.get_table_definition(),
        })
    # A stored, active program for networks to reference.
    self.program = Program.create(
        name="Engagement Project",
        label='ep18',
        min_cycles=3,
        active=True,
        preview_url='foo.com',
    )
    self.program.put()
def create_project_cohort(self, cohort_date=None):
    """Create and store a demo ProjectCohort open around `cohort_date`.

    Args:
        cohort_date: datetime the cohort's window is centered on (opens
            the day before, closes the day after). Defaults to "now" at
            call time. The previous default, `datetime.datetime.today()`
            in the signature, was evaluated only once at import, freezing
            the date for the whole process (eager default-argument bug)
            — any test run after midnight saw a stale window.

    Returns:
        The stored ProjectCohort.
    """
    if cohort_date is None:
        cohort_date = datetime.datetime.today()
    program_label = 'demo-program'
    cohort_label = 'demo-cohort'
    program = Program.get_config(program_label)
    org_id = 'Org_Foo'
    liaison_id = 'User_liaison'
    project = Project.create(organization_id=org_id,
                             program_label=program_label)
    project.put()
    one_day = datetime.timedelta(days=1)
    cohort_config = {
        'label': cohort_label,
        'name': 'Demo Cohort',
        'open_date': str(cohort_date - one_day),  # yesterday
        'close_date': str(cohort_date + one_day),  # tomorrow
    }
    program['cohorts'][cohort_label] = cohort_config
    Program.mock_program_config(
        program_label,
        {'cohorts': {cohort_label: cohort_config}},
    )
    pc = ProjectCohort.create(
        project_id=project.uid,
        organization_id=org_id,
        program_label=program_label,
        cohort_label=cohort_label,
        liaison_id=liaison_id,
    )
    pc.put()
    return pc
def get(self, program_id_or_label):
    """Super-admin search across orgs, teams, classrooms, and users
    scoped to one program."""
    user = self.get_current_user()
    if not user.super_admin:
        return self.http_forbidden()
    # Accept either a uid or a label for the program.
    program = Program.get_by_id(program_id_or_label)
    if not program:
        program = Program.get_by_label(program_id_or_label)
    if not program:
        return self.http_not_found()
    search_str = self.get_param('q', unicode, None)
    if not search_str:
        return self.write([])
    # NOTE(review): the following line is corrupted in this source — it
    # looks like a secret-scrubbing tool replaced part of the code with
    # asterisks. The original presumably handled a "user:" search prefix.
    # Restore this handler from version control before shipping.
    if search_str.startswith('user:'******'t have team r
    orgs = Organization.query_by_name(search_str, program.uid)
    teams = Team.query_by_name(search_str, program.uid)
    classrooms = Classroom.query_by_name(search_str, program.uid)
    users = User.query_by_name_or_email(search_str)
    self.write({
        'organizations': [e.to_client_dict() for e in orgs],
        'teams': [e.to_client_dict() for e in teams],
        'classrooms': [e.to_client_dict() for e in classrooms],
        'users': [e.to_client_dict() for e in users],
    })
def set_up(self):
    """Build the test app, speed up password hashing, mock the program."""
    # Let ConsistencyTestCase set up the datastore testing stub.
    super(TestApiAuthentication, self).set_up()
    application = webapp2.WSGIApplication(api_routes, config={
        'webapp2_extras.sessions': {
            'secret_key': self.cookie_key
        }
    }, debug=True)
    self.testapp = webtest.TestApp(application)
    # Tone down the intense number of hashing rounds for passwords so our
    # unit tests are fast.
    User.password_hashing_context.update(
        sha512_crypt__default_rounds=1000,
        sha256_crypt__default_rounds=1000,
    )
    # Should be able to register with a legit cohort. Make sure it exists.
    Program.mock_program_config(
        self.program_label,
        {
            'name': self.program_name,
            'cohorts': {
                self.cohort_label: {
                    'name': self.cohort_label
                }
            }
        },
    )
def set_up(self):
    """Build the (patched) test app, reset SQL tables, seed a Program."""
    # Let ConsistencyTestCase set up the datastore testing stub.
    super(TestApiParticipants, self).set_up()
    # patch_webapp wraps the WSGI app class (presumably to stub request
    # internals for tests — confirm in the test base class).
    application = self.patch_webapp(webapp2.WSGIApplication)(
        api_routes,
        config={
            'webapp2_extras.sessions': {
                'secret_key': self.cookie_key
            }
        },
        debug=True)
    self.testapp = webtest.TestApp(application)
    # Start each test from empty copies of every table this suite touches.
    with mysql_connection.connect() as sql:
        sql.reset({
            'classroom': Classroom.get_table_definition(),
            'cycle': Cycle.get_table_definition(),
            'participant': Participant.get_table_definition(),
            'program': Program.get_table_definition(),
            'team': Team.get_table_definition(),
            'user': User.get_table_definition(),
        })
    # A stored, active program for participants to reference.
    self.program = Program.create(
        name="Engagement Project",
        label='ep18',
        min_cycles=3,
        active=True,
        preview_url='foo.com',
    )
    self.program.put()
def test_select_checkpoints_with_offset(self):
    """Can use queries like "LIMIT 20,10" to get "pages" of records."""
    program_label = 'demo-program'
    # Two checkpoint templates so the project generates two rows.
    Program.mock_program_config(program_label, {
        'project_tasklist_template': [
            {'name': 'Foo', 'label': 'checkpoint_foo', 'tasks': []},
            {'name': 'Bar', 'label': 'checkpoint_bar', 'tasks': []},
        ]
    })
    project = Project.create(program_label=program_label,
                             organization_id='Organization_Foo')
    project.put()
    # Fetch one-row "pages" at consecutive offsets; they must differ.
    page_one = Checkpoint.get(program_label=program_label, n=1, offset=0)
    page_two = Checkpoint.get(program_label=program_label, n=1, offset=1)
    self.assertNotEqual(page_one[0].uid, page_two[0].uid)
def set_up(self): """Clear relevant tables from testing SQL database.""" # Let ConsistencyTestCase set up the datastore testing stub. super(TestProjectCohort, self).set_up() Program.mock_program_config(self.program_label, { 'default_portal_type': 'name_or_id', })
def test_join_org(self):
    """Joining an org makes task reminders for all contained tasklists."""
    program_label = 'demo-program'
    cohort_label = 'demo-cohort'
    # Guarantee the dates will work by mocking the cohort config.
    cohort_config = {
        'label': cohort_label,
        'name': 'Demo Cohort',
        'open_date': str(datetime.date.today()),
        'close_date': str(datetime.date.today()),
    }
    Program.mock_program_config(
        program_label,
        {'cohorts': {cohort_label: cohort_config}},
    )
    program = Program.get_config(program_label)
    tasklist_template = program['surveys'][0]['survey_tasklist_template']
    # An org owner plus the org, project, and survey they own.
    # (Emails are scrubbed to asterisks in this source.)
    owner = User.create(email='*****@*****.**', user_type='user')
    org = Organization.create(name="Foo Org", liaison_id=owner.uid)
    owner.owned_organizations = [org.uid]
    project = Project.create(program_label=program_label,
                             organization_id=org.uid)
    survey = Survey.create(tasklist_template, project_id=project.uid,
                           organization_id=org.uid,
                           program_label=program_label, ordinal=1,
                           cohort_label=cohort_label)
    owner.put()
    org.put()
    project.put()
    survey.put()
    # The assumption here is the org and its contents are long-standing,
    # so force consistency with all.
    org.key.get()
    project.key.get()
    survey.key.get()
    joiner = User.create(email='*****@*****.**', user_type='user')
    joiner.put()
    self.testapp.post_json(
        '/api/users/{}/organizations'.format(joiner.uid),
        org.to_client_dict(),
        headers=login_headers(owner.uid),
    )
    # One TaskReminder for each of: org tasklist, project tasklist, survey
    # tasklist.
    self.assertEqual(len(TaskReminder.get(ancestor=joiner)), 3)
    return (org, project, survey, owner, joiner)
def test_queue_org_welcome_back(self): Program.mock_program_config('p1', {'project_tasklist_template': []}) # Case 1: New PC, returning. returning_pc1 = ProjectCohort.create( program_label='p1', organization_id='Organization_returning', project_id='Project_returning', cohort_label='2020', ) returning_pc1.put() returning_pc2 = ProjectCohort.create( program_label='p1', organization_id='Organization_returning', project_id='Project_returning', cohort_label='2019', created=datetime.datetime.now() - datetime.timedelta(days=365)) returning_pc2.put() # Case 2: New PC, but not returning. new_pc = ProjectCohort.create( program_label='p1', organization_id='Organization_new', project_id='Project_new', ) new_pc.put() # Case 3: Old PC (not created in the day). old_pc = ProjectCohort.create( program_label='p1', organization_id='Organization_old', project_id='Project_old', created=datetime.datetime.now() - datetime.timedelta(hours=48), ) old_pc.put() # Some tasks are created on put. We're not interested in these. creation_tasks = self.taskqueue_stub.get_filtered_tasks() templates = [ self.create_mandrill_template('p1-{}'.format( auto_prompt.ORG_WELCOME_BACK_SUFFIX)), ] auto_prompt.queue_org_welcome_back(templates) tasks = self.taskqueue_stub.get_filtered_tasks() num_new_tasks = len(tasks) - len(creation_tasks) # Only the returning pc should have a task queued. self.assertEqual(num_new_tasks, 1) expected_url = '/task/email_project/Project_returning/p1-org-welcome-back' self.assertIn(expected_url, [t.url for t in tasks]) Program.reset_mocks()
def test_override_portal_message(self):
    """A program-level override should replace the portal message."""
    label = 'override-program'
    config = {'override_portal_message': 'Override message.'}
    Program.mock_program_config(label, config)
    cohort = ProjectCohort.create(program_label=label)
    expected = config['override_portal_message']
    # The client dict should surface the override verbatim.
    self.assertEqual(cohort.to_client_dict()['portal_message'], expected)
def add_youtube_feeds(cls):
    """Pull featured YouTube videos and add them as programs per channel."""
    channels = Channel.all().fetch(100)
    for channel in channels:
        for keyword in channel.keywords:
            yt_service = gdata.youtube.service.YouTubeService()
            gdata.alt.appengine.run_on_appengine(yt_service)
            # NOTE(review): `uri` is built but never used —
            # GetRecentlyFeaturedVideoFeed() takes no feed URI, so the
            # per-keyword feed is ignored and the same featured feed is
            # fetched for every keyword. Possibly this was meant to be
            # yt_service.GetYouTubeVideoFeed(uri); confirm intent.
            uri = Programming.YOUTUBE_FEED % ('most_popular', keyword)
            feed = yt_service.GetRecentlyFeaturedVideoFeed()
            medias = Media.add_from_entry(feed.entry)
            for media in medias:
                Program.add_program(channel, media)
def get(self, template, context_id):
    """Render a program document: filled in when context_id is a project
    cohort, generic example when it's a program label."""
    # Attempt to treat id as a project cohort (it might be a program, or
    # invalid).
    project_cohort = ProjectCohort.get_by_id(context_id)

    def todt(s):
        # Parse an ISO-formatted date string into a datetime.
        return datetime.datetime.strptime(s, config.iso_date_format)

    if project_cohort:
        # This is a "real" set of instructions with data filled in.
        organization = Organization.get_by_id(
            project_cohort.organization_id)
        liaison = User.get_by_id(organization.liaison_id)
        program = Program.get_config(project_cohort.program_label)
        cohort = program['cohorts'][project_cohort.cohort_label]
        participation_open_date = todt(cohort['open_date'])
        # See notes in program config for why we take a day off for display.
        participation_close_date = (todt(cohort['close_date']) -
                                    datetime.timedelta(1))
    else:
        # This is a generic example version of the document.
        try:
            program = Program.get_config(context_id)
        except ImportError:
            # Unknown label raises ImportError (config is presumably
            # loaded as a module — confirm).
            return self.http_not_found()
        organization = None
        liaison = None
        project_cohort = None
        cohort = None
        participation_open_date = None
        participation_close_date = None
    if template == 'custom_portal_technical_guide':
        # This template doesn't vary by program.
        template_filename = '{}.html'.format(template)
    else:
        template_filename = '{}_{}.html'.format(program['label'], template)
    self.write(
        template_filename,
        organization=organization,
        liaison=liaison,
        program_name=program['name'],
        cohort_name=cohort['name'] if cohort else '',
        program_description=program['description'],
        project_cohort=project_cohort,
        participation_open_date=participation_open_date,
        participation_close_date=participation_close_date,
    )
def save(program, file):
    """Program → File.

    Serialize `program` to path `file`, creating parent directories as
    needed. When `program` is a Program instance, a copy keeping only
    its abox and tbox constraints (tbox rules stripped) is written;
    anything else is written via str().

    Args:
        program: a Program instance or any object with a str() form.
        file: destination path. (The name shadows the py2 builtin
            `file`; kept for interface compatibility.)

    Returns:
        The destination path.
    """
    folder = os.path.dirname(file)
    # dirname() is '' for a bare filename, and os.makedirs('') raises —
    # only create directories when there actually is a parent folder.
    if folder and not os.path.exists(folder):
        os.makedirs(folder)
    with open(file, 'w') as f:
        if isinstance(program, Program):
            # Write a rule-free copy: abox + tbox constraints only.
            output_program = Program()
            output_program.abox = program.abox
            output_program.tbox.constraints = program.tbox.constraints
            output_program.tbox.rules = set()
            f.write(str(output_program))
        else:
            f.write(str(program))
    return file
def joined_cohort(user, project_cohort): """Notify program and super admins.""" # This always happens along with creating a program. pc = project_cohort program_admins = User.get(owned_programs=pc.program_label) super_admins = User.get(user_type='super_admin') organization = Organization.get_by_id(pc.organization_id) program_config = Program.get_config(pc.program_label) cohort_name = program_config['cohorts'][pc.cohort_label]['name'] notes = [] for admin in program_admins + super_admins: note = Notification.create( parent=admin, context_id=pc.uid, subject=u"{org} joined a cohort".format(org=organization.name), body=( u"{org} joined {cohort} in {program}. The organization is " "currently {status}." ).format( org=organization.name, cohort=cohort_name, program=program_config['name'], status=organization.status, ), link='/organizations/{}'.format(organization.short_uid), autodismiss=True, ) notes.append(note) ndb.put_multi(notes)
def test_create_for_team(self):
    """Should populate only with metrics active by default."""
    metric1 = Metric.create(name="Community of Helpers",
                            label='community_of_helpers')
    metric1.put()
    metric2 = Metric.create(name="Feedback for Growth",
                            label='feedback_for_growth')
    metric2.put()
    # Only metric1 is flagged default_active in the program config.
    program = Program.create(
        name='Test Program',
        label='test-program',
        metrics=[
            {'uid': metric1.uid, 'default_active': True},
            {'uid': metric2.uid, 'default_active': False},
        ],
        preview_url='foo.com',
    )
    program.put()
    team = Team.create(
        captain_id='User_captain',
        program_id=program.uid,
    )
    survey = Survey.create_for_team(team)
    # Only the default-active metric appears, and is open for responses.
    self.assertEqual(survey.metrics, [metric1.uid])
    self.assertEqual(survey.open_responses, [metric1.uid])
    self.assertEqual(survey.meta, {})
def test_team_in_program_with_use_cycles_false_gets_single_cycle(self):
    """Cycleless programs should auto-create exactly one cycle per team."""
    user = User.create(name='foo', email='*****@*****.**')
    user.put()
    # A program configured to not use cycles (min == max == 1).
    cycleless_program = Program.create(
        label='cycleless',
        name='Cycleless Program',
        preview_url='http://cycle.less',
        use_cycles=False,
        min_cycles=1,
        max_cycles=1
    )
    cycleless_program.put()
    cycleless_team_params = {
        "name": 'Cycleless Team',
        "uid": 'Team_cycleless',
        "program_id": cycleless_program.uid
    }
    response = self.testapp.post_json(
        '/api/teams',
        cycleless_team_params,
        headers=self.login_headers(user)
    )
    team_uid = json.loads(response.body)['uid']
    # Creating the team should have generated exactly one cycle.
    cycles = Cycle.get(team_id=team_uid)
    self.assertEqual(len(cycles), 1)
def create_pd_context(self):
    """Build a (project_cohort, survey, participant) fixture triple."""
    label = 'demo-program'
    # First survey's tasklist template from the demo program config.
    template = Program.get_config(label)['surveys'][0][
        'survey_tasklist_template']
    pc = ProjectCohort.create(
        program_label=label,
        organization_id='Organization_foo',
        project_id='Project_foo',
        cohort_label='2018',
    )
    pc.put()
    survey = Survey.create(
        template,
        program_label=label,
        organization_id='Organization_foo',
        project_cohort_id=pc.uid,
        ordinal=1,
    )
    survey.put()
    participant = Participant.create(name='Pascal', organization_id='PERTS')
    participant.put()
    return (pc, survey, participant)
def test_create(self): """Anyone can create a team with themselves as captain.""" # Success. user, team_dict = self.create() self.assertEqual(team_dict['captain_id'], user.uid) fetched_user = User.get_by_id(user.uid) self.assertEqual(fetched_user.owned_teams, [team_dict['uid']]) # Program not found. team_params = {'name': 'Team Foo', 'program_id': 'Program_dne'} self.testapp.post_json( '/api/teams', team_params, headers=self.login_headers(user), status=400, ) # Inactive program. inactive_program = Program.create( name="Inactive Program", label='inactive', active=False, preview_url='foo.com', ) inactive_program.put() self.testapp.post_json( '/api/teams', dict(team_params, program_id=inactive_program.uid), headers=self.login_headers(user), status=400, )
def post(self, project_id, slug):
    """A project has been identified as new. Send them a welcome.

    Args:
        project_id: uid of the newly-created Project.
        slug: mandrill template slug to use for the email.
    """
    project = Project.get_by_id(project_id)
    program = Program.get_config(project.program_label)
    org = Organization.get_by_id(project.organization_id)
    # The Org liaison is preferred, but is not guaranteed to be set (users
    # choose their org liaison explicitly as one of the org tasks). Default
    # to the Project liaison, which is set on creation in
    # add_program.controller.js@joinProgram
    org_liaison = User.get_by_id(org.liaison_id)
    project_liaison = User.get_by_id(project.liaison_id)
    liaison = org_liaison or project_liaison
    email = Email.create(
        to_address=liaison.email,
        mandrill_template=slug,
        mandrill_template_content={
            'program_name': program['name'],
            'organization_name': org.name,
            'liaison_name': liaison.name,
            'join_date': util.datelike_to_iso_string(project.created),
        },
    )
    email.put()
def changed_project_task(user, project, task, project_cohort_id=None):
    """If change made by an org admin, notify related account manager, or
    all the program owners. Otherwise, notify project liaison, or all the
    org admins.
    """
    t_dict = task.to_client_dict()
    program_config = Program.get_config(project.program_label)
    # Deep-link to the task when we know the project cohort context;
    # otherwise fall back to the generic dashboard.
    if project_cohort_id:
        link = '/dashboard/{pc}/tasks/{ckpt}/{task}'.format(
            pc=DatastoreModel.convert_uid(project_cohort_id),
            ckpt=DatastoreModel.convert_uid(task.checkpoint_id),
            task=task.uid
        )
    else:
        link = '/dashboard'
    params = {
        'task_id': task.uid,
        'context_id': project.uid,
        'subject': "Task updated",
        'body': u"{} updated \"{}\" in {}.".format(
            user.name, t_dict['name'], program_config['name']),
        'link': link,
        'autodismiss': True,
    }
    if user.non_admin:
        # Send to account managers (usu. set via program config ->
        # default_account_manager).
        parents = get_project_program_recipients(project)
    else:
        parents = get_project_organization_recipients(project)
    notes = [Notification.create(parent=p, **params) for p in parents]
    # Drop notifications that duplicate existing equivalent ones.
    filtered_notes = Notification.filter_redundant(notes)
    ndb.put_multi(filtered_notes)
def post(self):
    """Anyone can create network. Same as RestHandler.post, but removes
    permission check.
    """
    user = self.get_current_user()
    params = self.get_params(self.model.property_types())
    # Sanity check program.
    program = Program.get_by_id(params['program_id'])
    if not program:
        return self.http_bad_request("Program not found.")
    if not program.active:
        return self.http_bad_request("Program is inactive.")
    # Codes are not set on POST. They're generated randomly by create()
    # and can be changed to a new random value later.
    params.pop('code', None)
    # Create network.
    org = self.model.create(**params)
    org.put()
    # Associate with creating user.
    p = user.get_owner_property(org)
    if p is not None:
        p.append(org.uid)
        user.put()
    self.write(org)
    return org
def test_checkpoint_client_dict(self):
    """Checkpoints should gain additional properties as client dicts."""
    template = {
        'name': "Project Foo",
        'label': 'demo_project__foo',
        'body': "foo",
        'tasks': [],
    }
    Program.mock_program_config(
        'demo-program', {'project_tasklist_template': [template]})
    checkpoint = Checkpoint.create(parent_id='Project_foo', ordinal=1,
                                   program_label='demo-program', **template)
    # `body` lives only in the program config, not on the stored entity;
    # the client dict is expected to merge it in.
    self.assertIsNone(getattr(checkpoint, 'body', None))
    self.assertIsNotNone(checkpoint.to_client_dict()['body'])
def addProgram(self, name):
    """Create a Program if the name is unused, then bounce to /admin."""
    # NOTE(review): check-then-create is racy (TOCTOU) unless the table
    # has a unique constraint on name — two concurrent requests could
    # both pass count() == 0.
    p = Program.selectBy(name=name)
    if p.count() == 0:
        # SQLObject presumably persists on construction — confirm.
        Program(name=name)
        raise redirect("/admin")
    else:
        raise redirect("/admin?message='A program with this name already exists'")
def index(self, program=None):
    """Admin user listing, filtered by program."""
    # Resolve the program from session/user when not passed explicitly.
    program = util.session("current_program", identity.current.user.programID, program)
    if identity.in_group("global_admin") and int(program) == 0:
        # Global admins with no selection see every user, but the page
        # still displays their own program.
        userlist = User.select()
        program = identity.current.user.programID
    elif int(program) == 0:
        program = identity.current.user.programID
        userlist = User.selectBy(programID=program)
    else:
        # NOTE(review): identical to the branch above except for the
        # fallback assignment; consider merging the two branches.
        userlist = User.selectBy(programID=program)
    programlist = Program.select()
    return dict(
        program=Program.get(program),
        programlist=programlist,
        curProg=self.curProg,
        userlist=userlist,
        shaded=util.shaded,
    )
def add_program(self, channel, media, time):
    """Schedule media on a channel and append it to the cached lineup."""
    programming = memcache.get('programming') or {}
    # NOTE(review): timedelta(media.duration + 50) counts DAYS; sibling
    # code uses timedelta(seconds=media.duration) — confirm the units
    # intended for max_time.
    program = Program.add_program(channel, media, time=time, min_time=time,
                                  max_time=(time + datetime.timedelta(media.duration + 50)))
    logging.info(program)
    if program:
        # Lazily create the channel's lineup list, then cache the new
        # program's JSON form.
        if not programming.get(channel.id, None):
            programming[channel.id] = []
        programming.get(channel.id).append(program.toJson(fetch_channel=False,
                                                          fetch_media=True,
                                                          media_desc=False,
                                                          pub_desc=False))
        memcache.set('programming', programming)
def index(self, id=None, message=""):
    """User-profile admin page for a single user."""
    id = util.session('current_user', identity.current.user.id, id)
    if int(id) == 0:
        # NOTE(review): 'message=message' passes the literal string
        # "message", not the `message` variable — likely a bug; confirm
        # and interpolate (with URL-encoding) if so.
        raise redirect('/userprofile/createUser?message=message')
    user = User.get(id)
    groups = [g.group_name for g in user.groups]
    programlist = Program.select()
    return dict(user=user, programlist=programlist, curProg=self.curProg,
                curGroup=self.curGroup, groups=groups, message=message)
def program_api():
    """REST endpoint for programs: POST adds one, GET lists them all."""
    if request.method == 'POST':
        payload = json.loads(request.data)
        app.access_base.add_program(Program.from_json(payload))
        body = ({'status': '0'})
    elif request.method == 'GET':
        body = [p.to_json() for p in app.access_base.programs()]
    else:
        body = {"status": 1, "message": "Invalid request"}
    # Always JSON, never cached.
    response_headers = {'Cache-Control': 'no-cache'}
    return Response(json.dumps(body), mimetype='application/json',
                    headers=response_headers)
def set_user_channel_programs(self, key, channel, medias, time=None, reset=False):
    """Schedule `medias` onto a user's channel, updating memcache views.

    Args:
        key: memcache key of the user object to update.
        channel: the Channel to program.
        medias: media items to schedule back-to-back.
        time: optional start time; defaults to now.
        reset: when True, discard the cached lineup and start fresh.

    Returns:
        The list of Program entities created.
    """
    from model import Program
    programs = []
    programs_json = []
    channel_json = {}
    # Reuse the cached channel lineup unless asked to reset.
    if not reset and memcache.get(channel.id):
        channel_json = memcache.get(channel.id) or {}
        programs_json = channel_json.get('programs', [])
    else:
        channel_json['channel'] = channel.toJson()
    next_time = time or datetime.datetime.now()
    # Continue scheduling after the last cached program, if any.
    if len(programs_json):
        next_time = iso8601.parse_date(programs_json[-1]['time']).replace(tzinfo=None) + \
            datetime.timedelta(seconds=programs_json[-1]['media']['duration'])
    for media in medias:
        program = Program.add_program(channel, media, time=next_time)
        if program:
            programs.append(program)
            programs_json.append(program.toJson(fetch_channel=False,
                                                fetch_media=True,
                                                media_desc=False,
                                                pub_desc=False))
            next_time = next_time + datetime.timedelta(seconds=media.duration)
        if len(pickle.dumps(programs_json)) > 1000000:
            # We can only fit 1mb into memcache
            break
    # Kick off related-tweet fetching out of band.
    deferred.defer(Programming.fetch_related_tweets, medias,
                   _name='twitter-' + channel.id + '-' + str(uuid.uuid1()),
                   _queue='twitter')
    # Track this channel on the user's cached channel list.
    user_obj = memcache.get(key) or {}
    user_channels = (user_obj.get('channels') or []) if user_obj else []
    if not channel.id in user_channels:
        user_channels.append(channel.id)
    user_obj['channels'] = user_channels
    memcache.set(key, user_obj)
    channel_json['programs'] = programs_json
    memcache.set(channel.id, channel_json)
    return programs
def schedule_youtube_channel(name, user_id=None, token=None, channel_id=None,
                             yt_channel_id=None, yt_playlist_id=None):
    """Create a Channel from a YouTube channel/playlist and program it.

    Args:
        name: display name for the new Channel.
        user_id: owning user's key name; when set, programming goes
            through the per-user scheduling path.
        token: anonymous-session token, used in the key when no user.
        channel_id: unused here — TODO confirm it can be dropped.
        yt_channel_id: YouTube channel id ('HC...' ids are topic
            channels and are resolved via topicDetails).
        yt_playlist_id: YouTube playlist id (alternative source).
    """
    import broadcast
    import constants
    import datetime
    import programming
    from model import Media
    from model import Channel
    from model import User
    from model import common
    from model import Program

    user = None
    if user_id:
        user = User.get_by_key_name(user_id)
    # Key combines the owner (user or token) with the YouTube source id.
    channel = Channel(key_name=(user_id or token) + '-' +
                      (yt_channel_id or yt_playlist_id),
                      name=name, privacy=constants.Privacy.FRIENDS,
                      online=True, user=user)
    medias = []
    youtube3 = common.get_youtube3_service()
    next_page_token = ''
    search_response = {}
    if yt_channel_id:
        if yt_channel_id.startswith('HC'):
            # 'HC' ids are topic channels; resolve to a topic search.
            channel_response = youtube3.channels().list(
                id=yt_channel_id,
                part='topicDetails',
                maxResults=1
            ).execute()
            if len(channel_response.get('items', [])):
                topic_id = channel_response.get('items')[0]['topicDetails']['topicIds'][0]
                search_response = youtube3.search().list(
                    topicId=topic_id,
                    order='date',
                    part='id,snippet',
                    maxResults=10,
                    fields='items',
                    type='video'
                ).execute()
        else:
            search_response = youtube3.search().list(
                channelId=yt_channel_id,
                part='id,snippet',
                order='date',
                maxResults=10,
                type='video'
            ).execute()
    elif yt_playlist_id:
        search_response = youtube3.playlistItems().list(
            playlistId=yt_playlist_id,
            part='id,snippet',
            maxResults=10,
            fields='items'
        ).execute()
    search_ids = ''
    # NOTE(review): [1:] skips the first result — confirm intentional.
    for item in search_response.get('items', [])[1:]:
        if item['kind'] == 'youtube#searchResult':
            search_ids += item['id']['videoId'] + ','
        elif item['kind'] == 'youtube#playlistItem':
            search_ids += item['snippet']['resourceId']['videoId'] + ','
    videos_response = youtube3.videos().list(
        id=search_ids,
        part="id,snippet,topicDetails,contentDetails,statistics"
    ).execute()
    for item in videos_response.get("items", []):
        # NOTE(review): rebinds `medias` each iteration, so only the
        # last item's result survives — confirm whether this should
        # accumulate (e.g. medias += ...).
        medias = Media.add_from_snippet([item], approve=True)
    programs = []
    if user_id:
        programs = programming.Programming.set_user_channel_programs(
            user_id, channel, medias)
    else:
        # No owning user: schedule back-to-back starting now.
        next_time = datetime.datetime.now()
        for media in medias:
            program = Program.add_program(channel, media, time=next_time)
            if program:
                programs.append(program)
                next_time = next_time + datetime.timedelta(seconds=media.duration)
    broadcast.broadcastNewPrograms(channel, programs, token=token)
class Parser(object):
    '''
    Common stuff for parser for any language

    Subclasses implement `visit_<NodeClass>` methods (dispatched by `visit`)
    plus `parse`, `getline`, and the operator-name constants (ANDOP, OROP,
    NOTOP) referenced below. The parser builds a `Program` of `Function`s
    whose locations hold (var, expr) assignment lists and True/False
    transitions, then post-processes them into SSA-like form.
    '''

    def __init__(self, optifs=True, postprocess=True, nobcs=False, slice=False):
        # NOTE(review): `slice` shadows the builtin; kept for caller
        # keyword-compatibility.
        self.prog = Program()
        self.fncs = {}      # name -> Function
        self.fncsl = []     # stack of (fnc, loc) for nested function defs
        self.fnc = None     # currently open Function
        self.loc = None     # current location in self.fnc
        self.optifs = optifs        # enable optimizeif() for loop-less ifs
        self.postproc = postprocess # enable postprocess() pipeline
        self.slice = slice          # slice the program after parsing
        self.loops = []     # stack of (condloc, exitloc) for break/continue
        self.cnt = 0        # fresh-name counter for SSA temporaries
        self.warns = []
        self.hasbcs = False
        self.nobcs = nobcs

    def newcnt(self):
        '''Returns a fresh integer for generating unique temp names.'''
        self.cnt += 1
        return self.cnt

    def ssavar(self, var):
        '''Returns a fresh SSA temp name for `var`; the trailing "&" marks
        it as a temporary (see rmtmp).'''
        return '%s_&%d&' % (var, self.newcnt())

    def addwarn(self, msg, *args):
        '''Records a warning on the program, %-formatting `msg` with args.'''
        if args:
            msg %= args
        self.prog.addwarn(str(msg))

    def rmemptyfncs(self):
        ''' Removes empty functions, i.e., declarations only '''
        for fnc in self.prog.getfncs():
            if fnc.initloc is None:
                self.prog.rmfnc(fnc.name)

    def rmunreachlocs(self, fnc):
        ''' Removes unreachable locations from the graph '''
        # Depth-first reachability from the entry location over the
        # True/False transition edges.
        visited = set()
        tovisit = [fnc.initloc]
        while len(tovisit) > 0:
            loc = tovisit.pop()
            if loc in visited:
                continue
            visited.add(loc)
            l1 = fnc.trans(loc, True)
            if l1:
                tovisit.append(l1)
            l2 = fnc.trans(loc, False)
            if l2:
                tovisit.append(l2)
        for loc in fnc.locs():
            if loc not in visited:
                fnc.rmloc(loc)

    def ssa(self, fnc):
        ''' Converts exprs of each loc to SSA form '''
        for loc in fnc.locs():
            # Find last appearance of each var
            last = {}
            for i, (var, _) in enumerate(fnc.exprs(loc)):
                last[var] = i
            # Replace non-last appearance by a fresh var
            m = {}  # var -> current temp name standing in for it
            exprs = []
            for i, (var, expr) in enumerate(fnc.exprs(loc)):
                # Rewrite reads of renamed vars to their temps.
                for v1, v2 in m.items():
                    expr = expr.replace(v1, Var(v2))
                if var == VAR_RET:
                    # Return variable is never renamed.
                    newvar = var
                else:
                    if last[var] > i:
                        # Not the last write: rename to a fresh temp.
                        newvar = m[var] = self.ssavar(var)
                    else:
                        # Last write keeps the original name.
                        m.pop(var, None)
                        newvar = var
                if var != newvar:
                    # Remember the source var the temp came from.
                    expr.original = (var, self.cnt)
                exprs.append((newvar, expr))
            fnc.replaceexprs(loc, exprs)

    def rmtmp(self, fnc):
        ''' Removes (merges) "tmp" or SSA-generated assignments '''
        for loc in fnc.locs():
            m = {}          # temp name -> its defining expr (to inline)
            exprs = []
            primed = set([])
            lastret = None  # index in `exprs` of the last VAR_RET write
            # Remember "real" vars and replace temps
            for var, expr in fnc.exprs(loc):
                #expr.statement = True
                expr.prime(primed)
                # Inline all temps seen so far into this expr.
                for v, e in m.items():
                    expr = expr.replace(v, e)
                if isinstance(expr, Op) and expr.name == 'ite':
                    expr.args[0].original = None
                    expr.args[1].original = None
                    expr.args[2].original = None
                if var.endswith('&'):
                    # SSA temp (ssavar suffix): remember for inlining, drop.
                    m[var] = expr
                else:
                    if var == VAR_RET:
                        lastret = len(exprs)
                    exprs.append((var, expr))
                    if var != VAR_RET:
                        primed.add(var)
            # "Merge" return stmts
            nexprs = []
            retexpr = None   # accumulated return expression
            retcond = None   # condition under which a return already happened
            for i, (var, expr) in enumerate(exprs):
                if var == VAR_RET:
                    tmpretcond = self.getretcond(expr)
                    # Fold this return's condition into the running one.
                    if tmpretcond is True or retcond is None:
                        retcond = tmpretcond
                    elif tmpretcond is not None and retcond is not True:
                        retcond = Op(self.OROP, retcond, tmpretcond)
                    if retexpr:
                        retexpr = retexpr.replace(VAR_RET, expr)
                    else:
                        retexpr = expr
                    if i == lastret:
                        # Emit the single merged return at the last site.
                        nexprs.append((var, retexpr))
                else:
                    if retcond is True:
                        # Definitely returned already: assignment is dead.
                        continue
                    elif retcond:
                        # May have returned: guard the assignment so it only
                        # takes effect on the not-yet-returned path.
                        expr = Op('ite', Op(self.NOTOP, retcond), expr, Var(var))
                    nexprs.append((var, expr))
            fnc.replaceexprs(loc, nexprs)

    def getretcond(self, expr):
        '''Returns the condition under which `expr` (a merged return value)
        actually returns: True = always, None = never, else an Expr.'''
        if isinstance(expr, Op) and expr.name == 'ite':
            icond = expr.args[0]
            ct = self.getretcond(expr.args[1])
            cf = self.getretcond(expr.args[2])
            cond = []
            if ct is None and cf is None:
                return None
            if ct is True and cf is True:
                return True
            if ct:
                if ct is True:
                    cond.append(icond.copy())
                else:
                    cond.append(Op(self.ANDOP, icond.copy(), ct.copy()))
            if cf:
                nicond = Op(self.NOTOP, icond)
                if cf is True:
                    cond.append(nicond.copy())
                else:
                    cond.append(Op(self.ANDOP, nicond.copy(), cf.copy()))
            if len(cond) == 1:
                return cond[0]
            else:
                return Op(self.OROP, cond[0], cond[1])
        elif isinstance(expr, Var) and expr.name == VAR_RET:
            # A bare reference to the old return value: no new return here.
            return None
        else:
            return True

    def postprocess(self):
        '''Runs the cleanup pipeline on every parsed function (unless
        disabled via postprocess=False at construction).'''
        if not self.postproc:
            return
        self.rmemptyfncs()
        for fnc in self.prog.fncs.values():
            self.rmunreachlocs(fnc)
            self.ssa(fnc)
            self.rmtmp(fnc)

    def visit(self, node):
        '''Dispatches `node` to visit_<ClassName>; raises NotSupported when
        no visitor is implemented.'''
        # Skip None-node
        if node is None:
            return
        # Name of the node class
        name = node.__class__.__name__
        # Get method
        meth = getattr(self, 'visit_%s' % (name,), None)
        if meth is None:
            raise NotSupported("Unimplemented visitor: '%s'" % (name,))
        # Call visitor method
        return meth(node)

    def visit_expr(self, node, allowlist=False, allownone=False):
        '''Visits `node` and checks the result is an Expr (or, with
        allowlist, a list of Exprs); warns and substitutes Const('?') when
        an expression was expected but missing.'''
        res = self.visit(node)
        if isinstance(res, list) and allowlist:
            ok = True
            for r in res:
                if not isinstance(r, Expr):
                    ok = False
                    break
            if ok:
                return res
        if res and not isinstance(res, Expr):
            raise ParseError("Expected expression, got '%s'" % (res,),
                             line=node.coord.line)
        if (not res) and (not allownone):
            if node:
                self.addwarn("Expression expected at line %s" % (
                    node.coord.line,))
            else:
                self.addwarn("Expression expected")
            # Placeholder expression so parsing can continue.
            res = Const('?')
        return res

    def visit_if(self, node, cond, true, false):
        '''Builds the location graph for an if-statement: cond loc, true
        branch, optional false branch, and an after-loc joining them.'''
        # Add condition (with new location)
        preloc = self.loc
        condloc = self.addloc('the condition of the if-statement at line %d' % (
            self.getline(cond)))
        condexpr = self.visit_expr(cond, allowlist=True)
        if isinstance(condexpr, list):
            condexpr = self.expr_list_and(condexpr)
        self.addexpr(VAR_COND, condexpr)
        # Add true loc
        trueline = self.getline(true) or self.getline(node)
        trueloc = self.addloc('inside the if-branch starting at line %d' % (
            trueline))
        self.visit(true)
        afterloc1 = self.loc
        afterloc = self.addloc('after the if-statement beginning at line %s' % (
            self.getline(node)))
        # Add (general) transitions
        self.addtrans(preloc, True, condloc)
        self.addtrans(condloc, True, trueloc)
        self.addtrans(afterloc1, True, afterloc)
        # Add false loc
        if false:
            falseloc = self.addloc('inside the else-branch starting at line %d' % (
                self.getline(false)))
            self.visit(false)
            afterloc2 = self.loc
            self.addtrans(condloc, False, falseloc)
            self.addtrans(afterloc2, True, afterloc)
        else:
            self.addtrans(condloc, False, afterloc)
            falseloc = None
        # "Loop-less" if-statement
        # (neither branch added further locations, so the whole `if` can be
        # flattened into conditional expressions at preloc)
        if trueloc == afterloc1 and ((not false) or falseloc == afterloc2):
            if self.optifs:
                self.optimizeif(preloc, condexpr, trueloc, falseloc)
                return
        self.loc = afterloc

    def optimizeif(self, preloc, condexpr, trueloc, falseloc):
        ''' Optimized "simple" or "loop-less" if statement '''
        # Remove unneeded part of the graph
        self.fnc.rmtrans(preloc, True)
        self.loc = preloc
        # Keep track of assigned vars
        varss = set()   # membership test
        varsl = []      # preserves assignment order
        mt = {}         # var -> temp assigned in the true branch
        mf = {}         # var -> temp assigned in the false branch

        # Add exprs from branches
        def addvars(loc, m):
            # Re-emit each branch assignment at preloc under a fresh temp.
            for (var, expr) in self.fnc.exprs(loc):
                newvar = self.ssavar(var)
                if var not in varss:
                    varss.add(var)
                    varsl.append(var)
                # Replace vars mapped so far
                for (v1, v2) in m.items():
                    expr = expr.replace(v1, Var(v2))
                expr.original = (var, self.cnt)
                self.addexpr(newvar, expr)
                # Remember replacement
                m[var] = newvar

        addvars(trueloc, mt)
        if falseloc is not None:
            addvars(falseloc, mf)
        # Add condition
        condvar = self.ssavar('$cond')
        self.addexpr(condvar, condexpr.copy())
        # Merge branches
        # (each assigned var becomes ite(cond, true-temp, false-temp),
        # defaulting to the var's old value where a branch didn't assign it)
        for var in varsl:
            self.addexpr(var, Op('ite', Var(condvar),
                                 Var(mt.get(var, var)),
                                 Var(mf.get(var, var))))

    def expr_list_and(self, exprs):
        '''Folds a list of expressions into one left-associated '&&' chain;
        returns None for an empty list.'''
        if len(exprs) == 0:
            return None
        else:
            newexpr = exprs[0]
            for expr in exprs[1:]:
                newexpr = Op('&&', newexpr, expr, line=expr.line)
            return newexpr

    def visit_loop(self, node, init, cond, next, body, do, name, prebody=None):
        '''Builds the location graph for any loop form: `init` stmts, a
        cond loc, a body loc (with `next` stmts appended), and an exit loc.
        `do` marks do-while semantics (body entered before the condition);
        `prebody` is a list of (var, expr) pairs emitted at the body start.'''
        # Visit init stmts
        if init:
            self.visit(init)
        # Add condition (with new location)
        preloc = self.loc
        if isinstance(cond, Expr):
            condexpr = cond
        else:
            condexpr = self.visit_expr(cond, allowlist=True)
            if isinstance(condexpr, list):
                condexpr = self.expr_list_and(condexpr)
        if not condexpr:
            # Missing condition (e.g. `for(;;)`) means "always true".
            condexpr = Const('1')
        condloc = self.addloc("the condition of the '%s' loop at line %s" % (
            name, condexpr.line or self.getline(node)))
        self.addexpr(VAR_COND, condexpr)
        # Add exit loc
        exitloc = self.addloc("*after* the '%s' loop starting at line %d" % (
            name, self.getline(node)))
        # Add body with (new location)
        bodyloc = self.addloc("inside the body of the '%s' loop beginning at line %d" % (
            name, self.getline(body) or self.getline(node)))
        self.addloop((condloc, exitloc))
        if prebody:
            # Python 2 `map` is eager, so the addexpr side effects run here.
            map(lambda x: self.addexpr(*x), prebody)
        self.visit(body)
        if next:
            self.visit(next)
        self.poploop()
        afterloc = self.loc
        # Connect transitions
        self.addtrans(preloc, True, bodyloc if do else condloc)
        self.addtrans(condloc, True, bodyloc)
        self.addtrans(condloc, False, exitloc)
        self.addtrans(afterloc, True, condloc)
        self.loc = exitloc

    def addfnc(self, name, params, rettype):
        '''Opens a new Function, stacking any currently open one.'''
        if self.fnc:
            self.fncsl.append((self.fnc, self.loc))
        self.fnc = Function(name, params, rettype)
        self.fncs[name] = self.fnc
        self.prog.addfnc(self.fnc)

    def endfnc(self):
        '''Closes the current Function, restoring the stacked one if any.'''
        if self.fncsl:
            self.fnc, self.loc = self.fncsl.pop()
        else:
            self.fnc = None
            self.loc = None

    def addloc(self, desc):
        '''Adds a new location to the current function and makes it current.'''
        assert (self.fnc), 'No active fnc!'
        self.loc = self.fnc.addloc(desc=desc)
        return self.loc

    def addexpr(self, name, expr, loc=None, idx=None):
        '''Appends the assignment (name, expr) at `loc` (default: current).'''
        assert (self.fnc), 'No active fnc!'
        if not loc:
            loc = self.loc
        self.fnc.addexpr(loc, name, expr, idx=idx)

    def numexprs(self, loc=None):
        '''Returns the number of exprs at `loc` (default: current).'''
        assert (self.fnc), 'No active fnc!'
        if not loc:
            loc = self.loc
        return self.fnc.numexprs(loc)

    def rmlastexprs(self, loc=None, num=1):
        '''Removes the last `num` exprs at `loc` (default: current).'''
        assert (self.fnc), 'No active fnc!'
        if not loc:
            loc = self.loc
        self.fnc.rmlastexprs(loc, num)

    def addtrans(self, loc1, cond, loc2):
        '''Adds the transition loc1 --cond--> loc2 in the current function.'''
        assert (self.fnc), 'No active fnc!'
        self.fnc.addtrans(loc1, cond, loc2)

    def addtype(self, var, type, skiponexist=True):
        '''Records `var`'s type in the current function.'''
        assert (self.fnc), 'No active fnc!'
        self.fnc.addtype(var, type, skiponexist)

    def hasvar(self, var):
        '''True when `var` has a recorded type in the current function.'''
        assert (self.fnc), 'No active fnc'
        return self.fnc.gettype(var) is not None

    def addloop(self, l):
        '''Pushes a (condloc, exitloc) pair onto the loop stack.'''
        self.loops.append(l)

    def poploop(self):
        return self.loops.pop()

    def lastloop(self):
        '''Returns the innermost (condloc, exitloc) pair, or None.'''
        return self.loops[-1] if len(self.loops) else None

    def isfncname(self, name):
        return name in self.fncs

    @classmethod
    def parse_code(cls, code, *args, **kwargs):
        '''Convenience entry: parse `code`, post-process, optionally slice,
        and return the resulting Program.'''
        parser = cls(*args, **kwargs)
        parser.parse(code)
        parser.postprocess()
        if parser.slice:
            parser.prog.slice()
        return parser.prog
def set_programming(cls, channel_id, duration=3600, schedule_next=False, fetch_twitter=True, queue='programming', target=None, kickoff=False):
    """(Re)fill a channel's upcoming programming and cache it in memcache.

    When the cached schedule for `channel_id` has a gap (> 60s) or is empty,
    pulls media from the channel's collections, filters out recently
    programmed items, schedules up to `duration` seconds of programs,
    broadcasts them, and refreshes the 'programming' and 'channels'
    memcache entries. Optionally re-enqueues itself via deferred.

    Args:
        channel_id: Key name of the Channel to program.
        duration: Seconds of programming to schedule (default one hour).
        schedule_next: When True, defer another set_programming run.
        fetch_twitter: When True, defer fetching related tweets.
        queue: Task queue name for the deferred re-run.
        target: Unused in this function body.
        kickoff: Forces rescheduling even when no users are online.

    Returns:
        The list of newly created Program entities.
    """
    # Local imports — presumably to avoid circular imports; TODO confirm.
    import broadcast
    import constants
    from model import Channel
    from model import Program

    # Stored programming
    # (memcache map: channel_id -> list of program JSON dicts)
    programming = memcache.get('programming') or {}
    onlineUsers = memcache.get('web_channels') or {}
    logging.info('programming: ' + channel_id)
    next_programs = Programming.next_programs(programming.get(channel_id, []), duration,
                                              prelude=300)
    gap = Programming.gap(programming.get(channel_id, []), duration)
    logging.info('GAP: ' + str(gap))
    if programming.get(channel_id) and len(programming[channel_id]) and \
        programming[channel_id][0]['media'].get('live') == True:
        logging.info('live tweets')
        # Update tweets for live events
        # NOTE(review): `media` is fetched but not passed to the deferred
        # call (an empty list is) — looks unintended; confirm.
        media = Media.get_by_key_name(programming[channel_id][0]['media']['id'])
        deferred.defer(Programming.fetch_related_tweets, [],
                       _name='twitter-' + channel_id + '-' + str(uuid.uuid1()),
                       _queue='twitter')
    programs = []
    if not programming.get(channel_id) or gap > 60:
        logging.info('PROGRAOMMING')
        channel = Channel.get_by_key_name(channel_id)
        #channel.update_next_time()
        viewers = (memcache.get('channel_viewers') or {}).get(str(channel_id), [])
        cols = channel.get_collections()
        all_medias = []
        backup_medias = []
        limit = 100
        for col in cols:
            medias = []
            filtered_medias = []
            offset = 0
            # Page through up to 500 medias per collection (5 pages of 100).
            while offset <= 400:
                medias = col.get_medias(limit=limit, offset=offset)
                logging.info('fetched medias: ' + str(len(medias)))
                if not len(medias):
                    break
                backup_medias += medias
                # Don't reprogram anything already scheduled
                filtered_medias = Programming.no_reprogram(programming.get(channel_id, []),
                                                           medias)
                # Don't repeat the same program within two hour
                # (short-lived collections get a 2h cutoff, others 12h;
                # NOTE(review): `.seconds` ignores whole days in the
                # timedelta — presumably acceptable here, confirm)
                cutoff = 7200 if col.lifespan else 43200
                filtered_medias = [c for c in filtered_medias if not c.last_programmed or
                                   (datetime.datetime.now() - c.last_programmed).seconds > cutoff]
                # At most, 30% of the audience has already "witnessed" this program
                # filtered_medias = [m for m in filtered_medias if not len(viewers) or
                #                    float(len(Programming.have_seen(m, viewers)))/len(viewers) < .3]
                all_medias += filtered_medias
                logging.info('all medias: ' + str(len(all_medias)))
                offset += limit
        # Fall back to everything fetched when filtering removed it all.
        all_medias = backup_medias if not len(all_medias) else all_medias
        # Don't repeat already programmed
        # all_medias = Programming.no_reprogram(next_programs, all_medias)
        # StorySort algorithm
        # all_medias = Programming.story_sort(all_medias)
        # Only one publisher per story
        all_medias = Programming.unique_publishers(all_medias)
        # Grab "duration" seconds of programming
        all_medias = Programming.timed_subset(all_medias, duration)
        if fetch_twitter:
            # Find related twitter posts
            deferred.defer(Programming.fetch_related_tweets, all_medias,
                           _name='twitter-' + channel.name.replace(' ', '') + '-' + str(uuid.uuid1()),
                           _queue='twitter',
                           _countdown=30)
        # Truncate old programs
        programming[channel_id] = Programming.cutoff_programs(programming.get(channel_id), 300)
        for media in all_medias:
            program = Program.add_program(channel, media,
                                          min_time=datetime.datetime.now(),
                                          max_time=(datetime.datetime.now() +
                                                    datetime.timedelta(seconds=duration)))
            logging.info(program)
            if program:
                if not programming.get(channel_id, None):
                    programming[channel_id] = []
                programming.get(channel_id).append(program.toJson(fetch_channel=False,
                                                                  fetch_media=True,
                                                                  media_desc=False,
                                                                  pub_desc=False))
                programs.append(program)
                logging.info('ADDING: ' + media.name + ' at: ' + program.time.isoformat())
                if len(pickle.dumps(programming)) > 1000000:
                    # We can only fit 1mb into memcache
                    break
        if len(programs):
            broadcast.broadcastNewPrograms(channel, programs)
        memcache.set('programming', programming)
        # Refresh this channel's entry in the cached channel list.
        channels = memcache.get('channels') or []
        updated = False
        for i, c in enumerate(channels):
            if c['id'] == channel_id:
                channels[i] = channel.toJson(get_programming=False)
                updated = True
        if not updated:
            channels.append(channel.toJson(get_programming=False))
        memcache.set('channels', channels)
    # Schedule our next programming selection
    # (skipped while SLEEP_PROGRAMMING is set and nobody is online, unless
    # this is a kickoff run)
    if schedule_next and (not constants.SLEEP_PROGRAMMING or
                          (constants.SLEEP_PROGRAMMING and (kickoff or len(onlineUsers.keys())))):
        logging.info('NUMBER OF PROGRAMS: ' + str(len(programs)))
        # Re-run roughly halfway before the schedule runs out.
        if len(programs) > 1:
            next_gen = (programs[-2].time - datetime.datetime.now()).seconds / 2
        elif len(programs) == 1:
            next_gen = programs[0].media.duration / 2
        else:
            next_gen = 60
        next_gen = min(next_gen, reduce(lambda x, y: x + y,
                                        [p.media.duration for p in programs], 0) \
                       if len(programs) else 10)
        next_gen = min(next_gen, duration / 2)
        logging.info('COUNTDOWN FOR ' + channel_id + ': ' + str(next_gen))
        deferred.defer(Programming.set_programming, channel_id,
                       fetch_twitter=fetch_twitter,
                       _name=channel_id + '-' + str(uuid.uuid1()),
                       _countdown=next_gen,
                       _queue=queue)
    return programs