def setUp(self): app = webapp2.WSGIApplication([webapp2.Route(r'/<team_key:>', ApiTeamHistoryRobotsController, methods=['GET'])], debug=True) self.testapp = webtest.TestApp(app) self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.init_datastore_v3_stub() self.testbed.init_urlfetch_stub() self.testbed.init_memcache_stub() self.testbed.init_taskqueue_stub(root_path=".") self.team = Team( id="frc1124", name="UberBots", team_number=1124, nickname="UberBots", ) self.robot = Robot( id="frc1124_2015", team=self.team.key, year=2015, robot_name="Orion" ) self.team.put() self.robot.put()
def stop_robot(cls, content):
    # Stop a bot: expects "<command> <bot_id> <wx_account>"
    content_arr = content.split(' ')
    if len(content_arr) != 3:
        cls.send_text_msg('Invalid format\n{0}'.format(cls.STOP_ROBOT_FORMAT))
        return
    _, bot_id, wx_account = content_arr
    robot = Robot.get_model_filter(Robot.Id == bot_id, Robot.WxAccount == wx_account)
    if robot is None:
        # Use the parsed values here; `robot` is None and has no attributes
        cls.send_text_msg('{0} {1} bot not found'.format(bot_id, wx_account))
        return
    logger.info('stopping bot {0} {1} {2}'.format(robot.Id, robot.WxAccount, robot.Pid))
    robot.stop()
    Robot.save([robot])
    cmd = 'kill {0}'.format(robot.Pid)
    ret = os.system(cmd)
    logger.debug(ret)
    cls.send_text_msg('Stopping\nYou can check the status of all bots')
def create_root_bot(self, wx_account, user_id, topic_id, nickname, robot_functions):
    Robot.add_robot(wx_account, user_id, topic_id, nickname, map(str, robot_functions),
                    auto_commit=True)
def show_form(self, mid=None):
    if mid:
        robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    else:
        robot = Robot()
    form = forms.RobotForm(data=robot.to_dict())
    self.render_response('robot/form.html', form=form, mid=mid, robot=robot)
def parse(self, response): """ Parse team info from FMSAPI Returns a tuple of: list of models (Team, DistrictTeam, Robot), and a Boolean indicating if there are more pages to be fetched """ # Get team json # don't need to null check, if error, HTTP code != 200, so we wont' get here current_page = response['pageCurrent'] total_pages = response['pageTotal'] teams = response['teams'] ret_models = [] for teamData in teams: # Fix issue where FIRST's API returns dummy website for all teams if teamData[ 'website'] is not None and 'www.firstinspires.org' in teamData[ 'website']: website = None else: website = WebsiteHelper.format_url( teamData.get('website', None)) team = Team(id="frc{}".format(teamData['teamNumber']), team_number=teamData['teamNumber'], name=teamData['nameFull'], nickname=teamData['nameShort'], school_name=teamData.get('schoolName'), home_cmp=teamData.get('homeCMP').lower() if teamData.get('homeCMP') else None, city=teamData['city'], state_prov=teamData['stateProv'], country=teamData['country'], website=website, rookie_year=teamData['rookieYear']) districtTeam = None if teamData['districtCode']: districtKey = District.renderKeyName( self.year, teamData['districtCode'].lower()) districtTeam = DistrictTeam( id=DistrictTeam.renderKeyName(districtKey, team.key_name), team=ndb.Key(Team, team.key_name), year=self.year, district_key=ndb.Key(District, districtKey), ) robot = None if teamData['robotName']: robot = Robot(id=Robot.renderKeyName(team.key_name, self.year), team=ndb.Key(Team, team.key_name), year=self.year, robot_name=teamData['robotName'].strip()) ret_models.append((team, districtTeam, robot)) return (ret_models, (current_page < total_pages))
def parse(self, response): """ Parse team info from FMSAPI Returns a tuple of: list of models (Team, DistrictTeam, Robot), and a Boolean indicating if there are more pages to be fetched """ # Get team json # don't need to null check, if error, HTTP code != 200, so we wont' get here current_page = response['pageCurrent'] total_pages = response['pageTotal'] teams = response['teams'] ret_models = [] for teamData in teams: # Fix issue where FIRST's API returns dummy website for all teams if teamData[ 'website'] is not None and 'www.firstinspires.org' in teamData[ 'website']: website = None else: raw_website = teamData.get('website', None) website = urlparse.urlparse( raw_website, 'http').geturl() if raw_website else None # Fix oddity with urlparse having three slashes after the scheme (#1635) website = website.replace('///', '//') if website else None team = Team(id="frc{}".format(teamData['teamNumber']), team_number=teamData['teamNumber'], name=teamData['nameFull'], nickname=teamData['nameShort'], city=teamData['city'], state_prov=teamData['stateProv'], country=teamData['country'], website=website, rookie_year=teamData['rookieYear']) districtTeam = None if teamData['districtCode']: districtAbbrev = DistrictType.abbrevs[ teamData['districtCode'].lower()] districtTeam = DistrictTeam(id=DistrictTeam.renderKeyName( self.year, districtAbbrev, team.key_name), team=ndb.Key(Team, team.key_name), year=self.year, district=districtAbbrev) robot = None if teamData['robotName']: robot = Robot(id=Robot.renderKeyName(team.key_name, self.year), team=ndb.Key(Team, team.key_name), year=self.year, robot_name=teamData['robotName'].strip()) ret_models.append((team, districtTeam, robot)) return (ret_models, (current_page < total_pages))
def create_robot(user):
    if not user:
        return jsonify({'message': 'Not authorized'}), 401
    current_location_x = request.json.get('current_location_x', 0)
    current_location_y = request.json.get('current_location_y', 0)
    mac_address = request.json.get('mac_address')
    robot = Robot(current_location_x, current_location_y, mac_address)
    robot.save_to_db()
    response = robot.serialize
    return jsonify(response), 201
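# Hedged usage sketch (not from the source): exercising create_robot over HTTP
# with `requests`. The URL, route, and auth header are assumptions, since the
# route registration and the decorator that supplies `user` are not shown.
import requests

resp = requests.post(
    'http://localhost:5000/robots',
    json={'current_location_x': 0, 'current_location_y': 0, 'mac_address': 'AA:BB:CC:DD:EE:FF'},
    headers={'Authorization': 'Bearer <token>'},
)
print(resp.status_code, resp.json())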
class TestTeamHistoryRobotsApiController(unittest2.TestCase): def setUp(self): app = webapp2.WSGIApplication([ webapp2.Route(r'/<team_key:>', ApiTeamHistoryRobotsController, methods=['GET']) ], debug=True) self.testapp = webtest.TestApp(app) self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.init_datastore_v3_stub() self.testbed.init_urlfetch_stub() self.testbed.init_memcache_stub() ndb.get_context().clear_cache( ) # Prevent data from leaking between tests self.testbed.init_taskqueue_stub(root_path=".") self.team = Team( id="frc1124", name="UberBots", team_number=1124, nickname="UberBots", ) self.robot = Robot(id="frc1124_2015", team=self.team.key, year=2015, robot_name="Orion") self.team.put() self.robot.put() def tearDown(self): self.testbed.deactivate() def testRobotApi(self): response = self.testapp.get( '/frc1124', headers={ "X-TBA-App-Id": "tba-tests:team_list-controller-test:v01" }) robot_dict = json.loads(response.body) self.assertTrue("2015" in robot_dict) robot = robot_dict["2015"] self.assertEqual(robot["key"], "frc1124_2015") self.assertEqual(robot["team_key"], "frc1124") self.assertEqual(robot["year"], 2015) self.assertEqual(robot["name"], "Orion")
def test_set_robot_name(self):
    self.loginUser()
    self.giveTeamAdminAccess()
    # No Robot model exists yet for this team
    form = self.getTeamInfoForm(1124)
    form['robot_name'] = 'Test Robot Name'
    response = form.submit().follow()
    self.assertEqual(response.status_int, 301)
    robot = Robot.get_by_id(Robot.renderKeyName('frc1124', self.now.year))
    self.assertIsNotNone(robot)
    self.assertEqual(robot.robot_name, 'Test Robot Name')
def add_robot(cls, content):
    # Add a bot: expects "<command> <wx_account (unique)> <nickname> <topic_id>"
    content_arr = content.split(' ')
    if len(content_arr) != 4:
        cls.send_text_msg('Invalid format\n{0}'.format(cls.ADD_ROBOT_FORMAT))
        return
    wx_account, nickname, topic_id = content_arr[1:]
    Robot.add_robot(wx_account, cls.robot_model.UserId, topic_id, nickname, [2, 3, 4],
                    auto_commit=True)
    cls.send_text_msg('Added successfully\n')
def parse(self, response): """ Parse team info from FMSAPI Returns a tuple of: list of models (Team, DistrictTeam, Robot), and a Boolean indicating if there are more pages to be fetched """ # Get team json # don't need to null check, if error, HTTP code != 200, so we wont' get here current_page = response['pageCurrent'] total_pages = response['pageTotal'] teams = response['teams'] ret_models = [] for teamData in teams: # concat city/state/country to get address address = u"{}, {}, {}".format(teamData['city'], teamData['stateProv'], teamData['country']) team = Team(id="frc{}".format(teamData['teamNumber']), team_number=teamData['teamNumber'], name=teamData['nameFull'], nickname=teamData['nameShort'], address=address, website=teamData['website'], rookie_year=teamData['rookieYear']) districtTeam = None if teamData['districtCode']: districtAbbrev = DistrictType.abbrevs[ teamData['districtCode'].lower()] districtTeam = DistrictTeam(id=DistrictTeam.renderKeyName( self.year, districtAbbrev, team.key_name), team=ndb.Key(Team, team.key_name), year=self.year, district=districtAbbrev) robot = None if teamData['robotName']: robot = Robot(id=Robot.renderKeyName(team.key_name, self.year), team=ndb.Key(Team, team.key_name), year=self.year, robot_name=teamData['robotName'].strip()) ret_models.append((team, districtTeam, robot)) return (ret_models, (current_page < total_pages))
def get(self, team_number): self._require_admin() team = Team.get_by_id("frc" + team_number) if not team: self.abort(404) event_teams = EventTeam.query(EventTeam.team == team.key).fetch(500) team_medias = Media.query(Media.references == team.key).fetch(500) robots = Robot.query(Robot.team == team.key).fetch() district_teams = DistrictTeam.query( DistrictTeam.team == team.key).fetch() years_participated = sorted( TeamParticipationQuery(team.key_name).fetch()) team_medias_by_year = {} for media in team_medias: if media.year in team_medias_by_year: team_medias_by_year[media.year].append(media) else: team_medias_by_year[media.year] = [media] media_years = sorted(team_medias_by_year.keys(), reverse=True) self.template_values.update({ 'event_teams': event_teams, 'team': team, 'team_media_years': media_years, 'team_medias_by_year': team_medias_by_year, 'robots': robots, 'district_teams': district_teams, 'years_participated': years_participated, }) path = os.path.join(os.path.dirname(__file__), '../../templates/admin/team_details.html') self.response.out.write(template.render(path, self.template_values))
def get(self, team_number): self._require_admin() team = Team.get_by_id("frc" + team_number) if not team: self.abort(404) event_teams = EventTeam.query(EventTeam.team == team.key).fetch(500) team_medias = Media.query(Media.references == team.key).fetch(500) robots = Robot.query(Robot.team == team.key).fetch() district_teams = DistrictTeam.query(DistrictTeam.team == team.key).fetch() years_participated = sorted(TeamParticipationQuery(team.key_name).fetch()) team_medias_by_year = {} for media in team_medias: if media.year in team_medias_by_year: team_medias_by_year[media.year].append(media) else: team_medias_by_year[media.year] = [media] media_years = sorted(team_medias_by_year.keys(), reverse=True) self.template_values.update({ 'event_teams': event_teams, 'team': team, 'team_media_years': media_years, 'team_medias_by_year': team_medias_by_year, 'robots': robots, 'district_teams': district_teams, 'years_participated': years_participated, }) path = os.path.join(os.path.dirname(__file__), '../../templates/admin/team_details.html') self.response.out.write(template.render(path, self.template_values))
def start_robot(cls, content):
    # Start a bot: expects "<command> <bot_id> <wx_account> <topic_id>"
    from wxbot.bot_manager import BotManager
    content_arr = content.split(' ')
    if len(content_arr) != 4:
        cls.send_text_msg('Invalid format\n{0}'.format(cls.START_ROBOT_FORMAT))
        return
    bot_id = content_arr[1]
    wx_account = content_arr[2]
    topic_id = content_arr[3]
    robot_model = Robot.get_model_filter(Robot.Id == bot_id, Robot.WxAccount == wx_account)
    if robot_model is None:
        cls.send_text_msg('{0} {1} bot not found'.format(bot_id, wx_account))
        return
    python_path = '{0}/venv/bin/python'.format(BotManager.root_path)
    py_path = '{0}/wx_bot.py {1} {2} {3}'.format(BotManager.root_path, bot_id, wx_account, topic_id)
    ret = os.system('{0} {1} &'.format(python_path, py_path))
    logger.debug(ret)
    cls.send_text_msg('Starting\nYou can check the status of all bots')
def get(self, team_number): self._require_admin() team = Team.get_by_id("frc" + team_number) event_teams = EventTeam.query(EventTeam.team == team.key).fetch(500) team_medias = Media.query(Media.references == team.key).fetch(500) robots = Robot.query(Robot.team == team.key).fetch() district_teams = DistrictTeam.query(DistrictTeam.team == team.key).fetch() team_medias_by_year = {} for media in team_medias: if media.year in team_medias_by_year: team_medias_by_year[media.year].append(media) else: team_medias_by_year[media.year] = [media] self.template_values.update({ 'event_teams': event_teams, 'team': team, 'team_medias_by_year': team_medias_by_year, 'robots': robots, 'district_teams': district_teams, }) path = os.path.join(os.path.dirname(__file__), '../../templates/admin/team_details.html') self.response.out.write(template.render(path, self.template_values))
def post(self): team_number = self.request.get("team_number") if not team_number: self.abort(400) team_number = int(team_number) team = Team.get_by_id("frc{}".format(team_number)) if not team: self.abort(400) self._require_team_admin_access(team_number) action = self.request.get('action') if action == "remove_media_reference": media, team_ref = self.get_media_and_team_ref(team_number) if team_ref in media.references: media.references.remove(team_ref) if team_ref in media.preferred_references: media.preferred_references.remove(team_ref) MediaManipulator.createOrUpdate(media, auto_union=False) elif action == "remove_media_preferred": media, team_ref = self.get_media_and_team_ref(team_number) if team_ref in media.preferred_references: media.preferred_references.remove(team_ref) MediaManipulator.createOrUpdate(media, auto_union=False) elif action == "add_media_preferred": media, team_ref = self.get_media_and_team_ref(team_number) if team_ref not in media.preferred_references: media.preferred_references.append(team_ref) MediaManipulator.createOrUpdate(media, auto_union=False) elif action == "set_team_info": robot_name = self.request.get("robot_name").strip() current_year = datetime.datetime.now().year robot_key = Robot.renderKeyName(team.key_name, current_year) if robot_name: robot = Robot( id=robot_key, team=team.key, year=current_year, robot_name=robot_name, ) RobotManipulator.createOrUpdate(robot) else: RobotManipulator.delete_keys([ndb.Key(Robot, robot_key)]) else: self.abort(400) self.redirect('/mod/')
def parse(self, response): """ Parse team info from FMSAPI Returns a tuple of: list of models (Team, DistrictTeam, Robot), and a Boolean indicating if there are more pages to be fetched """ # Get team json # don't need to null check, if error, HTTP code != 200, so we wont' get here current_page = response['pageCurrent'] total_pages = response['pageTotal'] teams = response['teams'] ret_models = [] for teamData in teams: # Fix issue where FIRST's API returns dummy website for all teams if teamData['website'] is not None and 'www.firstinspires.org' in teamData['website']: website = None else: website = WebsiteHelper.format_url(teamData.get('website', None)) team = Team( id="frc{}".format(teamData['teamNumber']), team_number=teamData['teamNumber'], name=teamData['nameFull'], nickname=teamData['nameShort'], school_name=teamData.get('schoolName'), home_cmp=teamData.get('homeCMP').lower() if teamData.get('homeCMP') else None, city=teamData['city'], state_prov=teamData['stateProv'], country=teamData['country'], website=website, rookie_year=teamData['rookieYear'] ) districtTeam = None if teamData['districtCode']: districtAbbrev = DistrictType.abbrevs[teamData['districtCode'].lower()] districtTeam = DistrictTeam( id=DistrictTeam.renderKeyName(self.year, districtAbbrev, team.key_name), team=ndb.Key(Team, team.key_name), year=self.year, district=districtAbbrev, district_key=ndb.Key(District, District.renderKeyName(self.year, teamData['districtCode'].lower())), ) robot = None if teamData['robotName']: robot = Robot( id=Robot.renderKeyName(team.key_name, self.year), team=ndb.Key(Team, team.key_name), year=self.year, robot_name=teamData['robotName'].strip() ) ret_models.append((team, districtTeam, robot)) return (ret_models, (current_page < total_pages))
class TestTeamHistoryRobotsApiController(unittest2.TestCase): def setUp(self): app = webapp2.WSGIApplication([webapp2.Route(r'/<team_key:>', ApiTeamHistoryRobotsController, methods=['GET'])], debug=True) self.testapp = webtest.TestApp(app) self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.init_datastore_v3_stub() self.testbed.init_urlfetch_stub() self.testbed.init_memcache_stub() ndb.get_context().clear_cache() # Prevent data from leaking between tests self.testbed.init_taskqueue_stub(root_path=".") self.team = Team( id="frc1124", name="UberBots", team_number=1124, nickname="UberBots" ) self.robot = Robot( id="frc1124_2015", team=self.team.key, year=2015, robot_name="Orion" ) self.team.put() self.robot.put() def tearDown(self): self.testbed.deactivate() def testRobotApi(self): response = self.testapp.get('/frc1124', headers={"X-TBA-App-Id": "tba-tests:team_list-controller-test:v01"}) robot_dict = json.loads(response.body) self.assertTrue("2015" in robot_dict) robot = robot_dict["2015"] self.assertEqual(robot["key"], "frc1124_2015") self.assertEqual(robot["team_key"], "frc1124") self.assertEqual(robot["year"], 2015) self.assertEqual(robot["name"], "Orion")
def delete_robot(user, robot_id):
    if not user:
        return jsonify({'message': 'Not authorized'}), 401
    robot = Robot.find_by_id(robot_id)
    if not robot:
        return jsonify({'message': 'Robot not found'}), 404
    robot.delete_from_db()
    return jsonify({'message': 'success'}), 200
def show_form(self, mid, dsid):
    robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    if dsid:
        dataset = DataSet.get_by_id(int(dsid), parent=robot.key)
    else:
        dataset = DataSet(parent=robot.key)
    form = forms.DataSetForm(data=dataset.to_dict())
    self.render_response('robot/datasetform.html', form=form, mid=mid, dsid=dsid, dataset=dataset)
def test_delete_robot_name(self):
    self.loginUser()
    self.giveTeamAdminAccess()
    Robot(
        id=Robot.renderKeyName(self.team.key_name, self.now.year),
        team=self.team.key,
        year=self.now.year,
        robot_name='First Robot Name',
    ).put()

    form = self.getTeamInfoForm(1124)
    self.assertEqual(form['robot_name'].value, 'First Robot Name')
    form['robot_name'] = ''
    response = form.submit().follow()
    self.assertEqual(response.status_int, 301)

    robot = Robot.get_by_id(Robot.renderKeyName('frc1124', self.now.year))
    self.assertIsNone(robot)
def post_robot():
    try:
        data = json.loads(request.get_data())
        robot = Robot(**data)
    except (ValueError, TypeError):
        # Bad JSON, or fields that don't match the Robot model
        return "Invalid robot payload", 400
    db.session.add(robot)
    db.session.commit()
    # Flask view functions can't return a bare int, so serialize the id
    return str(robot.id), 201
def post(self): self._require_admin() team_key = self.request.get('team_key') year = int(self.request.get('robot_year')) name = self.request.get('robot_name') team = Team.get_by_id(team_key) if not team: self.abort(404) if not year or not name: self.abort(400) robot = Robot(id=Robot.renderKeyName(team_key, year), team=team.key, year=year, robot_name=name.strip()) RobotManipulator.createOrUpdate(robot) self.redirect('/admin/team/{}'.format(team.team_number))
def override_robot(user):
    if not user:
        return jsonify({'message': 'Not authorized'}), 401
    robot_id = request.json.get('robot_id', None)
    robot = Robot.find_by_id(robot_id)
    if not robot:
        return jsonify({'message': 'Robot not found'}), 400
    # Send manual command to robot
    robot.sendManual('manual')
    return jsonify({}), 201
def parse(self, response): """ Parse team info from FMSAPI Returns a tuple of: list of models (Team, DistrictTeam, Robot), and a Boolean indicating if there are more pages to be fetched """ # Get team json # don't need to null check, if error, HTTP code != 200, so we wont' get here current_page = response['pageCurrent'] total_pages = response['pageTotal'] teams = response['teams'] ret_models = [] for teamData in teams: # concat city/state/country to get address address = u"{}, {}, {}".format(teamData['city'], teamData['stateProv'], teamData['country']) team = Team( id="frc{}".format(teamData['teamNumber']), team_number=teamData['teamNumber'], name=teamData['nameFull'], nickname=teamData['nameShort'], address=address, website=teamData['website'], rookie_year=teamData['rookieYear'] ) districtTeam = None if teamData['districtCode']: districtAbbrev = DistrictType.abbrevs[teamData['districtCode'].lower()] districtTeam = DistrictTeam( id=DistrictTeam.renderKeyName(self.year, districtAbbrev, team.key_name), team=ndb.Key(Team, team.key_name), year=self.year, district=districtAbbrev ) robot = None if teamData['robotName']: robot = Robot( id=Robot.renderKeyName(team.key_name, self.year), team=ndb.Key(Team, team.key_name), year=self.year, robot_name=teamData['robotName'].strip() ) ret_models.append((team, districtTeam, robot)) return (ret_models, (current_page < total_pages))
def main(bot_id, bot_wx_account, topic_id):
    # Check that bot_id and the WeChat account match, then record the process id in the database
    logging.debug('bot starting with bot_id:{0}, bot_wx_account:{1}'.format(bot_id, bot_wx_account))
    robot_model = Robot.get_model_filter(Robot.Id == bot_id, Robot.WxAccount == bot_wx_account)
    if robot_model is None:
        logging.error('bot id:{0} bot_wx_account:{1} not created'.format(bot_id, bot_wx_account))
        return
    topic_model = Topic.get_model_by_id(topic_id)
    if topic_model is None or topic_model.UserId != robot_model.UserId:
        logging.error('bot[{0}]: topic[{1}] does not belong to you'.format(bot_id, topic_id))
        return
    robot_model.TopicId = topic_id
    if robot_model.get_status() != Robot.STATUS_STOPPED:
        logging.error('robot is already running')
        return
    root_path = os.path.dirname(os.path.abspath(__file__))
    logging.debug(root_path)
    storage_dir = '{0}/data/{1}.pkl'.format(root_path, bot_wx_account)
    BotManager.set_bot(robot_model, root_path, storage_dir)
    robot_model.start()
    robot_model.Pid = os.getpid()
    Robot.save([robot_model])
    logging.info('wxbot {0} {1} {2} {3} starting'.format(bot_id, bot_wx_account, robot_model.Pid, robot_model.TopicId))
    itchat.auto_login(hotReload=True, statusStorageDir=storage_dir, qrCallback=BotManager.qr_callback,
                      loginCallback=BotManager.login_callback, exitCallback=BotManager.exit_callback,
                      initCallback=BotManager.init_message)
    itchat.run(blockThread=True, schedule=BotManager.schedule, exitCallback=BotManager.exit_callback)
def process_form(self, mid, dsid):
    robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    if dsid:
        dataset = DataSet.get_by_id(int(dsid), parent=robot.key)
    else:
        dataset = DataSet(parent=robot.key)
    form = forms.DataSetForm(self.request.POST)
    if form.validate():
        dataset.populate(**form.data)
        key = dataset.put()
        return self.redirect_to('dataset-edit', mid=mid, dsid=key.id())
    self.render_response('robot/datasetform.html', form=form, mid=mid, dsid=dsid, dataset=dataset)
def send_manual_robot(user):
    if not user:
        return jsonify({'message': 'Not authorized'}), 401
    robot_id = request.json.get('robot_id', None)
    command = request.json.get('command', None)
    robot = Robot.find_by_id(robot_id)
    if not robot:
        return jsonify({'message': 'Robot not found'}), 400
    if not command:
        return jsonify({'message': 'No command found'}), 400
    if command not in ['auto', 'manual', 'forward', 'backward', 'right', 'left', 'exit', 'stop']:
        return jsonify({'message': 'Command not found'}), 400
    # Send manual command to robot
    robot.sendManual(command)
    return jsonify({}), 201
def process_form(self, mid=None):
    if mid:
        robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    else:
        robot = Robot(parent=self.current_user.key)
    form = forms.RobotForm(self.request.POST, obj=robot)
    if form.validate():
        # TODO better populate object
        robot.name = form.data['name']
        robot.schedule = form.data['schedule']
        robot.rps = form.data['rps']
        robot.timeout = form.data['timeout']
        robot.urlsource = URLSource.factory(form.urlsource.form.data)
        key = robot.put()
        return self.redirect_to('robot-view', mid=key.id())
    self.render_response('robot/form.html', form=form, mid=mid, robot=robot)
def parse(self, response):
    """
    Parse team info from FMSAPI
    Returns a tuple of models (Team, DistrictTeam, Robot)
    """
    # Get team json
    # No null check needed: on error the HTTP code is != 200, so we won't get here
    teams = response["teams"]
    teamData = teams[0]

    # concat city/state/country to get address
    address = u"{}, {}, {}".format(teamData["city"], teamData["stateProv"], teamData["country"])

    team = Team(
        team_number=teamData["teamNumber"],
        name=teamData["nameFull"],
        nickname=teamData["nameShort"],
        address=address,
        rookie_year=teamData["rookieYear"],
    )

    districtTeam = None
    if teamData["districtCode"]:
        districtAbbrev = DistrictType.abbrevs[teamData["districtCode"].lower()]
        districtTeam = DistrictTeam(
            id=DistrictTeam.renderKeyName(self.year, districtAbbrev, team.key_name),
            team=ndb.Key(Team, team.key_name),
            year=self.year,
            district=districtAbbrev,
        )

    robot = None
    if teamData["robotName"]:
        robot = Robot(
            id=Robot.renderKeyName(team.key_name, self.year),
            team=ndb.Key(Team, team.key_name),
            year=self.year,
            robot_name=teamData["robotName"].strip(),
        )

    return (team, districtTeam, robot)
def setUp(self): app = webapp2.WSGIApplication( [webapp2.Route(r"/<team_key:>", ApiTeamHistoryRobotsController, methods=["GET"])], debug=True ) self.testapp = webtest.TestApp(app) self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.init_datastore_v3_stub() self.testbed.init_urlfetch_stub() self.testbed.init_memcache_stub() ndb.get_context().clear_cache() # Prevent data from leaking between tests self.testbed.init_taskqueue_stub(root_path=".") self.team = Team(id="frc1124", name="UberBots", team_number=1124, nickname="UberBots") self.robot = Robot(id="frc1124_2015", team=self.team.key, year=2015, robot_name="Orion") self.team.put() self.robot.put()
def initialize_sim(self, random=False):
    # reset the viewer
    self.viewer.control_panel_state_init()

    # create the simulation world
    self.world = World(self.period)

    # create the robot
    robot = Robot()
    self.world.add_robot(robot)

    # generate a random environment
    if random:
        self.map_manager.random_map(self.world)
    else:
        self.map_manager.apply_to_world(self.world)

    # create the world view
    self.world_view = WorldView(self.world, self.viewer)

    # render the initial world
    self.draw_world()
def get_robots(cls, content):
    from .bot_manager import BotManager
    content_arr = content.split(' ')
    bot_id = None
    wx_account = None
    condition = [Robot.UserId == BotManager.get_robot_model().UserId]
    if len(content_arr) == 3:
        bot_id = int(content_arr[1])
        wx_account = content_arr[2]
        condition.append(Robot.Id == bot_id)
        condition.append(Robot.WxAccount == wx_account)
    robot_models = Robot.get_models_filter(*condition)
    msg = ''
    n = 0
    for robot_model in robot_models:
        txt = '{0} {1} {2} {3} {4}\n'.format(robot_model.Id, robot_model.WxAccount, robot_model.TopicId,
                                             robot_model.NickName, robot_model.get_status_name())
        if (robot_model.Id == bot_id and robot_model.WxAccount == wx_account
                and robot_model.LoginQrUrl and robot_model.is_wait_scan()):
            logger.debug('wait scan: {0}'.format(robot_model.LoginQrUrl))
            cls.send_text_msg(txt)
            cls.send_image_msg(robot_model.LoginQrUrl)
            continue
        n += 1
        msg += txt
        if n % 15 == 0:
            cls.send_text_msg(msg)
            msg = ''
    if len(msg) > 0:
        cls.send_text_msg(msg)
def post(self): self._require_admin() team_key = self.request.get('team_key') year = int(self.request.get('robot_year')) name = self.request.get('robot_name') team = Team.get_by_id(team_key) if not team: self.abort(404) if not year or not name: self.abort(400) robot = Robot( id=Robot.renderKeyName(team_key, year), team=team.key, year=year, robot_name=name.strip() ) RobotManipulator.createOrUpdate(robot) self.redirect('/admin/team/{}'.format(team.team_number))
def delete(self, mid):
    robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    robot.key.delete()
    return self.redirect_to('robot-index')
def get(self): self._require_registration() user = self.user_bundle.account.key now = datetime.datetime.now() existing_access = TeamAdminAccess.query( TeamAdminAccess.account == user, TeamAdminAccess.expiration > now).fetch() # If the current user is an admin, allow them to view this page for any # team/year combination forced_team = self.request.get("team") forced_year = self.request.get("year") if self.user_bundle.is_current_user_admin and forced_team and forced_year: existing_access.append( TeamAdminAccess( team_number=int(forced_team), year=int(forced_year), ) ) team_keys = [ ndb.Key(Team, "frc{}".format(access.team_number)) for access in existing_access ] if not team_keys: self.redirect('/mod/redeem') return years = set([access.year for access in existing_access]) teams_future = ndb.get_multi_async(team_keys) robot_keys = [ ndb.Key(Robot, Robot.renderKeyName(team.id(), now.year)) for team in team_keys ] robots_future = ndb.get_multi_async(robot_keys) social_media_futures = [ media_query.TeamSocialMediaQuery(team_key.id()).fetch_async() for team_key in team_keys ] team_medias_future = Media.query( Media.references.IN(team_keys), Media.year.IN(years)).fetch_async(50) suggestions_future = Suggestion.query( Suggestion.review_state == Suggestion.REVIEW_PENDING).filter( Suggestion.target_model.IN( self.ALLOWED_SUGGESTION_TYPES)).filter( Suggestion.target_key.IN([k.id() for k in team_keys ])).fetch_async(limit=50) team_num_to_team = { team.get_result().team_number: team.get_result() for team in teams_future } team_num_to_robot_name = { int(robot.get_result().team.id()[3:]): robot.get_result().robot_name for robot in robots_future if robot.get_result() is not None } team_medias = defaultdict(lambda: defaultdict(list)) for media in team_medias_future.get_result(): for reference in media.references: if reference in team_keys: team_num = reference.id()[3:] team_medias[int(team_num)][media.year].append(media) team_social_medias = defaultdict(list) for team_social_media_future in social_media_futures: social_medias = team_social_media_future.get_result() for media in social_medias: for reference in media.references: if reference in team_keys: team_num = reference.id()[3:] team_social_medias[int(team_num)].append(media) suggestions_by_team = defaultdict(lambda: defaultdict(list)) for suggestion in suggestions_future.get_result(): if not suggestion.target_key: continue # Assume all the keys are team keys team_num = suggestion.target_key[3:] suggestions_by_team[int(team_num)][suggestion.target_model].append( suggestion) self.template_values.update({ "existing_access": existing_access, "teams": team_num_to_team, "robot_names_by_team": team_num_to_robot_name, "team_medias": team_medias, "team_social_medias": team_social_medias, "suggestions_by_team": suggestions_by_team, "suggestion_names": self.SUGGESTION_NAMES, "suggestion_review_urls": self.SUGGESTION_REVIEW_URL, }) self.response.out.write( jinja2_engine.render('team_admin_dashboard.html', self.template_values))
def render_team_details(cls, handler, team, year, is_canonical): media_future = media_query.TeamYearMediaQuery(team.key.id(), year).fetch_async() robot_future = Robot.get_by_id_async('{}_{}'.format( team.key.id(), year)) team_districts_future = team_query.TeamDistrictsQuery( team.key.id()).fetch_async() events_sorted, matches_by_event_key, awards_by_event_key, valid_years = TeamDetailsDataFetcher.fetch( team, year, return_valid_years=True) if not events_sorted: return None participation = [] year_wlt_list = [] year_match_avg_list = [] current_event = None matches_upcoming = None short_cache = False for event in events_sorted: event_matches = matches_by_event_key.get(event.key, []) event_awards = AwardHelper.organizeAwards( awards_by_event_key.get(event.key, [])) matches_organized = MatchHelper.organizeMatches(event_matches) if event.now: current_event = event matches_upcoming = MatchHelper.upcomingMatches(event_matches) if event.within_a_day: short_cache = True if year == 2015: display_wlt = None match_avg = EventHelper.calculateTeamAvgScoreFromMatches( team.key_name, event_matches) year_match_avg_list.append(match_avg) qual_avg, elim_avg, _, _ = match_avg else: qual_avg = None elim_avg = None wlt = EventHelper.calculateTeamWLTFromMatches( team.key_name, event_matches) year_wlt_list.append(wlt) if wlt["win"] + wlt["loss"] + wlt["tie"] == 0: display_wlt = None else: display_wlt = wlt team_rank = None if event.rankings: for element in event.rankings: if str(element[1]) == str(team.team_number): team_rank = element[0] break participation.append({ 'event': event, 'matches': matches_organized, 'wlt': display_wlt, 'qual_avg': qual_avg, 'elim_avg': elim_avg, 'rank': team_rank, 'awards': event_awards }) if year == 2015: year_wlt = None year_qual_scores = [] year_elim_scores = [] for _, _, event_qual_scores, event_elim_scores in year_match_avg_list: year_qual_scores += event_qual_scores year_elim_scores += event_elim_scores year_qual_avg = float(sum(year_qual_scores)) / len( year_qual_scores) if year_qual_scores != [] else None year_elim_avg = float(sum(year_elim_scores)) / len( year_elim_scores) if year_elim_scores != [] else None else: year_qual_avg = None year_elim_avg = None year_wlt = {"win": 0, "loss": 0, "tie": 0} for wlt in year_wlt_list: year_wlt["win"] += wlt["win"] year_wlt["loss"] += wlt["loss"] year_wlt["tie"] += wlt["tie"] if year_wlt["win"] + year_wlt["loss"] + year_wlt["tie"] == 0: year_wlt = None medias_by_slugname = MediaHelper.group_by_slugname( [media for media in media_future.get_result()]) image_medias = MediaHelper.get_images( [media for media in media_future.get_result()]) district_name = None district_abbrev = None team_districts = team_districts_future.get_result() if year in team_districts: district_key = team_districts[year] district_abbrev = district_key[4:] district_type = DistrictType.abbrevs[district_abbrev] district_name = DistrictType.type_names[district_type] handler.template_values.update({ "is_canonical": is_canonical, "team": team, "participation": participation, "year": year, "years": valid_years, "year_wlt": year_wlt, "year_qual_avg": year_qual_avg, "year_elim_avg": year_elim_avg, "current_event": current_event, "matches_upcoming": matches_upcoming, "medias_by_slugname": medias_by_slugname, "image_medias": image_medias, "robot": robot_future.get_result(), "district_name": district_name, "district_abbrev": district_abbrev, }) if short_cache: handler._cache_expiration = handler.SHORT_CACHE_EXPIRATION return jinja2_engine.render('team_details.html', 
handler.template_values)
def view_crawl(self, mid, cid):
    robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    crawl = Crawl.get_by_id(int(cid), parent=robot.key)
    jobs = crawl.jobs
    self.render_response('robot/crawl.html', robot=robot, crawl=crawl, jobs=jobs)
def run(self, mid):
    robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    taskmanager.enqueue_robot('/task/runrobot', robot)
    self.response.write(json.encode({'status': 'ok'}))
def render_team_details(cls, handler, team, year, is_canonical): media_future = media_query.TeamYearMediaQuery(team.key.id(), year).fetch_async() social_media_future = media_query.TeamSocialMediaQuery(team.key.id()).fetch_async() robot_future = Robot.get_by_id_async('{}_{}'.format(team.key.id(), year)) team_districts_future = team_query.TeamDistrictsQuery(team.key.id()).fetch_async() participation_future = team_query.TeamParticipationQuery(team.key.id()).fetch_async() events_sorted, matches_by_event_key, awards_by_event_key, valid_years = TeamDetailsDataFetcher.fetch(team, year, return_valid_years=True) if not events_sorted: return None participation = [] season_wlt_list = [] offseason_wlt_list = [] year_match_avg_list = [] current_event = None matches_upcoming = None short_cache = False for event in events_sorted: event_matches = matches_by_event_key.get(event.key, []) event_awards = AwardHelper.organizeAwards(awards_by_event_key.get(event.key, [])) matches_organized = MatchHelper.organizeMatches(event_matches) if event.now: current_event = event matches_upcoming = MatchHelper.upcomingMatches(event_matches) if event.within_a_day: short_cache = True if year == 2015: display_wlt = None match_avg = EventHelper.calculateTeamAvgScoreFromMatches(team.key_name, event_matches) year_match_avg_list.append(match_avg) qual_avg, elim_avg, _, _ = match_avg else: qual_avg = None elim_avg = None wlt = EventHelper.calculateTeamWLTFromMatches(team.key_name, event_matches) if event.event_type_enum in EventType.SEASON_EVENT_TYPES: season_wlt_list.append(wlt) else: offseason_wlt_list.append(wlt) if wlt["win"] + wlt["loss"] + wlt["tie"] == 0: display_wlt = None else: display_wlt = wlt team_rank = None if event.rankings: for element in event.rankings: if str(element[1]) == str(team.team_number): team_rank = element[0] break participation.append({'event': event, 'matches': matches_organized, 'wlt': display_wlt, 'qual_avg': qual_avg, 'elim_avg': elim_avg, 'rank': team_rank, 'awards': event_awards}) season_wlt = None offseason_wlt = None if year == 2015: year_wlt = None year_qual_scores = [] year_elim_scores = [] for _, _, event_qual_scores, event_elim_scores in year_match_avg_list: year_qual_scores += event_qual_scores year_elim_scores += event_elim_scores year_qual_avg = float(sum(year_qual_scores)) / len(year_qual_scores) if year_qual_scores != [] else None year_elim_avg = float(sum(year_elim_scores)) / len(year_elim_scores) if year_elim_scores != [] else None else: year_qual_avg = None year_elim_avg = None season_wlt = {"win": 0, "loss": 0, "tie": 0} offseason_wlt = {"win": 0, "loss": 0, "tie": 0} for wlt in season_wlt_list: season_wlt["win"] += wlt["win"] season_wlt["loss"] += wlt["loss"] season_wlt["tie"] += wlt["tie"] if season_wlt["win"] + season_wlt["loss"] + season_wlt["tie"] == 0: season_wlt = None for wlt in offseason_wlt_list: offseason_wlt["win"] += wlt["win"] offseason_wlt["loss"] += wlt["loss"] offseason_wlt["tie"] += wlt["tie"] if offseason_wlt["win"] + offseason_wlt["loss"] + offseason_wlt["tie"] == 0: offseason_wlt = None medias_by_slugname = MediaHelper.group_by_slugname([media for media in media_future.get_result()]) image_medias = MediaHelper.get_images(media_future.get_result()) social_medias = sorted(social_media_future.get_result(), key=MediaHelper.social_media_sorter) preferred_image_medias = filter(lambda x: team.key in x.preferred_references, image_medias) district_name = None district_abbrev = None team_districts = team_districts_future.get_result() for district in team_districts: if 
district.year == year: district_abbrev = district.abbreviation district_name = district.display_name last_competed = None participation_years = participation_future.get_result() if len(participation_years) > 0: last_competed = max(participation_years) current_year = datetime.date.today().year handler.template_values.update({ "is_canonical": is_canonical, "team": team, "participation": participation, "year": year, "years": valid_years, "season_wlt": season_wlt, "offseason_wlt": offseason_wlt, "year_qual_avg": year_qual_avg, "year_elim_avg": year_elim_avg, "current_event": current_event, "matches_upcoming": matches_upcoming, "medias_by_slugname": medias_by_slugname, "social_medias": social_medias, "image_medias": image_medias, "preferred_image_medias": preferred_image_medias, "robot": robot_future.get_result(), "district_name": district_name, "district_abbrev": district_abbrev, "last_competed": last_competed, "current_year": current_year, }) if short_cache: handler._cache_expiration = handler.SHORT_CACHE_EXPIRATION return jinja2_engine.render('team_details.html', handler.template_values)
def delete(self, mid, dsid):
    robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    dataset = DataSet.get_by_id(int(dsid), parent=robot.key)
    dataset.key.delete()
    return self.redirect_to('robot-view', mid=mid)
def index(self):
    template_vars = {
        'robots': Robot.list(ancestor=self.current_user.key),
        'schedules': SCHEDULES,
    }
    self.render_response('robot/index.html', **template_vars)
def _query_async(self):
    team_key = self._query_args[0]
    robots = yield Robot.query(Robot.team == ndb.Key(Team, team_key)).fetch_async()
    raise ndb.Return(robots)
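# Hedged sketch (not from the source): _query_async uses yield/ndb.Return, so it
# is expected to execute as an ndb tasklet; a standalone equivalent would look
# like this. The function name is illustrative, and the Robot/Team models are
# assumed importable from the app's model modules.
from google.appengine.ext import ndb

@ndb.tasklet
def robots_for_team_async(team_key):
    # Asynchronously fetch all Robot entities pointing at the given team key
    robots = yield Robot.query(Robot.team == ndb.Key(Team, team_key)).fetch_async()
    raise ndb.Return(robots)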
def enqueue_scheduled_robots(schedule, url):
    for robot in Robot.get_scheduled_robots(schedule):
        # TODO batch add
        taskqueue.add(url=url, payload=ndb_serialize.dumps(robot))
def test_list_returns_one(self):
    groupkey = ndb.Key('_', '_')
    robot = Robot(parent=groupkey, name='_', schedule=SCHEDULES.keys()[0])
    robot.put()
    self.assertEqual(Robot.list(ancestor=groupkey), [robot])
def test_list_returns_empty_list(self):
    groupkey = ndb.Key('_', '_')
    self.assertEqual(Robot.list(ancestor=groupkey), [])
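# Hedged sketch (not from the source): a minimal Robot.list classmethod that
# would satisfy the two tests above, assuming Robot is an ndb.Model; the
# property types shown here are assumptions for illustration.
from google.appengine.ext import ndb

class Robot(ndb.Model):
    name = ndb.StringProperty()
    schedule = ndb.StringProperty()

    @classmethod
    def list(cls, ancestor):
        # Ancestor query: return every Robot stored under the given group key
        return cls.query(ancestor=ancestor).fetch()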
def view(self, mid):
    robot = Robot.get_by_id(int(mid), parent=self.current_user.key)
    crawls = Crawl.query(ancestor=robot.key).order(-Crawl.started_at).fetch()
    datasets = DataSet.query(ancestor=robot.key).fetch()
    self.render_response('robot/view.html', robot=robot, mid=mid, crawls=crawls,
                         datasets=datasets, schedules=SCHEDULES)
def get(self): self._require_registration() user = self.user_bundle.account.key now = datetime.datetime.now() existing_access = TeamAdminAccess.query( TeamAdminAccess.account == user, TeamAdminAccess.expiration > now).fetch() # If the current user is an admin, allow them to view this page for any # team/year combination forced_team = self.request.get("team") forced_year = self.request.get("year") if self.user_bundle.is_current_user_admin and forced_team and forced_year: existing_access.append( TeamAdminAccess( team_number=int(forced_team), year=int(forced_year), )) team_keys = [ ndb.Key(Team, "frc{}".format(access.team_number)) for access in existing_access ] if not team_keys: self.redirect('/mod/redeem') return years = set([access.year for access in existing_access]) teams_future = ndb.get_multi_async(team_keys) robot_keys = [ ndb.Key(Robot, Robot.renderKeyName(team.id(), now.year)) for team in team_keys ] robots_future = ndb.get_multi_async(robot_keys) social_media_futures = [ media_query.TeamSocialMediaQuery(team_key.id()).fetch_async() for team_key in team_keys ] team_medias_future = Media.query(Media.references.IN(team_keys), Media.year.IN(years)).fetch_async(50) suggestions_future = Suggestion.query( Suggestion.review_state == Suggestion.REVIEW_PENDING).filter( Suggestion.target_model.IN( self.ALLOWED_SUGGESTION_TYPES)).filter( Suggestion.target_key.IN([k.id() for k in team_keys ])).fetch_async(limit=50) team_num_to_team = { team.get_result().team_number: team.get_result() for team in teams_future } team_num_to_robot_name = { int(robot.get_result().team.id()[3:]): robot.get_result().robot_name for robot in robots_future if robot.get_result() is not None } team_medias = defaultdict(lambda: defaultdict(list)) for media in team_medias_future.get_result(): for reference in media.references: if reference in team_keys: team_num = reference.id()[3:] team_medias[int(team_num)][media.year].append(media) team_social_medias = defaultdict(list) for team_social_media_future in social_media_futures: social_medias = team_social_media_future.get_result() for media in social_medias: for reference in media.references: if reference in team_keys: team_num = reference.id()[3:] team_social_medias[int(team_num)].append(media) suggestions_by_team = defaultdict(lambda: defaultdict(list)) for suggestion in suggestions_future.get_result(): if not suggestion.target_key: continue # Assume all the keys are team keys team_num = suggestion.target_key[3:] suggestions_by_team[int(team_num)][suggestion.target_model].append( suggestion) self.template_values.update({ "existing_access": existing_access, "teams": team_num_to_team, "robot_names_by_team": team_num_to_robot_name, "team_medias": team_medias, "team_social_medias": team_social_medias, "suggestions_by_team": suggestions_by_team, "suggestion_names": self.SUGGESTION_NAMES, "suggestion_review_urls": self.SUGGESTION_REVIEW_URL, }) self.response.out.write( jinja2_engine.render('team_admin_dashboard.html', self.template_values))
def render_team_details(cls, handler, team, year, is_canonical): media_future = media_query.TeamYearMediaQuery(team.key.id(), year).fetch_async() social_media_future = media_query.TeamSocialMediaQuery(team.key.id()).fetch_async() robot_future = Robot.get_by_id_async('{}_{}'.format(team.key.id(), year)) team_districts_future = team_query.TeamDistrictsQuery(team.key.id()).fetch_async() participation_future = team_query.TeamParticipationQuery(team.key.id()).fetch_async() events_sorted, matches_by_event_key, awards_by_event_key, valid_years = TeamDetailsDataFetcher.fetch(team, year, return_valid_years=True) if not events_sorted: return None participation = [] season_wlt_list = [] offseason_wlt_list = [] year_match_avg_list = [] current_event = None matches_upcoming = None short_cache = False for event in events_sorted: event_matches = matches_by_event_key.get(event.key, []) event_awards = AwardHelper.organizeAwards(awards_by_event_key.get(event.key, [])) matches_organized = MatchHelper.organizeMatches(event_matches) if event.now: current_event = event matches_upcoming = MatchHelper.upcomingMatches(event_matches) if event.within_a_day: short_cache = True if year == 2015: display_wlt = None match_avg = EventHelper.calculateTeamAvgScoreFromMatches(team.key_name, event_matches) year_match_avg_list.append(match_avg) qual_avg, elim_avg, _, _ = match_avg else: qual_avg = None elim_avg = None wlt = EventHelper.calculateTeamWLTFromMatches(team.key_name, event_matches) if event.event_type_enum in EventType.SEASON_EVENT_TYPES: season_wlt_list.append(wlt) else: offseason_wlt_list.append(wlt) if wlt["win"] + wlt["loss"] + wlt["tie"] == 0: display_wlt = None else: display_wlt = wlt team_rank = None if event.rankings: for element in event.rankings: if str(element[1]) == str(team.team_number): team_rank = element[0] break participation.append({'event': event, 'matches': matches_organized, 'wlt': display_wlt, 'qual_avg': qual_avg, 'elim_avg': elim_avg, 'rank': team_rank, 'awards': event_awards}) season_wlt = None offseason_wlt = None if year == 2015: year_wlt = None year_qual_scores = [] year_elim_scores = [] for _, _, event_qual_scores, event_elim_scores in year_match_avg_list: year_qual_scores += event_qual_scores year_elim_scores += event_elim_scores year_qual_avg = float(sum(year_qual_scores)) / len(year_qual_scores) if year_qual_scores != [] else None year_elim_avg = float(sum(year_elim_scores)) / len(year_elim_scores) if year_elim_scores != [] else None else: year_qual_avg = None year_elim_avg = None season_wlt = {"win": 0, "loss": 0, "tie": 0} offseason_wlt = {"win": 0, "loss": 0, "tie": 0} for wlt in season_wlt_list: season_wlt["win"] += wlt["win"] season_wlt["loss"] += wlt["loss"] season_wlt["tie"] += wlt["tie"] if season_wlt["win"] + season_wlt["loss"] + season_wlt["tie"] == 0: season_wlt = None for wlt in offseason_wlt_list: offseason_wlt["win"] += wlt["win"] offseason_wlt["loss"] += wlt["loss"] offseason_wlt["tie"] += wlt["tie"] if offseason_wlt["win"] + offseason_wlt["loss"] + offseason_wlt["tie"] == 0: offseason_wlt = None medias_by_slugname = MediaHelper.group_by_slugname([media for media in media_future.get_result()]) image_medias = MediaHelper.get_images(media_future.get_result()) social_medias = sorted(social_media_future.get_result(), key=MediaHelper.social_media_sorter) preferred_image_medias = filter(lambda x: team.key in x.preferred_references, image_medias) district_name = None district_abbrev = None team_districts = team_districts_future.get_result() if year in team_districts: 
district_key = team_districts[year] district_abbrev = district_key[4:] district_type = DistrictType.abbrevs[district_abbrev] district_name = DistrictType.type_names[district_type] last_competed = None participation_years = participation_future.get_result() if len(participation_years) > 0: last_competed = max(participation_years) current_year = datetime.date.today().year handler.template_values.update({ "is_canonical": is_canonical, "team": team, "participation": participation, "year": year, "years": valid_years, "season_wlt": season_wlt, "offseason_wlt": offseason_wlt, "year_qual_avg": year_qual_avg, "year_elim_avg": year_elim_avg, "current_event": current_event, "matches_upcoming": matches_upcoming, "medias_by_slugname": medias_by_slugname, "social_medias": social_medias, "image_medias": image_medias, "preferred_image_medias": preferred_image_medias, "robot": robot_future.get_result(), "district_name": district_name, "district_abbrev": district_abbrev, "last_competed": last_competed, "current_year": current_year, }) if short_cache: handler._cache_expiration = handler.SHORT_CACHE_EXPIRATION return jinja2_engine.render('team_details.html', handler.template_values)
def render_team_details(cls, handler, team, year, is_canonical): hof_award_future = award_query.TeamEventTypeAwardsQuery(team.key.id(), EventType.CMP_FINALS, AwardType.CHAIRMANS).fetch_async() hof_video_future = media_query.TeamTagMediasQuery(team.key.id(), MediaTag.CHAIRMANS_VIDEO).fetch_async() hof_presentation_future = media_query.TeamTagMediasQuery(team.key.id(), MediaTag.CHAIRMANS_PRESENTATION).fetch_async() hof_essay_future = media_query.TeamTagMediasQuery(team.key.id(), MediaTag.CHAIRMANS_ESSAY).fetch_async() media_future = media_query.TeamYearMediaQuery(team.key.id(), year).fetch_async() social_media_future = media_query.TeamSocialMediaQuery(team.key.id()).fetch_async() robot_future = Robot.get_by_id_async("{}_{}".format(team.key.id(), year)) team_districts_future = team_query.TeamDistrictsQuery(team.key.id()).fetch_async() participation_future = team_query.TeamParticipationQuery(team.key.id()).fetch_async() hof_awards = hof_award_future.get_result() hof_video = hof_video_future.get_result() hof_presentation = hof_presentation_future.get_result() hof_essay = hof_essay_future.get_result() hall_of_fame = { "is_hof": len(hof_awards) > 0, "years": [award.year for award in hof_awards], "media": { "video": hof_video[0].youtube_url_link if len(hof_video) > 0 else None, "presentation": hof_presentation[0].youtube_url_link if len(hof_presentation) > 0 else None, "essay": hof_essay[0].external_link if len(hof_essay) > 0 else None, }, } events_sorted, matches_by_event_key, awards_by_event_key, valid_years = TeamDetailsDataFetcher.fetch(team, year, return_valid_years=True) if not events_sorted: return None district_name = None district_abbrev = None team_district_points = None team_districts = team_districts_future.get_result() for district in team_districts: if district and district.year == year: district_abbrev = district.abbreviation district_name = district.display_name if district.rankings: team_district_points = next( iter(filter(lambda r: r['team_key'] == team.key_name, district.rankings)), None) break participation = [] season_wlt_list = [] offseason_wlt_list = [] year_match_avg_list = [] current_event = None matches_upcoming = None short_cache = False for event in events_sorted: event_matches = matches_by_event_key.get(event.key, []) event_awards = AwardHelper.organizeAwards(awards_by_event_key.get(event.key, [])) matches_organized = MatchHelper.organizeMatches(event_matches) if event.now: current_event = event matches_upcoming = MatchHelper.upcomingMatches(event_matches) if event.within_a_day: short_cache = True if year == 2015: display_wlt = None match_avg = EventHelper.calculateTeamAvgScoreFromMatches(team.key_name, event_matches) year_match_avg_list.append(match_avg) qual_avg, elim_avg, _, _ = match_avg else: qual_avg = None elim_avg = None wlt = EventHelper.calculateTeamWLTFromMatches(team.key_name, event_matches) if event.event_type_enum in EventType.SEASON_EVENT_TYPES: season_wlt_list.append(wlt) else: offseason_wlt_list.append(wlt) if wlt["win"] + wlt["loss"] + wlt["tie"] == 0: display_wlt = None else: display_wlt = wlt team_rank = None if event.details and event.details.rankings2: for ranking in event.details.rankings2: if ranking['team_key'] == team.key.id(): team_rank = ranking['rank'] break video_ids = [] playlist = "" for level in Match.COMP_LEVELS: matches = matches_organized[level] for match in matches: video_ids += [video.split("?")[0] for video in match.youtube_videos] if video_ids: playlist_title = u"{} (Team {})".format(event.name, team.team_number) playlist = 
u"https://www.youtube.com/watch_videos?video_ids={}&title={}" playlist = playlist.format(u",".join(video_ids), playlist_title) district_points = None if team_district_points: district_points = next( iter( filter(lambda e: e['event_key'] == event.key_name, team_district_points['event_points'])), None) participation.append({ "event": event, "matches": matches_organized, "wlt": display_wlt, "qual_avg": qual_avg, "elim_avg": elim_avg, "rank": team_rank, "awards": event_awards, "playlist": playlist, "district_points": district_points, }) season_wlt = None offseason_wlt = None if year == 2015: year_wlt = None year_qual_scores = [] year_elim_scores = [] for _, _, event_qual_scores, event_elim_scores in year_match_avg_list: year_qual_scores += event_qual_scores year_elim_scores += event_elim_scores year_qual_avg = float(sum(year_qual_scores)) / len(year_qual_scores) if year_qual_scores != [] else None year_elim_avg = float(sum(year_elim_scores)) / len(year_elim_scores) if year_elim_scores != [] else None else: year_qual_avg = None year_elim_avg = None season_wlt = {"win": 0, "loss": 0, "tie": 0} offseason_wlt = {"win": 0, "loss": 0, "tie": 0} for wlt in season_wlt_list: season_wlt["win"] += wlt["win"] season_wlt["loss"] += wlt["loss"] season_wlt["tie"] += wlt["tie"] if season_wlt["win"] + season_wlt["loss"] + season_wlt["tie"] == 0: season_wlt = None for wlt in offseason_wlt_list: offseason_wlt["win"] += wlt["win"] offseason_wlt["loss"] += wlt["loss"] offseason_wlt["tie"] += wlt["tie"] if offseason_wlt["win"] + offseason_wlt["loss"] + offseason_wlt["tie"] == 0: offseason_wlt = None medias_by_slugname = MediaHelper.group_by_slugname([media for media in media_future.get_result()]) avatar = MediaHelper.get_avatar(media_future.get_result()) image_medias = MediaHelper.get_images(media_future.get_result()) social_medias = sorted(social_media_future.get_result(), key=MediaHelper.social_media_sorter) preferred_image_medias = filter(lambda x: team.key in x.preferred_references, image_medias) last_competed = None participation_years = participation_future.get_result() if len(participation_years) > 0: last_competed = max(participation_years) current_year = datetime.date.today().year handler.template_values.update({ "is_canonical": is_canonical, "team": team, "participation": participation, "year": year, "years": valid_years, "season_wlt": season_wlt, "offseason_wlt": offseason_wlt, "year_qual_avg": year_qual_avg, "year_elim_avg": year_elim_avg, "current_event": current_event, "matches_upcoming": matches_upcoming, "medias_by_slugname": medias_by_slugname, "avatar": avatar, "social_medias": social_medias, "image_medias": image_medias, "preferred_image_medias": preferred_image_medias, "robot": robot_future.get_result(), "district_name": district_name, "district_abbrev": district_abbrev, "last_competed": last_competed, "current_year": current_year, "max_year": tba_config.MAX_YEAR, "hof": hall_of_fame, "team_district_points": team_district_points, }) if short_cache: handler._cache_expiration = handler.SHORT_CACHE_EXPIRATION return jinja2_engine.render("team_details.html", handler.template_values)
def render_team_details(cls, handler, team, year, is_canonical): media_future = media_query.TeamYearMediaQuery(team.key.id(), year).fetch_async() robot_future = Robot.get_by_id_async('{}_{}'.format(team.key.id(), year)) team_districts_future = team_query.TeamDistrictsQuery(team.key.id()).fetch_async() events_sorted, matches_by_event_key, awards_by_event_key, valid_years = TeamDetailsDataFetcher.fetch(team, year, return_valid_years=True) if not events_sorted: return None participation = [] year_wlt_list = [] year_match_avg_list = [] current_event = None matches_upcoming = None short_cache = False for event in events_sorted: event_matches = matches_by_event_key.get(event.key, []) event_awards = AwardHelper.organizeAwards(awards_by_event_key.get(event.key, [])) matches_organized = MatchHelper.organizeMatches(event_matches) if event.now: current_event = event matches_upcoming = MatchHelper.upcomingMatches(event_matches) if event.within_a_day: short_cache = True if year == 2015: display_wlt = None match_avg = EventHelper.calculateTeamAvgScoreFromMatches(team.key_name, event_matches) year_match_avg_list.append(match_avg) qual_avg, elim_avg, _, _ = match_avg else: qual_avg = None elim_avg = None wlt = EventHelper.calculateTeamWLTFromMatches(team.key_name, event_matches) year_wlt_list.append(wlt) if wlt["win"] + wlt["loss"] + wlt["tie"] == 0: display_wlt = None else: display_wlt = wlt team_rank = None if event.rankings: for element in event.rankings: if str(element[1]) == str(team.team_number): team_rank = element[0] break participation.append({'event': event, 'matches': matches_organized, 'wlt': display_wlt, 'qual_avg': qual_avg, 'elim_avg': elim_avg, 'rank': team_rank, 'awards': event_awards}) if year == 2015: year_wlt = None year_qual_scores = [] year_elim_scores = [] for _, _, event_qual_scores, event_elim_scores in year_match_avg_list: year_qual_scores += event_qual_scores year_elim_scores += event_elim_scores year_qual_avg = float(sum(year_qual_scores)) / len(year_qual_scores) if year_qual_scores != [] else None year_elim_avg = float(sum(year_elim_scores)) / len(year_elim_scores) if year_elim_scores != [] else None else: year_qual_avg = None year_elim_avg = None year_wlt = {"win": 0, "loss": 0, "tie": 0} for wlt in year_wlt_list: year_wlt["win"] += wlt["win"] year_wlt["loss"] += wlt["loss"] year_wlt["tie"] += wlt["tie"] if year_wlt["win"] + year_wlt["loss"] + year_wlt["tie"] == 0: year_wlt = None medias_by_slugname = MediaHelper.group_by_slugname([media for media in media_future.get_result()]) district_name = None district_abbrev = None team_districts = team_districts_future.get_result() if year in team_districts: district_key = team_districts[year] district_abbrev = district_key[4:] district_type = DistrictType.abbrevs[district_abbrev] district_name = DistrictType.type_names[district_type] handler.template_values.update({ "is_canonical": is_canonical, "team": team, "participation": participation, "year": year, "years": valid_years, "year_wlt": year_wlt, "year_qual_avg": year_qual_avg, "year_elim_avg": year_elim_avg, "current_event": current_event, "matches_upcoming": matches_upcoming, "medias_by_slugname": medias_by_slugname, "robot": robot_future.get_result(), "district_name": district_name, "district_abbrev": district_abbrev, }) if short_cache: handler._cache_expiration = handler.SHORT_CACHE_EXPIRATION path = os.path.join(os.path.dirname(__file__), '../templates/team_details.html') return template.render(path, handler.template_values)
lightred1 = np.array([0, 100, 20])
darkred1 = np.array([5, 255, 255])
lightred2 = np.array([175, 100, 20])
darkred2 = np.array([179, 255, 255])
lightblue = np.array([100, 100, 20])
darkblue = np.array([125, 255, 255])
lightgreen = np.array([40, 50, 50])
darkgreen = np.array([80, 255, 255])

robot = Robot(vrep, client_id,
              name='Pioneer_p3dx{}'.format(sufix),
              name_motor_left='Pioneer_p3dx_leftMotor{}'.format(sufix),
              name_motor_right='Pioneer_p3dx_rightMotor{}'.format(sufix),
              name_camera='Vision_sensor{}'.format(sufix),
              name_prox_sensor='Proximity_sensor{}'.format(sufix),
              name_payload='Payload{}'.format(sufix),
              velocity=VELOCITY_SLOW,
              velocity_rotation=VELOCITY_ROTATION_SLOW,
              tork=TORK_SLOW,
              tork_rotation=TORK_ROTATE_SLOW,
              error=ERROR)


def shaped(mask, color):
    contours, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    for cont in contours:
        area = cv2.contourArea(cont)
        approx = cv2.approxPolyDP(cont, 0.01 * cv2.arcLength(cont, True), True)
        x = approx.ravel()[0]
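# Hedged usage sketch (not from the source): the HSV bounds above are typically
# turned into binary masks with cv2.inRange before being handed to a contour
# routine such as shaped(). The `frame` variable (a BGR image from the vision
# sensor) is an assumption for illustration.
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
mask_red = cv2.bitwise_or(cv2.inRange(hsv, lightred1, darkred1),
                          cv2.inRange(hsv, lightred2, darkred2))
mask_blue = cv2.inRange(hsv, lightblue, darkblue)
mask_green = cv2.inRange(hsv, lightgreen, darkgreen)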
import asyncio
import sys

from pymongo import MongoClient

from uploader import Uploader
from models.map import Map
from models.robot import Robot
from settings import MONGO_URI

mongo = MongoClient(MONGO_URI)
db = mongo.ways_db
mapper = Map()
up = Uploader(db, mapper)

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    filename = input("Enter the name of the map file: ")
    try:
        up.upload(filename)
    except FileNotFoundError:
        print('Wrong filename')
        sys.exit()
    tasks = [
        loop.create_task(Robot().walking(route.get('map', {}).get('data')))
        for route in db.ways.find({})
    ]
    try:
        loop.run_until_complete(asyncio.wait(tasks))
    except ValueError:
        print('No routes found')
    finally:
        loop.close()