def __enter__(self):
    try:
        self.meta = utils.Struct(**self.model.load(self.checkpoint_location()))
        if not self.args or self.args.LOAD_PARAMES_FROM_CHECKPOINT:
            # Merge the loaded checkpoint metadata into the current arguments.
            # (Reconstruction: the original merge expression was truncated.)
            self.args = dict(list(self.args.items()) + list(self.meta.items()))
    except Exception as e:
        self.ledger.debug("Exception: ", e)
def AssertGetFeaturesFromPlacesLayer(self, api_response_content,
                                     expected_results):
    """Verifies GetFeaturesFromPlacesLayer with given input and output.

    Prepares a mock for urlfetch to return given api_response_content on a
    call to the Places API. Verifies that GetJsonFromGooglePlacesApi returns
    expected_results given the urlfetch mock setup.

    Args:
      api_response_content: Content that urlfetch should return
      expected_results: an array of Places results that
          GetJsonFromGooglePlacesApi should return
    """
    config.Set('google_api_server_key', 'someFakeApiKey')

    # Simulate a successful fetch from Places API by setting up a fake
    # for urlfetch
    url = ('https://maps.googleapis.com/maps/api/place/nearbysearch/json?'
           'location=20.0%2C50.0'
           '&rankby=distance'
           '&types=pharmacy'
           '&key=someFakeApiKey')
    url_responses = {url: utils.Struct(content=api_response_content)}
    self.mox.stubs.Set(urlfetch, 'fetch',
                       lambda url, **kwargs: url_responses[url])

    # Get Features based on Google Places API results for the layer
    self.assertEquals(
        expected_results,
        card.GetFeaturesFromPlacesLayer(MAP_ROOT.get('layers')[3],
                                        ndb.GeoPt(20, 50)))

    self.mox.UnsetStubs()
def alphabeta_search(board, game, finish_by=None):
    "Find a move based on an alpha-beta search of the game tree."
    # Pick at least some default to move if we're really slow.
    # We also keep it updated each level further we go down.
    best_completed_move = board.moves()[0]
    try:
        state = TronState(board, tron.ME)
        # Use iterative deepening search around the AIMA minimax
        # algorithm (the one that uses alpha-beta pruning and allows
        # the search to be cutoff and evaluated early). We deepen by
        # 2 to account for the way the moves are handled simultaneously
        # by the game class to match the production engine.
        for depth_limit in xrange(2, sys.maxint, 2):
            stats = utils.Struct(nodes=0, max_depth=0)
            cutoff_fn = make_cutoff_fn(depth_limit, stats, finish_by, game)
            move = games.alphabeta_search(state, game, None, cutoff_fn, eval_fn)
            # Return this move if we didn't get any deeper.
            if stats.nodes <= 2:
                return game.move_to_return(move)
            else:
                best_completed_move = game.move_to_return(move)
    except TimeAlmostUp:
        return best_completed_move
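
# --- Illustrative sketch (not from the original project) ---
# The function above depends on project-specific pieces (TronState, games,
# make_cutoff_fn, eval_fn) that are not shown here. The self-contained sketch
# below only demonstrates the iterative-deepening pattern it uses: search one
# depth at a time, keep the last fully completed answer, and fall back to it
# when time runs out. All names below are hypothetical stand-ins.
import time

class TimeAlmostUp(Exception):
    """Stand-in for the exception raised when the time budget is about to expire."""

def iterative_deepening(search_at_depth, fallback, finish_by, step=2):
    """Deepen the search until finish_by, returning the last completed result."""
    best_completed = fallback
    depth = step
    try:
        while True:
            if finish_by is not None and time.time() >= finish_by:
                raise TimeAlmostUp()
            best_completed = search_at_depth(depth)
            depth += step
    except TimeAlmostUp:
        pass
    return best_completed

# Example usage: the "search" simply reports the depth reached before the deadline.
# print(iterative_deepening(lambda d: d, fallback=0, finish_by=time.time() + 0.01))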
def testFetchSecondTime(self):
    # Simulate a successful fetch of a document that was previously fetched.
    self.mox.StubOutWithMock(urlfetch, 'fetch')
    headers = {'If-none-match': ETAG}
    urlfetch.fetch(SOURCE_URL, headers=headers, deadline=30).AndReturn(
        utils.Struct(status_code=200, headers=RESPONSE_HEADERS_2,
                     content=SIMPLE_KML_2))
    self.mox.ReplayAll()

    self.assertEquals({
        'fetch_status': 200,
        'fetch_length': len(SIMPLE_KML_2),
        'fetch_last_modified': LAST_MODIFIED_STRING_2,
        'fetch_etag': ETAG_2,
        'update_time': LAST_MODIFIED_TIMESTAMP_2,
        'length': len(SIMPLE_KML_2),
        'md5_hash': hashlib.md5(SIMPLE_KML_2).hexdigest()
    }, metadata_fetch.FetchAndUpdateMetadata({
        'fetch_status': 200,
        'fetch_length': len(SIMPLE_KML),
        'fetch_last_modified': LAST_MODIFIED_STRING,
        'fetch_etag': ETAG,
        'update_time': LAST_MODIFIED_TIMESTAMP,
        'length': len(SIMPLE_KML),
        'md5_hash': hashlib.md5(SIMPLE_KML).hexdigest()
    }, SOURCE_ADDRESS))

    self.mox.VerifyAll()
def testFetchWithEtag(self):
    # Verify that we send "If-none-match", and simulate getting a 304.
    self.mox.StubOutWithMock(urlfetch, 'fetch')
    headers = {'If-none-match': ETAG}
    urlfetch.fetch(SOURCE_URL, headers=headers, deadline=30).AndReturn(
        utils.Struct(status_code=304, headers={}, content='Not modified'))
    self.mox.ReplayAll()

    # Pretend there is existing metadata for 1234 bytes of content.
    old_metadata = {
        'fetch_status': 200,
        'fetch_length': 1234,
        'fetch_etag': ETAG,
        'length': 1234,
        'md5_hash': 'foo'
    }
    # Updated metadata should be the same except fetch_status and fetch_length.
    self.assertEquals({
        'fetch_status': 304,
        'fetch_length': len('Not modified'),
        'fetch_etag': ETAG,
        'length': 1234,
        'md5_hash': 'foo'
    }, metadata_fetch.FetchAndUpdateMetadata(old_metadata, SOURCE_ADDRESS))

    self.mox.VerifyAll()
def main():
    ''' Test environment with the given model '''
    with open('parameters.yml', 'r') as conf:
        args = yaml.load(conf, Loader=yaml.FullLoader)
    args = utils.Struct(**args)
    test_agents(args)
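
# --- Illustrative sketch (not from the original projects) ---
# Every snippet in this collection wraps a plain dict (parsed YAML, JSON, or
# literal keyword arguments) in utils.Struct to get attribute-style access.
# A minimal helper of that shape might look like the class below; the real
# utils.Struct in each project may differ.
class Struct(object):
    """Expose a mapping and/or keyword arguments as object attributes."""

    def __init__(self, *mappings, **kwargs):
        for mapping in mappings:      # supports Struct(json.loads(...))
            self.__dict__.update(mapping)
        self.__dict__.update(kwargs)  # supports Struct(status_code=200, ...)

    def __repr__(self):
        return 'Struct(%r)' % self.__dict__

# Usage: Struct(**{'lr': 0.001}).lr returns 0.001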
def DoTest(kml, expected):
    self.mox.StubOutWithMock(urlfetch, 'fetch')
    urlfetch.fetch(url).AndReturn(utils.Struct(content=kml))
    self.mox.ReplayAll()
    self.assertEquals(expected, GetLegendItems.GetKmlFromUrl(url))
    self.mox.VerifyAll()
    self.mox.UnsetStubs()
def __enter__(self):
    # TODO: load parames from checkpoint
    if self.args.CHECKPOINT != '':
        # try:
        cp = self.model.load(self.checkpoint_location(self.args.CHECKPOINT))
        self.args = utils.Struct(**cp['args'], **vars(self.args))
        # except Exception as e:
        #     self.ledger.raise_(e, 'error with enter.')
    return self
def testGatherMetadataWmsInvalidXml(self):
    self.maxDiff = None
    # A WMS response with invalid XML.
    self.assertTrue(
        metadata_fetch.GatherMetadata(
            'WMS', utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                                content=INVALID_XML_WMS_RESPONSE))['ill_formed'])
def match(self, request):  # pylint: disable=g-bad-name
    domain = request.get('domain', None)
    match = self.DOMAIN_PREFIX_RE.match(request.path)
    if match:
        domain = domain or match.group(1)  # query param overrides domain in path
        request = utils.Struct(get=request.get, path=match.group(2))
    result = self.router.match(request)
    if result and domain:
        result[2]['domain'] = domain  # add an extra 'domain' keyword argument
    return result
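
# --- Illustrative sketch (not from the original project) ---
# Hypothetical illustration of the domain-prefix rewrite performed above.
# The real DOMAIN_PREFIX_RE is not shown; a pattern of roughly this shape
# would capture a leading "host.name" path segment and the remaining path.
import re

DOMAIN_PREFIX_RE = re.compile(r'^/([A-Za-z0-9.-]+\.[A-Za-z]{2,})(/.*)$')

m = DOMAIN_PREFIX_RE.match('/example.com/repo/admin')
if m:
    print(m.group(1))  # 'example.com' -> used as the 'domain' argument
    print(m.group(2))  # '/repo/admin' -> path forwarded to the inner router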
def testGatherMetadataWmsInvalid(self):
    self.maxDiff = None
    # An invalid WMS response that is valid XML.
    self.assertEquals({}, metadata_fetch.GatherMetadata(
        'WMS', utils.Struct(
            status_code=200, headers=RESPONSE_HEADERS,
            content=INVALID_WMS_RESPONSE))['wms_layers'])
def loadPage(self, url, uri=None, method='GET', params=''):
    ''' load a web page via https '''
    if not url:
        print 'Request URL undefined'

    if not uri:
        urlData = urlparse(url)
        url = urlData.netloc
        uri = urlData.path + '?' + urlData.query

    # prepare params, append to uri
    if params:
        params = urlencode(params)
        if method == 'GET':
            uri += ('?' if uri.find('?') == -1 else '&') + params
            params = ''

    # insert local cookies in request
    headers = {
        'Cookie': '; '.join(
            [key + '=' + self.cookies[key] for key in self.cookies.keys()])
    }

    if method == 'POST':
        headers["Content-type"] = "application/x-www-form-urlencoded"

    # print 'Request URL: %s:/%s > %s # %s' % (url, uri, unquote(params), headers["Cookie"])
    # print 'url: ', url
    # print 'uri: ', uri
    # print 'params: ', unquote(params)
    # print 'Cookie: ', headers["Cookie"]

    conn = httplib.HTTPSConnection(url)
    conn.request(method, uri, params, headers)
    response = conn.getresponse()
    data = response.read()
    conn.close()

    # print 'Response : %s > %s' % (response.status, response.getheaders())
    # print 'response status: ', response.status
    # print 'response headers: ', response.getheaders()

    result = utils.Struct(status=response.status,
                          location=response.getheader('location', None),
                          data=data)

    # update local cookies
    sk = Cookie.SimpleCookie(response.getheader("Set-Cookie", ""))
    for key in sk:
        self.cookies[key] = sk[key].value

    return result
def testGatherMetadataWmsValid(self):
    self.maxDiff = None
    # A valid WMS GetCapabilities response.
    self.assertEquals(
        METADATA_VALID_WMS_RESPONSE,
        metadata_fetch.GatherMetadata(
            'WMS', utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                                content=VALID_WMS_RESPONSE))['wms_layers'])

    # A response with a valid layer and several invalid Layers.
    self.assertEquals(
        METADATA_INVALID_LAYERS_WMS_RESPONSE,
        metadata_fetch.GatherMetadata(
            'WMS', utils.Struct(
                status_code=200, headers=RESPONSE_HEADERS,
                content=INVALID_LAYERS_WMS_RESPONSE))['wms_layers'])
def check_consistency(atoms, bonds, angles, dihedrals, name='', allow_errors=False):
    for x in bonds + angles + dihedrals:
        # Compile all index types?
        index2s = tuple([a.type.index2 for a in x.atoms])
        if not x.type:
            print 'No type for structure indices', tuple([a.type.index2 for a in x.atoms]), ':', tuple([a.element for a in x.atoms]), ': atoms', tuple([a.index for a in x.atoms]), 'in file', name
            if isinstance(x, utils.Dihedral):
                # no params for dihedral is OK, just give warning
                x.type = utils.Struct(index2s=index2s, e=(0.0, 0.0, 0.0))
            elif allow_errors:
                continue
            else:
                print 'Exit'; exit()
def testSetDetailsOnFilteredFeatures(self):
    config.Set('google_api_server_key', 'someFakeApiKey')

    # Simulate a successful fetch from Places API by setting up a fake urlfetch
    url_responses = {}
    helsinki_attrs = ['Listing by <a href="fakeurl1.com">FakeSite1</a>']
    api_response_content = json.dumps({
        'status': 'OK',
        'html_attributions': helsinki_attrs,
        'result': {
            'formatted_address': 'Street1',
            'formatted_phone_number': '111-111-1111'
        }
    })
    url = card.PLACES_API_DETAILS_URL + 'placeid=placeId1&key=someFakeApiKey'
    url_responses[url] = utils.Struct(content=api_response_content)

    columbus_attrs = ['Listing by <a href="fakeurl2.com">FakeSite2</a>']
    api_response_content = json.dumps({
        'status': 'OK',
        'html_attributions': columbus_attrs,
        'result': {
            'formatted_address': 'Street2',
            'formatted_phone_number': '222-222-2222'
        }
    })
    url = card.PLACES_API_DETAILS_URL + 'placeid=placeId2&key=someFakeApiKey'
    url_responses[url] = utils.Struct(content=api_response_content)

    self.mox.stubs.Set(urlfetch, 'fetch',
                       lambda url, **kwargs: url_responses[url])

    exp_features = [
        ('Helsinki', '<div>Street1</div><div>111-111-1111</div>',
         helsinki_attrs),
        ('Columbus', '<div>Street2</div><div>222-222-2222</div>',
         columbus_attrs)
    ]
    features = PLACES_FEATURES[:]
    card.SetDetailsOnFilteredFeatures(features)
    self.assertEquals(exp_features,
                      [(f.name, f.description_html, f.html_attrs)
                       for f in features])
def get_repo_options(request, lang):
    """Returns a list of the names and titles of the launched repositories."""
    options = []
    for repo in model.Repo.list_launched():
        titles = config.get_for_repo(repo, 'repo_titles', {})
        default_title = (titles.values() or ['?'])[0]
        title = titles.get(lang, titles.get('en', default_title))
        url = utils.get_repo_url(request, repo)
        test_mode = config.get_for_repo(repo, 'test_mode')
        options.append(
            utils.Struct(repo=repo, title=title, url=url, test_mode=test_mode))
    return options
def read_opls_parameters(parameter_file='oplsaa.prm', pair_style='lj/cut'):
    """ Reads an opls parameter file, default with path 'oplsaa.prm'. """
    if read_opls_parameters.atom_types is None:
        elements = {}
        atom_types = []
        bond_types = []
        angle_types = []
        dihedral_types = []
        pair_types = []
        for line in open(parameter_file):
            columns = line.split()
            if not columns:
                continue
            if columns[0] == 'atom':
                m = re.match('atom +(\d+) +(\d+) +(\S+) +"([^"]+)" +(\d+) +(\S+) +(\d+)', line)
                atom_type = utils.Struct(index=int(m.group(1)),
                                         index2=int(m.group(2)),
                                         element_name=m.group(3),
                                         notes=m.group(4),
                                         element=int(m.group(5)),
                                         mass=float(m.group(6)),
                                         bond_count=int(m.group(7)),
                                         style=pair_style)
                if atom_type.element not in elements:
                    elements[atom_type.element] = []
                elements[atom_type.element].append(atom_type)
                if '(UA)' in atom_type.notes:
                    atom_type.element = 0  # reject united-atom parameters
                atom_types.append(atom_type)
            elif columns[0] == 'vdw':
                atom_types[int(columns[1]) - 1].vdw_r = max(float(columns[2]), 1.0)
                atom_types[int(columns[1]) - 1].vdw_e = max(float(columns[3]), 0.01)
            elif columns[0] == 'charge':
                atom_types[int(columns[1]) - 1].charge = float(columns[2])
            elif columns[0] == 'bond':
                bond_types.append(utils.Struct(
                    index2s=tuple([int(s) for s in columns[1:3]]),
                    e=float(columns[3]), r=float(columns[4]), style='harmonic'))
            elif columns[0] == 'angle':
                angle_types.append(utils.Struct(
                    index2s=tuple([int(s) for s in columns[1:4]]),
                    e=float(columns[4]), angle=float(columns[5]), style='harmonic'))
            elif columns[0] == 'torsion':
                dihedral_types.append(utils.Struct(
                    index2s=tuple([int(s) for s in columns[1:5]]),
                    e=tuple([float(s) for s in columns[5::3]]), style='opls'))
                if len(dihedral_types[-1].e) == 3:
                    dihedral_types[-1].e = dihedral_types[-1].e + (0.,)
            elif columns[0] == 'pair_type':
                pass
        read_opls_parameters.elements = elements
        read_opls_parameters.atom_types = atom_types
        read_opls_parameters.bond_types = bond_types
        read_opls_parameters.angle_types = angle_types
        read_opls_parameters.dihedral_types = dihedral_types
        #read_opls_parameters.pair_types = pair_types
    return (read_opls_parameters.elements, read_opls_parameters.atom_types,
            read_opls_parameters.bond_types, read_opls_parameters.angle_types,
            read_opls_parameters.dihedral_types)  #, read_opls_parameters.pair_types
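
# --- Illustrative sketch (not from the original project) ---
# For reference, the 'atom' branch above expects records of roughly this shape
# (the field values below are made up for the example, not taken from a real
# oplsaa.prm file):
#
#   atom 1 1 CT "Alkane CH4" 6 12.011 4
#
# The regex splits such a line into index, index2, element name, a quoted
# note, atomic number, mass, and bond count:
import re

example_line = 'atom 1 1 CT "Alkane CH4" 6 12.011 4'
m = re.match(r'atom +(\d+) +(\d+) +(\S+) +"([^"]+)" +(\d+) +(\S+) +(\d+)',
             example_line)
print(m.groups())  # ('1', '1', 'CT', 'Alkane CH4', '6', '12.011', '4')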
def testFetchHttpError(self):
    # Simulate a 404 Not found error.
    self.mox.StubOutWithMock(urlfetch, 'fetch')
    urlfetch.fetch(SOURCE_URL, headers={}, deadline=30).AndReturn(
        utils.Struct(status_code=404, headers={}, content='Not found'))
    self.mox.ReplayAll()

    self.assertEquals({
        'fetch_error_occurred': True,
        'fetch_status': 404
    }, metadata_fetch.FetchAndUpdateMetadata(None, SOURCE_ADDRESS))

    self.mox.VerifyAll()
def testGatherMetadataKmz(self):
    # KMZ containing valid KML.
    kmz = CreateZip([('doc.kml', SIMPLE_KML)])
    self.assertEquals({
        'fetch_status': 200,
        'fetch_length': len(kmz),
        'fetch_last_modified': LAST_MODIFIED_STRING,
        'fetch_etag': ETAG,
        'update_time': LAST_MODIFIED_TIMESTAMP,
        'length': len(kmz),
        'md5_hash': hashlib.md5(kmz).hexdigest()
    }, metadata_fetch.GatherMetadata(
        'KML', utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                            content=kmz)))

    # KMZ containing valid KML with unsupported features.
    content = '<kml><Document><Placemark><Camera/></Placemark></Document></kml>'
    kmz = CreateZip([('doc.kml', content)])
    self.assertEquals({
        'fetch_status': 200,
        'fetch_length': len(kmz),
        'fetch_last_modified': LAST_MODIFIED_STRING,
        'fetch_etag': ETAG,
        'update_time': LAST_MODIFIED_TIMESTAMP,
        'length': len(kmz),
        'md5_hash': hashlib.md5(kmz).hexdigest(),
        'has_unsupported_kml': True
    }, metadata_fetch.GatherMetadata(
        'KML', utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                            content=kmz)))
def main():
    ''' Train environment with custom observation and prediction '''
    os.environ["WANDB_MODE"] = "dryrun"
    with open('parameters.yml', 'r') as conf:
        args = yaml.load(conf, Loader=yaml.FullLoader)
    if args["generic"]["enable_wandb"]:
        wandb.init(project='flatland-challenge', entity="wadaboa", config=args)
        wandb.tensorboard.patch(tensorboardX=False, pytorch=True)
    writer = SummaryWriter()
    args = utils.Struct(**args)
    train_agents(args, writer)
    writer.close()
def testGatherMetadataGeorss(self):
    # Valid GeoRSS.
    content = '<rss><channel><item></item></channel></rss>'
    self.assertEquals({
        'fetch_status': 200,
        'fetch_length': len(content),
        'fetch_last_modified': LAST_MODIFIED_STRING,
        'fetch_etag': ETAG,
        'update_time': LAST_MODIFIED_TIMESTAMP,
        'length': len(content),
        'md5_hash': hashlib.md5(content).hexdigest()
    }, metadata_fetch.GatherMetadata(
        'GEORSS', utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                               content=content)))

    # Valid GeoRSS with no features.
    content = '<rss><channel></channel></rss>'
    self.assertEquals({
        'fetch_status': 200,
        'fetch_length': len(content),
        'fetch_last_modified': LAST_MODIFIED_STRING,
        'fetch_etag': ETAG,
        'update_time': LAST_MODIFIED_TIMESTAMP,
        'length': len(content),
        'md5_hash': hashlib.md5(content).hexdigest(),
        'has_no_features': True
    }, metadata_fetch.GatherMetadata(
        'GEORSS', utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                               content=content)))
def DoTest(pairs, expected_content):
    string_io = StringIO.StringIO()
    zip_file = zipfile.ZipFile(string_io, 'w')
    for name, content in pairs:
        zip_file.writestr(name, content)
    zip_file.close()

    self.mox.StubOutWithMock(urlfetch, 'fetch')
    urlfetch.fetch(url).AndReturn(
        utils.Struct(content=string_io.getvalue()))
    self.mox.ReplayAll()

    self.assertEquals(expected_content, GetLegendItems.GetKmlFromUrl(url))
    self.mox.VerifyAll()
    self.mox.UnsetStubs()
def setup(self, request, *args, **kwargs):
    """See docs on BaseView.setup."""
    # pylint: disable=attribute-defined-outside-init
    super(AdminBaseView, self).setup(request, *args, **kwargs)
    self.env.show_logo = True
    self.env.enable_javascript = True
    self.env.user = users.get_current_user()
    self.env.user_admin_permission = self._get_user_admin_permission()
    self.env.logout_url = users.create_logout_url(self.build_absolute_uri())
    self.env.all_repo_options = [
        utils.Struct(repo=repo,
                     url=self.build_absolute_path('/%s/admin' % repo))
        for repo in sorted(model.Repo.list())
    ]
    self.xsrf_tool = utils.XsrfTool()
def get_feature_datas(directory, data_format):
    """Yield parsed feature data from the binary files in the given directory.

    Valid data_format values are 'proto' and 'json'.
    """
    if data_format == 'proto':
        import proto.node_detection_pb2
        import proto.util
        for f in os.listdir(directory):
            yield proto.util.read(os.path.join(directory, f))
    elif data_format == 'json':
        for f in os.listdir(directory):
            yield utils.Struct(
                json.loads(open(os.path.join(directory, f)).read()))
    else:
        raise NotImplementedError(
            "Unknown data format {}.".format(data_format))
def get_params(self):
    """Gets parameter values out of the request.

    Subclasses that need additional values should override this function,
    with an implementation like this:
        return views.base.read_params(
            super(<Subclass>, self).get_params(),
            self.request,
            get_params={'x': validate_x, 'y': validate_y,},
            post_params={'z': validate_z})

    Returns:
        utils.Struct: A container with the values of CGI parameters used by
        this view.
    """
    return read_params(utils.Struct(), self.request,
                       get_params={'lang': utils.strip})
def repo_options(self):
    """This is different from env.repo_options because this contains all
    repositories including deactivated ones.

    This is defined as a property so that it is evaluated lazily only when
    necessary.
    """
    try:
        return [
            utils.Struct(repo=repo,
                         url=utils.get_repo_url(self.request, repo) + '/admin')
            for repo in sorted(model.Repo.list())
        ]
    except:
        # Logs the exception here because exceptions thrown during template
        # variable evaluation are silently ignored. Note that
        # logging.exception() logs the current exception by default.
        logging.exception('Exception thrown')
        return None
def testGatherMetadataInvalid(self):
    # Invalid XML syntax.
    content = '<blah'
    self.assertEquals({
        'fetch_status': 200,
        'fetch_length': len(content),
        'fetch_last_modified': LAST_MODIFIED_STRING,
        'fetch_etag': ETAG,
        'update_time': LAST_MODIFIED_TIMESTAMP,
        'length': len(content),
        'md5_hash': hashlib.md5(content).hexdigest(),
        'ill_formed': True
    }, metadata_fetch.GatherMetadata(
        'KML', utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                            content=content)))
def get_first_feature_data(directory, data_format):
    """Read first feature data file to initialize position related functions.

    Valid data_format values are 'proto' and 'json'.
    """
    if data_format == 'proto':
        import proto.node_detection_pb2
        import proto.util
        return proto.util.read(
            os.path.join(directory, os.listdir(directory)[0]))
    elif data_format == 'json':
        return utils.Struct(
            json.loads(
                open(os.path.join(directory, os.listdir(directory)[0])).read()))
    else:
        raise NotImplementedError(
            "Unknown data format {}.".format(data_format))
def testFetchFirstTime(self):
    # Simulate a normal, successful fetch of a document for the first time.
    self.mox.StubOutWithMock(urlfetch, 'fetch')
    urlfetch.fetch(SOURCE_URL, headers={}, deadline=30).AndReturn(
        utils.Struct(status_code=200, headers=RESPONSE_HEADERS,
                     content=SIMPLE_KML))
    self.mox.ReplayAll()

    self.assertEquals({
        'fetch_status': 200,
        'fetch_length': len(SIMPLE_KML),
        'fetch_last_modified': LAST_MODIFIED_STRING,
        'fetch_etag': ETAG,
        'update_time': LAST_MODIFIED_TIMESTAMP,
        'length': len(SIMPLE_KML),
        'md5_hash': hashlib.md5(SIMPLE_KML).hexdigest()
    }, metadata_fetch.FetchAndUpdateMetadata(None, SOURCE_ADDRESS))

    self.mox.VerifyAll()
def setup_env(request):
    """Constructs the 'env' object, which contains various template variables
    that are commonly used by most handlers."""
    env = utils.Struct()
    env.repo, env.action = get_repo_and_action(request)
    env.config = config.Configuration(env.repo or '*')

    env.analytics_id = config.get('analytics_id')
    env.amp_gtm_id = config.get('amp_gtm_id')
    env.maps_api_key = config.get('maps_api_key')

    # Internationalization-related stuff.
    env.charset = select_charset(request)
    env.lang = select_lang(request, env.config)
    env.rtl = env.lang in const.LANGUAGES_BIDI
    env.virtual_keyboard_layout = const.VIRTUAL_KEYBOARD_LAYOUTS.get(env.lang)

    # Used for parsing query params. This must be done before accessing any
    # query params which may have multi-byte value, such as "given_name" below
    # in this function.
    request.charset = env.charset

    # Determine the resource bundle to use.
    env.default_resource_bundle = config.get('default_resource_bundle', '1')
    env.resource_bundle = (request.cookies.get('resource_bundle', '') or
                           env.default_resource_bundle)

    # Information about the request.
    env.url = utils.set_url_param(request.url, 'lang', env.lang)
    env.scheme, env.netloc, env.path, _, _ = urlparse.urlsplit(request.url)
    env.force_https = False
    env.domain = env.netloc.split(':')[0]
    env.global_url = utils.get_repo_url(request, 'global')

    # Commonly used information that's rendered or localized for templates.
    env.language_options = get_language_options(request, env.config, env.lang)
    env.repo_options = get_repo_options(request, env.lang)
    env.expiry_options = [
        utils.Struct(value=value, text=const.PERSON_EXPIRY_TEXT[value])
        for value in sorted(const.PERSON_EXPIRY_TEXT.keys(), key=int)
    ]
    env.status_options = [
        utils.Struct(value=value, text=const.NOTE_STATUS_TEXT[value])
        for value in pfif.NOTE_STATUS_VALUES
        if (value != 'believed_dead' or
            not env.config or env.config.allow_believed_dead_via_ui)
    ]
    env.hidden_input_tags_for_preserved_query_params = (
        get_hidden_input_tags_for_preserved_query_params(request))

    ui_param = request.get('ui', '').strip().lower()

    # Interprets "small" and "style" parameters for backward compatibility.
    # TODO(ichikawa): Delete these in near future when we decide to drop
    # support of these parameters.
    small_param = request.get('small', '').strip().lower()
    style_param = request.get('style', '').strip().lower()
    if not ui_param and small_param == 'yes':
        ui_param = 'small'
    elif not ui_param and style_param:
        ui_param = style_param

    if ui_param:
        env.ui = ui_param
    elif user_agents.is_jp_tier2_mobile_phone(request):
        env.ui = 'light'
    else:
        env.ui = 'default'

    # UI configurations.
    #
    # Enables features which require JavaScript.
    env.enable_javascript = True
    # Enables operations which require Captcha.
    env.enable_captcha = True
    # Enables photo upload.
    env.enable_photo_upload = True
    # Enables to flag/unflag notes as spam, and to reveal spam notes.
    env.enable_spam_ops = True
    # Enables duplicate marking mode.
    env.enable_dup_mode = True
    # Shows a logo on top of the page.
    env.show_logo = True
    # Shows language menu.
    env.show_language_menu = True
    # Uses short labels for buttons.
    env.use_short_buttons = False
    # Optional "target" attribute for links to non-small pages.
    env.target_attr = ''
    # Shows record IDs in the results page.
    env.show_record_ids_in_results = True
    # Shows non AMP HTML pages by default.
    env.amp = False

    if env.ui == 'small':
        env.show_logo = False
        env.target_attr = ' target="_blank" '

    elif env.ui == 'light':
        # Disables features which require JavaScript. Some feature phones
        # don't support JavaScript.
        env.enable_javascript = False
        # Disables operations which require Captcha because Captcha requires
        # JavaScript.
        env.enable_captcha = False
        # Uploading is often not supported in feature phones.
        env.enable_photo_upload = False
        # Disables spam operations because it requires JavaScript and
        # supporting more pages on ui=light.
        env.enable_spam_ops = False
        # Disables duplicate marking mode because it doesn't support
        # small screens and it requires JavaScript.
        env.enable_dup_mode = False
        # Hides the logo on the top to save the space. Also, the logo links
        # to the global page which doesn't support small screens.
        env.show_logo = False
        # Hides language menu because the menu in the current position is
        # annoying in feature phones.
        # TODO(ichikawa): Consider layout of the language menu.
        env.show_language_menu = False
        # Too long buttons are not fully shown in some feature phones.
        env.use_short_buttons = True
        # To make it simple.
        env.show_record_ids_in_results = False

    env.back_chevron = u'\xab'
    back_chevron_in_charset = True
    try:
        env.back_chevron.encode(env.charset)
    except UnicodeEncodeError:
        # u'\xab' is not in the charset (e.g. Shift_JIS).
        back_chevron_in_charset = False
    if not back_chevron_in_charset or env.ui == 'light':
        # Use ASCII characters on ui=light too because some feature phones
        # support UTF-8 but don't render UTF-8 symbols such as u'\xab'.
        env.back_chevron = u'<<'

    env.enable_maps = (env.enable_javascript and
                       not env.config.zero_rating_mode and
                       env.maps_api_key)
    env.enable_analytics = (env.enable_javascript and
                            not env.config.zero_rating_mode and
                            env.analytics_id)
    env.enable_translate = (env.enable_javascript and
                            not env.config.zero_rating_mode and
                            env.config.translate_api_key)

    env.admin = AdminEnv(request)

    # Repo-specific information.
    if env.repo:
        # repo_url is the root URL for the repository.
        env.repo_url = utils.get_repo_url(request, env.repo)
        # start_url is like repo_url but preserves parameters such as 'ui'.
        env.start_url = utils.get_url(request, env.repo, '')
        # URL of the link in the heading. The link on ui=small links to the
        # normal UI.
        env.repo_title_url = (
            env.repo_url if env.ui == 'small' else env.start_url)
        # URL to force default UI. Note that we show ui=light version in some
        # user agents when ui parameter is not specified.
        env.default_ui_url = utils.get_url(request, env.repo, '', ui='default')
        env.repo_path = urlparse.urlsplit(env.repo_url)[2]
        env.repo_title = get_localized_message(
            env.config.repo_titles, env.lang, '?')
        env.start_page_custom_html = get_localized_message(
            env.config.start_page_custom_htmls, env.lang, '')
        env.results_page_custom_html = get_localized_message(
            env.config.results_page_custom_htmls, env.lang, '')
        env.view_page_custom_html = get_localized_message(
            env.config.view_page_custom_htmls, env.lang, '')
        env.seek_query_form_custom_html = get_localized_message(
            env.config.seek_query_form_custom_htmls, env.lang, '')
        env.footer_custom_html = get_localized_message(
            env.config.footer_custom_htmls, env.lang, '')
        # If the repository is deactivated, we should not show test mode
        # notification.
        env.repo_test_mode = (env.config.test_mode and
                              not env.config.deactivated)
        env.force_https = env.config.force_https

        env.params_full_name = request.get('full_name', '').strip()
        if not env.params_full_name:
            # Preformat the name from 'given_name' and 'family_name' parameters.
            given_name = request.get('given_name', '').strip()
            family_name = request.get('family_name', '').strip()
            env.params_full_name = utils.get_full_name(
                given_name, family_name, env.config)

    return env