def fields(self):
    """Registration form fields: displayed name, validated username,
    optional e-mail (when required by config), and a double-entry
    password constrained by the configured length bounds."""
    min_len = asint(tg.config.get('auth.min_password_len', 6))
    max_len = asint(tg.config.get('auth.max_password_len', 30))
    username_field = ew.TextField(
        name='username',
        label='Desired Username',
        validator=plugin.AuthenticationProvider.get(
            None).username_validator(),
    )
    form_fields = [
        ew.TextField(name='display_name',
                     label='Displayed Name',
                     validator=V.UnicodeString(not_empty=True)),
        username_field,
    ]
    # E-mail is only collected when the deployment requires it.
    if asbool(config.get('auth.require_email_addr', False)):
        form_fields.append(
            ew.TextField(name='email',
                         label='Your e-mail',
                         validator=fev.Email(not_empty=True)))
    form_fields.extend([
        ew.PasswordField(
            name='pw',
            label='New Password',
            attrs=dict(minlength=min_len, maxlength=max_len),
            validator=V.UnicodeString(not_empty=True,
                                      min=min_len, max=max_len)),
        ew.PasswordField(name='pw2',
                         label='New Password (again)',
                         validator=V.UnicodeString(not_empty=True)),
    ])
    return form_fields
def fields(self):
    """Change-password form fields: current password, new password
    entered twice, plus a hidden return-to URL."""
    min_len = asint(tg.config.get('auth.min_password_len', 6))
    max_len = asint(tg.config.get('auth.max_password_len', 30))
    return [
        ew.PasswordField(
            name='oldpw',
            label='Old Password',
            validator=V.UnicodeString(not_empty=True),
            attrs=dict(required=True, autocomplete='current-password'),
        ),
        ew.PasswordField(
            name='pw',
            label='New Password',
            attrs=dict(minlength=min_len,
                       maxlength=max_len,
                       required=True,
                       autocomplete='new-password'),
            validator=V.UnicodeString(not_empty=True,
                                      min=min_len, max=max_len),
        ),
        ew.PasswordField(
            name='pw2',
            label='New Password (again)',
            validator=V.UnicodeString(not_empty=True),
            attrs=dict(required=True, autocomplete='new-password'),
        ),
        ew.HiddenField(name='return_to'),
    ]
def highlight(self, text, lexer=None, filename=None):
    """Render *text* as syntax-highlighted HTML markup via Pygments.

    Empty input yields a short placeholder message.  Text exceeding
    ``scm.view.max_syntax_highlight_bytes`` (or with no determinable
    lexer) is escaped and wrapped in a bare <pre> instead.
    """
    if not text:
        if lexer == 'diff':
            return Markup('<em>File contents unchanged</em>')
        return Markup('<em>Empty file</em>')
    # Don't use line numbers for diff highlight's, as per [#1484]
    if lexer == 'diff':
        formatter = pygments.formatters.HtmlFormatter(
            cssclass='codehilite', linenos=False)
    else:
        formatter = self.pygments_formatter
    text = h.really_unicode(text)
    max_bytes = asint(
        config.get('scm.view.max_syntax_highlight_bytes', 500000))
    if lexer is None:
        if len(text) < max_bytes:
            try:
                lexer = pygments.lexers.get_lexer_for_filename(
                    filename, encoding='chardet')
            except pygments.util.ClassNotFound:
                # Unknown extension: fall through to the plain <pre> path.
                pass
    else:
        lexer = pygments.lexers.get_lexer_by_name(lexer, encoding='chardet')
    if lexer is None or len(text) >= max_bytes:
        # no highlighting, but we should escape, encode, and wrap it in
        # a <pre>
        return Markup('<pre>' + cgi.escape(text) + '</pre>')
    return Markup(pygments.highlight(text, lexer, formatter))
def fields(self):
    """Change-password form fields: current password, new password
    entered twice, plus a hidden return-to URL."""
    min_len = asint(tg.config.get('auth.min_password_len', 6))
    max_len = asint(tg.config.get('auth.max_password_len', 30))
    return [
        ew.PasswordField(
            name='oldpw',
            label='Old Password',
            validator=fev.UnicodeString(not_empty=True),
            attrs=dict(required=True, autocomplete='current-password'),
        ),
        ew.PasswordField(
            name='pw',
            label='New Password',
            attrs=dict(minlength=min_len,
                       maxlength=max_len,
                       required=True,
                       autocomplete='new-password'),
            validator=fev.UnicodeString(not_empty=True,
                                        min=min_len, max=max_len),
        ),
        ew.PasswordField(
            name='pw2',
            label='New Password (again)',
            validator=fev.UnicodeString(not_empty=True),
            attrs=dict(required=True, autocomplete='new-password'),
        ),
        ew.HiddenField(name='return_to'),
    ]
def fields(self):
    """Registration form fields: displayed name, regex-validated
    username, optional e-mail, and a double-entry password."""
    min_len = asint(tg.config.get('auth.min_password_len', 6))
    max_len = asint(tg.config.get('auth.max_password_len', 30))
    username = ew.TextField(name='username',
                            label='Desired Username',
                            validator=fev.Regex(h.re_project_name))
    # Replace the generic regex failure message with a helpful one.
    username.validator._messages['invalid'] = (
        'Usernames must include only small letters, numbers, and dashes.'
        ' They must also start with a letter and be at least 3 characters'
        ' long.')
    form_fields = [
        ew.TextField(name='display_name',
                     label='Displayed Name',
                     validator=fev.UnicodeString(not_empty=True)),
        username,
    ]
    if asbool(config.get('auth.require_email_addr', False)):
        form_fields.append(
            ew.TextField(name='email',
                         label='Your e-mail',
                         validator=fev.Email(not_empty=True)))
    form_fields.extend([
        ew.PasswordField(
            name='pw',
            label='New Password',
            validator=fev.UnicodeString(not_empty=True,
                                        min=min_len, max=max_len)),
        ew.PasswordField(name='pw2',
                         label='New Password (again)',
                         validator=fev.UnicodeString(not_empty=True)),
    ])
    return form_fields
def _get_activities_data(self, **kw):
    """Return template data for an activity timeline page.

    :param kw: may contain ``page`` and ``limit`` query params
    :raises exc.HTTPNotFound: when the activity stream feature is
        disabled (config default, overridable per-browser by cookie)
    """
    activity_enabled = config.get('activitystream.enabled', False)
    activity_enabled = request.cookies.get(
        'activitystream.enabled', activity_enabled)
    if not asbool(activity_enabled):
        raise exc.HTTPNotFound()
    c.follow_toggle = W.follow_toggle
    c.page_list = W.page_list
    if c.project.is_user_project:
        followee = c.project.user_project_of
        # On someone else's user-project, show only their own activity.
        actor_only = followee != c.user
    else:
        followee = c.project
        actor_only = False
    following = g.director.is_connected(c.user, followee)
    # BUG FIX: normalize paging params to int *before* querying, so the
    # timeline query and the returned page/limit values agree
    # (previously the raw, possibly-string kw values were passed to
    # get_timeline while the converted ints were computed afterwards).
    page = asint(kw.get('page', 0))
    limit = asint(kw.get('limit', 100))
    timeline = g.director.get_timeline(followee, page=page,
                                       limit=limit,
                                       actor_only=actor_only,
                                       filter_func=perm_check(c.user))
    return dict(
        followee=followee,
        following=following,
        timeline=timeline,
        page=page,
        limit=limit,
        # A full page implies there is probably a next page.
        has_more=len(timeline) == limit)
def read_file(self, user, autocreate=False):
    """Load the Google Authenticator config file for *user*.

    :param user: user whose config file is read
    :param autocreate: when True, create the user's config directory if
        missing, and return a fresh default-configured
        ``GoogleAuthenticatorFile`` when no file exists yet
    :return: a ``GoogleAuthenticatorFile``, or ``None`` when the file
        does not exist and *autocreate* is False
    :raises IOError: for any I/O failure other than a missing file
    """
    if autocreate:
        userdir = os.path.dirname(self.config_file(user))
        if not os.path.exists(userdir):
            # 0700: keep the secrets directory private to the server user
            os.makedirs(userdir, 0700)
    try:
        with open(self.config_file(user)) as f:
            return GoogleAuthenticatorFile.load(f.read())
    except IOError as e:
        if e.errno == errno.ENOENT:  # file doesn't exist
            if autocreate:
                # Seed a new file with rate-limit and TOTP defaults.
                gaf = GoogleAuthenticatorFile()
                gaf.options['RATE_LIMIT'] = '{} {}'.format(
                    asint(config.get('auth.multifactor.rate_limit.num', 3)),
                    asint(
                        config.get('auth.multifactor.rate_limit.time', 30)))
                gaf.options['DISALLOW_REUSE'] = None
                gaf.options['TOTP_AUTH'] = None
                return gaf
            else:
                return None
        else:
            # Unexpected failure (permissions, etc.): propagate.
            raise
def retry_cmd(cmd, fail_count=0):
    """Run *cmd* via self.check_call, retrying on non-zero exit.

    Sleeps between attempts; once ``scm.import.retry_count``
    cumulative failures are reached, raises SVNCalledProcessError.
    Returns the updated failure count so callers can carry it across
    multiple commands.
    """
    max_fail = asint(tg.config.get('scm.import.retry_count', 50))
    returncode = -1
    while returncode != 0 and fail_count < max_fail:
        stdout, stderr, returncode = self.check_call(cmd,
                                                     fail_on_error=False)
        if returncode == 0:
            continue
        fail_count += 1
        log.info('Attempt %s. Error running %s Details:\n%s',
                 fail_count, cmd, stderr)
        time.sleep(asint(tg.config.get('scm.import.retry_sleep_secs', 5)))
    if fail_count == max_fail:
        raise SVNCalledProcessError(cmd, returncode, stdout, stderr)
    return fail_count
def enforce_rate_limit(self, user):
    """Raise MultifactorRateLimitError when *user* has exceeded the
    configured number of multifactor attempts within the time window;
    otherwise record the pruned attempt history back on the user."""
    previous_attempts = user.get_tool_data('allura',
                                           'multifactor_attempts') or []
    allowed_num = asint(config.get('auth.multifactor.rate_limit.num', 3))
    allowed_secs = asint(config.get('auth.multifactor.rate_limit.time', 30))
    ok, recent_attempts = check_rate_limit(allowed_num, allowed_secs,
                                           previous_attempts)
    user.set_tool_data('allura', multifactor_attempts=recent_attempts)
    if not ok:
        raise MultifactorRateLimitError
class DisassemblerController(BaseController):
    """Serves an HTML mark-up of a disassembled game asset, with
    configurable recursion depth and list/dict culling limits."""

    default_depth = asint(config.get('disassembler.default_depth', '2'))
    # BUG FIX: these two previously read each other's config keys
    # ('disassembler.default_dict_cull' fed default_list_cull and vice
    # versa), so configuring one silently changed the other.
    default_list_cull = asint(
        config.get('disassembler.default_list_cull', '5'))
    default_dict_cull = asint(
        config.get('disassembler.default_dict_cull', '5'))

    @classmethod
    def app(cls, slug, asset):
        """Render the disassembly of *asset* in game *slug*.

        With a ``node`` request parameter, return the marked-up subtree
        for that node; otherwise render the full disassembler page.
        Aborts with 404 on unknown games, bad parameters, or assets
        that cannot be read or decoded.
        """
        game = get_game_by_slug(slug)
        if not game:
            abort(404, 'Invalid game: %s' % slug)
        try:
            depth = int(request.params.get('depth', cls.default_depth))
            list_cull = int(request.params.get('list_cull',
                                               cls.default_list_cull))
            dict_cull = int(request.params.get('dict_cull',
                                               cls.default_dict_cull))
            expand = bool(request.params.get('expand', False))
            userdata = int(request.params.get('userdata', 0))
        # BUG FIX: int() raises ValueError for malformed strings
        # (TypeError only for non-string junk); catch both so bad query
        # params produce a 404 instead of an unhandled 500.
        except (TypeError, ValueError) as e:
            abort(404, 'Invalid parameter: %s' % str(e))
        # Clamp all culling/depth parameters to sane minimums.
        depth = max(1, depth)
        list_cull = max(1, list_cull)
        dict_cull = max(1, dict_cull)
        node = request.params.get('node', None)
        if node:
            try:
                (json_asset, filename) = get_asset(asset, slug, userdata)
                link_prefix = '/disassemble/%s' % slug
                disassembler = Disassembler(Json2htmlRenderer(), list_cull,
                                            dict_cull, depth, link_prefix)
                response.status = 200
                Compactor.disable(request)
                return disassembler.mark_up_asset({'root': json_asset},
                                                  expand, node)
            except IOError as e:
                abort(404, str(e))
            except json.JSONDecodeError as e:
                _, ext = os.path.splitext(filename)
                if ext == '.json':
                    abort(404, 'Failed decoding JSON asset: %s\nError was: %s'
                          % (asset, str(e)))
                else:
                    abort(404,
                          'Currently unable to disassemble this asset: %s'
                          % asset)
        else:
            c.game = game
            local_context = {
                'asset': asset,
                'list_cull': list_cull,
                'dict_cull': dict_cull,
                'depth': depth,
                'userdata': userdata
            }
            return render('/disassembler/disassembler.html', local_context)
def fields(self):
    """Set-password form fields: a new password entered twice, with
    length bounds taken from config."""
    min_len = asint(tg.config.get('auth.min_password_len', 6))
    max_len = asint(tg.config.get('auth.max_password_len', 30))
    return [
        ew.PasswordField(
            name='pw',
            label='New Password',
            validator=fev.UnicodeString(not_empty=True,
                                        min=min_len, max=max_len)),
        ew.PasswordField(name='pw2',
                         label='New Password (again)',
                         validator=fev.UnicodeString(not_empty=True)),
    ]
def _connect(self):
    """Create and store an SMTP client configured from tg.config.

    BUG FIX: the default ports were swapped — SSL defaulted to 25 and
    plaintext to 465; they now default to the standard 465 (SMTPS) and
    25 respectively.  STARTTLS is also negotiated *before* login so
    credentials are never sent over an unencrypted channel.
    """
    host = tg.config.get('smtp_server', 'localhost')
    if asbool(tg.config.get('smtp_ssl', False)):
        # Implicit-TLS SMTP; the standard SMTPS port is 465.
        smtp_client = smtplib.SMTP_SSL(
            host, asint(tg.config.get('smtp_port', 465)))
    else:
        # Plain SMTP; the standard port is 25.
        smtp_client = smtplib.SMTP(
            host, asint(tg.config.get('smtp_port', 25)))
    # Upgrade to TLS (if configured) before authenticating.
    if asbool(tg.config.get('smtp_tls', False)):
        smtp_client.starttls()
    if tg.config.get('smtp_user', None):
        smtp_client.login(tg.config['smtp_user'],
                          tg.config['smtp_password'])
    self._client = smtp_client
def fields(self):
    """Set-password form fields: a new password entered twice, with
    configured length bounds."""
    min_len = asint(tg.config.get('auth.min_password_len', 6))
    max_len = asint(tg.config.get('auth.max_password_len', 30))
    pw = ew.PasswordField(
        name='pw',
        label='New Password',
        validator=fev.UnicodeString(not_empty=True,
                                    min=min_len, max=max_len))
    pw_again = ew.PasswordField(
        name='pw2',
        label='New Password (again)',
        validator=fev.UnicodeString(not_empty=True))
    return [pw, pw_again]
def Totp(self, key):
    """Build a TOTP object for *key*; when *key* is falsy a fresh
    random 20-byte (160-bit) secret is generated."""
    if not key:
        key = os.urandom(20)  # 160-bit secret, the recommended size
    digits = asint(config.get('auth.multifactor.totp.length', 6))
    period = asint(config.get('auth.multifactor.totp.time', 30))
    totp = TOTP(key, digits, SHA1(), period, backend=default_backend())
    # Expose the key for convenience; otherwise callers must reach
    # into the private `totp._hotp._key`.
    totp.key = key
    return totp
def show(self, id, format='html'):
    """Instance overview page; also serves JSON and RSS variants.

    :param id: instance identifier (404s when unknown)
    :param format: 'html', 'json' or 'rss'
    """
    c.page_instance = get_entity_or_abort(model.Instance, id)
    require.instance.show(c.page_instance)
    if format == 'json':
        return render_json(c.page_instance)
    if format == 'rss':
        return self.activity(id, format)
    # Canonicalize: viewing an instance from another instance's context
    # redirects to its own URL.
    if c.page_instance != c.instance:
        redirect(h.entity_url(c.page_instance))
    c.tile = tiles.instance.InstanceTile(c.page_instance)
    proposals = model.Proposal.all(instance=c.page_instance)
    c.new_proposals_pager = pager.proposals(
        proposals, size=7, enable_sorts=False,
        enable_pages=False, default_sort=sorting.entity_newest)
    c.sidebar_delegations = (_('Delegations are enabled.')
                             if c.page_instance.allow_delegate
                             else _('Delegations are disabled.'))
    #pages = model.Page.all(instance=c.page_instance,
    #                       functions=[model.Page.NORM])
    #c.top_pages_pager = pager.pages(
    #    pages, size=7, enable_sorts=False,
    #    enable_pages=False, default_sort=sorting.norm_selections)
    #tags = model.Tag.popular_tags(limit=40)
    #c.tags = sorted(text.tag_cloud_normalize(tags),
    #                key=lambda (k, c, v): k.name)
    # Upcoming milestones are only shown when enabled in config AND the
    # instance actually has milestones.
    if asbool(config.get('adhocracy.show_instance_overview_milestones')) \
            and c.page_instance.milestones:
        number = asint(config.get(
            'adhocracy.number_instance_overview_milestones', 3))
        milestones = model.Milestone.all_future_q(
            instance=c.page_instance).limit(number).all()
        c.next_milestones_pager = pager.milestones(
            milestones, size=number, enable_sorts=False,
            enable_pages=False, default_sort=sorting.milestone_time)
    events = model.Event.find_by_instance(c.page_instance, limit=3)
    c.events_pager = pager.events(events, enable_pages=False,
                                  enable_sorts=False)
    # NOTE(review): comments/members counts are global, proposals count
    # is per-instance — confirm this asymmetry is intended.
    c.stats = {
        'comments': model.Comment.all_q().count(),
        'proposals': model.Proposal.all_q(
            instance=c.page_instance).count(),
        'members': model.Membership.all_q().count()
    }
    c.tutorial_intro = _('tutorial_instance_show_intro')
    c.tutorial = 'instance_show'
    return render("/instance/show.html")
def handle_paging(self, limit, page, default=25):
    """Sanitize paging parameters.

    Applies the user's stored paging preference, clamps limit to
    [1, limit_param_max] and page to >= 0, and returns the tuple
    (limit, page, start offset).
    """
    limit = self.manage_paging_preference(limit, default)
    limit = min(max(int(limit), 1),
                asint(config.get('limit_param_max', 500)))
    page = max(int(page), 0)
    return limit, page, page * limit
def index(self, format='html'):
    """Site start page; also serves an RSS variant of the event feed."""
    require.proposal.index()
    # Inside an instance, the instance's own page is the start page.
    if c.instance:
        redirect(h.entity_url(c.instance))
    # How many instances to list: 0 = none, -1 = all, N > 0 = first N.
    instances_in_root = asint(
        config.get('adhocracy.startpage.instances.list_length', 0))
    if instances_in_root > 0:
        c.instances = model.Instance.all(limit=instances_in_root)
    elif instances_in_root == -1:
        c.instances = model.Instance.all()
    c.page = StaticPage('index')
    #query = self.form_result.get('proposals_q')
    #proposals = libsearch.query.run(query,
    #               entity_type=model.Proposal)[:10]
    c.milestones = model.Milestone.all()
    #c.proposals_pager = pager.proposals(proposals)
    #c.proposals = c.proposals_pager.here()
    # Site-wide statistics shown on the front page.
    c.stats_global = {
        "members": model.User.all_q().count(),
        "comments": model.Comment.all_q().count(),
        "proposals": model.Proposal.all_q().count(),
        "votes": model.Vote.all_q().count(),
    }
    if format == 'rss':
        return EventController().all(format='rss')
    return render('index.html')
def index(self, format='html'):
    """Site start page; also serves an RSS variant of the event feed."""
    require.proposal.index()
    # Inside an instance, the instance's own page is the start page.
    if c.instance:
        redirect(h.entity_url(c.instance))
    # How many instances to list: 0 = none, -1 = all, N > 0 = first N.
    instances_in_root = asint(
        config.get('adhocracy.startpage.instances.list_length', 0))
    if instances_in_root > 0:
        c.instances = model.Instance.all(limit=instances_in_root)
    elif instances_in_root == -1:
        c.instances = model.Instance.all()
    c.page = StaticPage('index')
    #query = self.form_result.get('proposals_q')
    #proposals = libsearch.query.run(query,
    #               entity_type=model.Proposal)[:10]
    c.milestones = model.Milestone.all()
    #c.proposals_pager = pager.proposals(proposals)
    #c.proposals = c.proposals_pager.here()
    # Site-wide statistics shown on the front page.
    c.stats_global = {
        "members": model.User.all_q().count(),
        "comments": model.Comment.all_q().count(),
        "proposals": model.Proposal.all_q().count(),
        "votes": model.Vote.all_q().count(),
    }
    if format == 'rss':
        return EventController().all(format='rss')
    return render('index.html')
def last_commit_ids(self, commit, paths):
    '''
    Return a mapping {path: commit_id} of the _id of the last commit to
    touch each path, starting from the given commit.

    Chunks the set of paths based on lcd_thread_chunk_size and runs
    each chunk (if more than one) in a separate thread.

    Each thread will call :meth:`_get_last_commit` to get the commit ID
    and list of changed files for the last commit to touch any file in
    a given chunk.
    '''
    if not paths:
        return {}
    timeout = float(tg.config.get('lcd_timeout', 60))
    start_time = time()
    paths = list(set(paths))  # remove dupes
    result = {}  # will be appended to from each thread
    chunks = Queue()
    lcd_chunk_size = asint(tg.config.get('lcd_thread_chunk_size', 10))
    num_threads = 0
    # Enqueue one chunk of paths per worker.
    for s in range(0, len(paths), lcd_chunk_size):
        chunks.put(paths[s:s+lcd_chunk_size])
        num_threads += 1

    def get_ids():
        # Worker: walk ancestry from `commit` until every path in this
        # chunk is attributed to a commit, or the shared deadline hits.
        paths = set(chunks.get())
        try:
            commit_id = commit._id
            while paths and commit_id:
                if time() - start_time >= timeout:
                    log.error('last_commit_ids timeout for %s on %s',
                              commit._id, ', '.join(paths))
                    break
                commit_id, changes = self._get_last_commit(commit._id, paths)
                if commit_id is None:
                    break
                # Record every still-pending path this commit touched,
                # then continue walking for the remainder.
                changed = prefix_paths_union(paths, changes)
                for path in changed:
                    result[path] = commit_id
                paths -= changed
        except Exception as e:
            log.exception('Error in SCM thread: %s', e)
        finally:
            chunks.task_done()

    if num_threads == 1:
        # Single chunk: no need to spawn a thread.
        get_ids()
    else:
        for i in range(num_threads):
            t = Thread(target=get_ids)
            t.start()
        # reimplement chunks.join() but with a timeout
        # see: http://bugs.python.org/issue9634
        # (giving threads a bit of extra cleanup time in case they timeout)
        chunks.all_tasks_done.acquire()
        try:
            endtime = time() + timeout + 0.5
            while chunks.unfinished_tasks and endtime > time():
                chunks.all_tasks_done.wait(endtime - time())
        finally:
            chunks.all_tasks_done.release()
    return result
def __init__(self, **kw):
    """Initialize configuration from keyword options, coercing boolean
    and integer properties and discovering available template
    rendering engines via entry points."""
    for k, v in kw.items():
        setattr(self, k, v)
    # Set boolean properties
    for prop in (
        "inject_resources",
        "serve_resources",
        "serve_controllers",
        "params_as_vars",
        "strict_engine_selection",
        "debug",
    ):
        setattr(self, prop, asbool(getattr(self, prop)))
    # Set integer properties
    for prop in ("res_max_age", "bufsize"):
        setattr(self, prop, asint(getattr(self, prop)))
    if self.auto_reload_templates is None:
        self.auto_reload_templates = self.debug
    self.available_rendering_engines = {}
    for e in iter_entry_points("python.templating.engines"):
        if not self.strict_engine_selection or \
                e.name in self.preferred_rendering_engines:
            try:
                self.available_rendering_engines[e.name] = e.load()
            except DistributionNotFound:
                # Engine registered but its distribution isn't
                # installed; skip it.
                pass
    # Drop preferred engines that aren't actually available.
    # BUG FIX: iterate over a copy — removing items from the list while
    # iterating it skips elements, so consecutive unavailable engines
    # were only partially removed.
    for engine_name in list(self.preferred_rendering_engines):
        if engine_name not in self.available_rendering_engines:
            self.preferred_rendering_engines.remove(engine_name)
def configure(self, **kw):
    """Handle POST to delete the Application or update its
    ``config.options``.
    """
    with h.push_config(c, app=self.app):
        require_access(self.app, 'configure')
        is_admin = self.app.config.tool_name == 'admin'
        if kw.pop('delete', False):
            if is_admin:
                # The admin tool must never be removed.
                flash('Cannot delete the admin tool, sorry....')
                redirect('.')
            c.project.uninstall_app(self.app.config.options.mount_point)
            redirect('..')
        for opt in self.app.config_options:
            if opt in Application.config_options:
                # skip base options (mount_point, mount_label, ordinal)
                continue
            val = kw.get(opt.name, '')
            # Coerce posted string values to the option's declared type.
            if opt.ming_type == bool:
                val = asbool(val or False)
            elif opt.ming_type == int:
                val = asint(val or 0)
            self.app.config.options[opt.name] = val
        if is_admin:
            # possibly moving admin mount point
            redirect('/'
                     + c.project._id
                     + self.app.config.options.mount_point
                     + '/'
                     + self.app.config.options.mount_point
                     + '/')
        else:
            redirect(request.referer)
def view_rebuild(request):
    """Rebuild view: queue a resize job for every valid image directly
    under the requested directory and start a worker thread to process
    the queued jobs."""
    settings = request.registry.settings
    root = settings.get('images_root')
    path = request.params.get('d', '')
    what = request.params.get('w', 'thumbs_meta')
    full_path = "%s/%s" % (root, path)
    format_ = settings.get('resize_format')
    quality = asint(settings.get('resize_quality'))
    sizes = aslist(settings.get('image_sizes'))
    # Reject empty paths and path-traversal attempts.
    if not path or '..' in path:
        return HTTPForbidden()
    jobs_created = []
    for entry in os.listdir(full_path):
        candidate = "%s/%s" % (full_path, entry)
        if not utils.image.is_valid_image(candidate):
            continue
        job_id = 'rebuild-%s-%s' % (what, candidate,)
        # Only queue a job if one isn't already recorded for this file.
        if not request.db.get_job(job_id):
            job_options = {'what': what,
                           'path': candidate,
                           'format': format_,
                           'quality': quality,
                           'sizes': sizes}
            request.db.save_job(job_id, job_options)
            jobs_created.append(job_id)
    Thread(target=utils.image.process_resize_jobs,
           args=(request.db, jobs_created)).start()
    return {'jobs': jobs_created}
def commit_browser_data(self, start=None, limit=None, **kw):
    """Return data for the commit-browser UI: up to *limit* commit ids
    (default 100) starting at *start*, a skeleton tree entry per
    commit, and ``next_commit`` when another page exists."""
    data = {
        'commits': [],
        'next_column': 1,
        'max_row': 0,
        'built_tree': {},
        'next_commit': None,
    }
    # BUG FIX: asint(None) raises TypeError (not ValueError), so a
    # missing limit previously crashed here; a malformed string also
    # leaked through as a non-int.  Catch both and reset to None so the
    # default of 100 applies.
    try:
        limit = asint(limit)
    except (TypeError, ValueError):
        limit = None
    limit = limit or 100
    # Ask for one extra commit to detect whether a next page exists.
    for i, commit in enumerate(c.app.repo.log(revs=start, id_only=False,
                                              page_size=limit + 1)):
        if i >= limit:
            data['next_commit'] = str(commit['id'])
            break
        data['commits'].append(str(commit['id']))
        data['built_tree'][commit['id']] = {
            'column': 0,
            'parents': map(str, commit['parents']),
            'short_id': '[r%s]' % commit['id'],
            'message': commit['message'],
            'oid': str(commit['id']),
            'row': i,
            'url': c.app.repo.url_for_commit(commit['id']),
        }
    data['max_row'] = len(data['commits']) - 1
    return data
def setup_genshi_renderer(self):
    """Set up a renderer and loader for Genshi templates.

    Override this to customize the internationalization filter or the
    template loader.
    """
    from tg.render import RenderGenshi
    from genshi.filters import Translator

    # Pick the dotted-name-aware loader unless plain lookup was asked for.
    if config.get('use_dotted_templatenames', True):
        from tg.dottednames.genshi_lookup \
            import GenshiTemplateLoader as TemplateLoader
    else:
        from genshi.template import TemplateLoader

    def template_loaded(template):
        """Plug-in our i18n function to Genshi, once the template is loaded.

        This function will be called by the Genshi TemplateLoader after
        loading the template.
        """
        translator = Translator(ugettext)
        template.filters.insert(0, translator)
        if hasattr(template, 'add_directives'):
            template.add_directives(Translator.NAMESPACE, translator)

    loader = TemplateLoader(
        search_path=self.paths.templates,
        max_cache_size=asint(self.get('genshi.max_cache_size', 30)),
        auto_reload=self.auto_reload_templates,
        callback=template_loaded)
    self.render_functions.genshi = RenderGenshi(loader)
def serve_file(fp, filename, content_type, last_modified=None,
               cache_expires=None, size=None, embed=True, etag=None):
    '''Sets the response headers and serves as a wsgi iter

    :param fp: open file-like object to stream
    :param embed: when False, send a Content-Disposition attachment
        header so the browser downloads instead of displaying inline
    :param etag: explicit ETag; derived from filename + last_modified
        when omitted
    '''
    # Derive an ETag when none was supplied.
    if not etag and filename and last_modified:
        etag = u'{0}?{1}'.format(filename, last_modified).encode('utf-8')
    if etag:
        # May short-circuit the request with a 304 Not Modified.
        etag_cache(etag)
    # Clear the default header first, then set the real content type.
    pylons.response.headers['Content-Type'] = ''
    pylons.response.content_type = content_type.encode('utf-8')
    pylons.response.cache_expires = cache_expires or asint(
        tg.config.get('files_expires_header_secs', 60 * 60))
    pylons.response.last_modified = last_modified
    if size:
        pylons.response.content_length = size
    # Drop no-cache headers that would defeat the expiry set above.
    if 'Pragma' in pylons.response.headers:
        del pylons.response.headers['Pragma']
    if 'Cache-Control' in pylons.response.headers:
        del pylons.response.headers['Cache-Control']
    if not embed:
        pylons.response.headers.add(
            'Content-Disposition',
            'attachment;filename="%s"' % filename.encode('utf-8'))
    # http://code.google.com/p/modwsgi/wiki/FileWrapperExtension
    block_size = 4096
    if 'wsgi.file_wrapper' in tg.request.environ:
        return tg.request.environ['wsgi.file_wrapper'](fp, block_size)
    else:
        return iter(lambda: fp.read(block_size), '')
def command(self):
    """Start the inbound-mail SMTP server and run the asyncore loop
    forever (faulthandler enabled to aid debugging hangs/crashes)."""
    faulthandler.enable()
    self.basic_setup()
    listen_addr = (tg.config.get('forgemail.host', '0.0.0.0'),
                   asint(tg.config.get('forgemail.port', 8825)))
    MailServer(listen_addr, None)
    asyncore.loop()
def connect_database_from_config(app_config):
    """Connect to the main database, take config from app_config"""
    port = app_config.get('db_port')
    # An unset/empty port is passed through as-is (driver default).
    port = asint(port) if port else port
    return mongokit.Connection(app_config.get('db_host'), port)
def worker(self):
    """Task-daemon main loop: claim MonQTask documents and execute each
    through the 'task' WSGI app, restarting the loop on errors."""
    from allura import model as M
    # Worker identity used to lock tasks: hostname + pid.
    name = '%s pid %s' % (os.uname()[1], os.getpid())
    if self.options.dry_run:
        return
    wsgi_app = loadapp('config:%s#task' % self.args[0],
                       relative_to=os.getcwd())
    poll_interval = asint(pylons.config.get('monq.poll_interval', 10))

    def start_response(status, headers, exc_info=None):
        # Responses are discarded; tasks run purely for side effects.
        pass

    def waitfunc_amqp():
        # Block on the AMQP queue for up to poll_interval seconds.
        try:
            return pylons.g.amq_conn.queue.get(timeout=poll_interval)
        except Queue.Empty:
            return None

    def waitfunc_noq():
        # No message queue available: plain polling sleep.
        time.sleep(poll_interval)

    if pylons.g.amq_conn:
        waitfunc = waitfunc_amqp
    else:
        waitfunc = waitfunc_noq
    while True:
        if pylons.g.amq_conn:
            pylons.g.amq_conn.reset()
        try:
            while True:
                task = M.MonQTask.get(process=name, waitfunc=waitfunc)
                # Build the (fake) request
                r = Request.blank('/--%s--/' % task.task_name,
                                  dict(task=task))
                # Drain the app iterable so the task fully executes.
                list(wsgi_app(r.environ, start_response))
        except Exception:
            # Any unexpected error: log, back off, and restart the loop.
            base.log.exception('Taskd, restart in 10s')
            time.sleep(10)
def configure(self, **kw):
    """Handle POST to delete the Application or update its
    ``config.options``.
    """
    with h.push_config(c, app=self.app):
        require_access(self.app, 'configure')
        is_admin = self.app.config.tool_name == 'admin'
        if kw.pop('delete', False):
            if is_admin:
                # The admin tool must never be removed.
                flash('Cannot delete the admin tool, sorry....')
                redirect('.')
            c.project.uninstall_app(self.app.config.options.mount_point)
            redirect('..')
        for opt in self.app.config_options:
            if opt in Application.config_options:
                continue  # skip base options (mount_point, mount_label, ordinal)
            val = kw.get(opt.name, '')
            # Coerce posted string values to the option's declared type.
            if opt.ming_type == bool:
                val = asbool(val or False)
            elif opt.ming_type == int:
                val = asint(val or 0)
            self.app.config.options[opt.name] = val
        if is_admin:
            # possibly moving admin mount point
            redirect('/'
                     + c.project._id
                     + self.app.config.options.mount_point
                     + '/'
                     + self.app.config.options.mount_point
                     + '/')
        else:
            redirect(request.referer)
def connect_database_from_config(app_config):
    """Connect to the main database, take config from app_config"""
    host = app_config.get('db_host')
    port = app_config.get('db_port')
    if port:
        port = asint(port)
    connection = mongokit.Connection(host, port)
    return connection
def __init__(self, app, config):
    """Wrap *app*, reading gzip compression settings from *config*.

    Unknown mimetypes in either compression list are logged so that
    configuration typos show up at startup.
    """
    self.app = app
    self.compress_level = asint(config.get('gzip.compress_level', '5'))
    self.compress = set(
        aslist(config.get('gzip.compress', ''), ',', strip=True))
    self.do_not_compress = set(
        aslist(config.get('gzip.do_not_compress', ''), ',', strip=True))
    for mimetype in self.compress | self.do_not_compress:
        if mimetypes.guess_extension(mimetype) is None:
            LOG.warning('Unrecognised mimetype in server configuration: %s',
                        mimetype)
    # NOTE(review): normpath(None) raises when deploy.cache_dir is
    # unset — presumably it is a required setting; confirm.
    self.cache_dir = normpath(config.get('deploy.cache_dir', None))
def compression(app, config):
    """Wrap *app* in gzip middleware when web.compress is enabled;
    otherwise return *app* unchanged."""
    if not defaultbool(config.get('web.compress', False), ['paste']):
        return app
    # Enable compression if requested.
    log.debug("Enabling HTTP compression.")
    from paste.gzipper import middleware as GzipMiddleware
    level = asint(config.get('web.compress.level', 6))
    return GzipMiddleware(app, compress_level=level)
def _connect(self):
    """Create and store an SMTP client configured from tg.config.

    BUG FIX: the default ports were swapped — SSL defaulted to 25 and
    plaintext to 465; they now default to the standard 465 (SMTPS) and
    25 respectively.  STARTTLS is also negotiated *before* login so
    credentials are never sent over an unencrypted channel.
    """
    host = tg.config.get("smtp_server", "localhost")
    timeout = float(tg.config.get("smtp_timeout", 10))
    if asbool(tg.config.get("smtp_ssl", False)):
        # Implicit-TLS SMTP; the standard SMTPS port is 465.
        smtp_client = smtplib.SMTP_SSL(
            host,
            asint(tg.config.get("smtp_port", 465)),
            timeout=timeout,
        )
    else:
        # Plain SMTP; the standard port is 25.
        smtp_client = smtplib.SMTP(
            host,
            asint(tg.config.get("smtp_port", 25)),
            timeout=timeout,
        )
    # Upgrade to TLS (if configured) before authenticating.
    if asbool(tg.config.get("smtp_tls", False)):
        smtp_client.starttls()
    if tg.config.get("smtp_user", None):
        smtp_client.login(tg.config["smtp_user"], tg.config["smtp_password"])
    self._client = smtp_client
def enforce_limit(self):
    '''Returns False if limit is reached, otherwise True'''
    # Never sent before: always allowed.
    if self.last_sent is None:
        return True
    config_type = self.type.replace('-', '_')
    window_secs = asint(config.get('webhook.%s.limit' % config_type, 30))
    elapsed = dt.datetime.utcnow() - self.last_sent
    return elapsed > dt.timedelta(seconds=window_secs)
def index(self, format='html'):
    """Site start page (instance list, newest proposals, optional
    global stats); also serves an RSS variant of the event feed."""
    require.proposal.index()
    # Inside an instance, the instance's own page is the start page.
    if c.instance:
        redirect(h.entity_url(c.instance))
    # How many instances to list: 0 = none, -1 = all, N > 0 = first N.
    instances_in_root = asint(
        config.get('adhocracy.startpage.instances.list_length', 0))
    if instances_in_root > 0:
        c.instances = model.Instance.all(limit=instances_in_root)
    elif instances_in_root == -1:
        c.instances = model.Instance.all()
    c.page = get_static_page('index')
    proposals_number = asint(
        config.get('adhocracy.startpage.proposals.list_length', 0))
    if proposals_number > 0:
        # Newest proposals across all regular (non-special) instances.
        proposals = model.Proposal.all_q()\
            .join(model.Instance).filter(not_(
                model.Instance.key.in_(model.Instance.SPECIAL_KEYS)))\
            .order_by(model.Proposal.create_time.desc())
        c.new_proposals_pager = pager.proposals(
            proposals, size=proposals_number,
            default_sort=sorting.entity_newest,
            enable_pages=False,
            enable_sorts=False)
    else:
        c.new_proposals_pager = None
    if asbool(config.get('adhocracy.show_stats_on_frontpage', 'true')):
        c.stats_global = {
            "members": model.User.all_q().count(),
            "comments": model.Comment.all_q().count(),
            "proposals": model.Proposal.all_q().count(),
            "votes": model.Vote.all_q().count(),
        }
    if format == 'rss':
        return EventController().all(format='rss')
    return render('index.html')
def __call__(self, environ, start_response):
    """Invoke the Controller"""
    c.instance = model.instance_filter.get_instance()
    if c.instance is not None:
        # setup a global variable to mark the current item in
        # the global navigation
        c.active_global_nav = 'instances'
    else:
        c.active_global_nav = 'home'
    c.user = environ.get('repoze.who.identity', {}).get('user')
    # make sure we're not using a detached user object
    if c.user:
        c.user = model.meta.Session.merge(c.user)
    # Banned or deleted accounts are treated as anonymous.
    if c.user and (c.user.banned or c.user.delete_time):
        c.user = None
    c.active_controller = request.environ.get('pylons.routes_dict')\
        .get('controller')
    c.debug = asbool(config.get('debug'))
    i18n.handle_request()
    # Optional client-side page-time monitoring; disabled when the
    # configured interval is not positive.
    monitor_page_time_interval = asint(
        config.get('adhocracy.monitor_page_time_interval', -1))
    if monitor_page_time_interval > 0:
        c.monitor_page_time_interval = monitor_page_time_interval
        c.monitor_page_time_url = h.base_url('/stats/on_page')
    if asbool(config.get('adhocracy.monitor_external_links', 'False')):
        c.monitor_external_links_url = h.base_url('/stats/record_external')
    h.add_rss("%s News" % h.site.name(),
              h.base_url('/feed.rss', None))
    if c.instance:
        h.add_rss("%s News" % c.instance.label,
                  h.base_url('/instance/%s.rss' % c.instance.key))
    h.add_meta("description",
               _("A liquid democracy platform for making decisions in "
                 "distributed, open groups by cooperatively creating "
                 "proposals and voting on them to establish their "
                 "support."))
    h.add_meta("keywords",
               _("adhocracy, direct democracy, liquid democracy, liqd, "
                 "democracy, wiki, voting,participation, group decisions, "
                 "decisions, decision-making"))
    try:
        return WSGIController.__call__(self, environ, start_response)
    except Exception, e:
        # Roll back so a failed request doesn't leak a half-written
        # transaction, then re-raise for upstream error handling.
        log.exception(e)
        model.meta.Session.rollback()
        raise
def show(self, id, format="html"):
    """Instance overview page; also serves JSON and RSS variants.

    :param id: instance identifier (404s when unknown)
    :param format: 'html', 'json' or 'rss'
    """
    c.page_instance = get_entity_or_abort(model.Instance, id)
    require.instance.show(c.page_instance)
    if format == "json":
        return render_json(c.page_instance)
    if format == "rss":
        return self.activity(id, format)
    # Canonicalize: viewing an instance from another instance's context
    # redirects to its own URL.
    if c.page_instance != c.instance:
        redirect(h.entity_url(c.page_instance))
    c.tile = tiles.instance.InstanceTile(c.page_instance)
    proposals = model.Proposal.all(instance=c.page_instance)
    c.new_proposals_pager = pager.proposals(
        proposals, size=7, enable_sorts=False,
        enable_pages=False, default_sort=sorting.entity_newest
    )
    c.sidebar_delegations = (
        _("Delegations are enabled.")
        if c.page_instance.allow_delegate
        else _("Delegations are disabled.")
    )
    # pages = model.Page.all(instance=c.page_instance,
    #                        functions=[model.Page.NORM])
    # c.top_pages_pager = pager.pages(
    #     pages, size=7, enable_sorts=False,
    #     enable_pages=False, default_sort=sorting.norm_selections)
    # tags = model.Tag.popular_tags(limit=40)
    # c.tags = sorted(text.tag_cloud_normalize(tags),
    #                 key=lambda (k, c, v): k.name)
    # Upcoming milestones are shown only when enabled in config AND the
    # instance actually has milestones.
    if asbool(config.get("adhocracy.show_instance_overview_milestones")) and c.page_instance.milestones:
        number = asint(config.get("adhocracy.number_instance_overview_milestones", 3))
        milestones = model.Milestone.all_future_q(instance=c.page_instance).limit(number).all()
        c.next_milestones_pager = pager.milestones(
            milestones, size=number, enable_sorts=False,
            enable_pages=False, default_sort=sorting.milestone_time
        )
    events = model.Event.find_by_instance(c.page_instance, limit=3)
    c.events_pager = pager.events(events, enable_pages=False,
                                  enable_sorts=False)
    # NOTE(review): comments/members counts are global, proposals count
    # is per-instance — confirm this asymmetry is intended.
    c.stats = {
        "comments": model.Comment.all_q().count(),
        "proposals": model.Proposal.all_q(instance=c.page_instance).count(),
        "members": model.Membership.all_q().count(),
    }
    c.tutorial_intro = _("tutorial_instance_show_intro")
    c.tutorial = "instance_show"
    return render("/instance/show.html")
def get(request):
    """Fetch messages from the queue named in the URL; returns a
    {message-url: data} dict, or 204 No Content when there are none."""
    params = V.get_schema.to_python(request.params, request)
    sleep_ms = asint(request.registry.settings['sleep_ms'])
    # Ignore gets from the queue, as they skew our response time results
    messages = MessageGetter.get(request.matchdict['qname'],
                                 sleep_ms,
                                 params['client'],
                                 params['timeout'],
                                 params['count'])
    if not messages:
        return exc.HTTPNoContent()
    return dict((msg.url(request), msg.data) for msg in messages)
def regenerate_codes(self, user):
    '''
    Regenerate and replace existing codes

    :param user: a :class:`User <allura.model.auth.User>`
    :return: codes, ``list[str]``
    '''
    count = asint(config.get('auth.multifactor.recovery_code.count', 10))
    new_codes = [self.generate_one_code() for _ in xrange(count)]
    self.replace_codes(user, new_codes)
    return new_codes
def command(self):
    """Run the IRC bot forever, reconnecting 5 seconds after any
    failure of the asyncore loop."""
    self.basic_setup()
    base.log.info('IRCBot starting up...')
    while True:
        try:
            IRCBot(
                tg.config.get('forgechat.host', 'irc.freenode.net'),
                asint(tg.config.get('forgechat.port', '6667')))
            asyncore.loop()
        except Exception:
            base.log.exception(
                'Error in ircbot asyncore.loop(), restart in 5s')
            time.sleep(5)
def command(self):
    """Run the IRC bot forever, reconnecting 5 seconds after any
    failure of the asyncore loop."""
    self.basic_setup()
    base.log.info('IRCBot starting up...')
    while True:
        try:
            IRCBot(tg.config.get('forgechat.host', 'irc.freenode.net'),
                   asint(tg.config.get('forgechat.port', '6667')))
            asyncore.loop()
        except Exception:
            base.log.exception(
                'Error in ircbot asyncore.loop(), restart in 5s')
            time.sleep(5)
def paging_sanitizer(limit, page, total_count=sys.maxint, zero_based_pages=True):
    """Return limit, page - both converted to int and constrained to
    valid ranges based on total_count.

    Useful for sanitizing limit and page query params.
    """
    # Clamp limit into [1, configured maximum].
    upper_limit = asint(tg.config.get('limit_param_max', 500))
    limit = min(max(int(limit), 1), upper_limit)
    # Ceiling division: number of pages needed to hold total_count items.
    last_page = total_count // limit + (1 if total_count % limit else 0)
    if zero_based_pages:
        last_page -= 1
    last_page = max(0, last_page)
    lowest_page = 0 if zero_based_pages else 1
    page = min(max(int(page or 0), lowest_page), last_page)
    return limit, page
def paste_demo_server_factory(global_conf, **local_conf):
    """
    Example of PasteDeploy server factory using paste's httpserver
    """
    # Local settings override the global section.
    settings = dict(global_conf)
    settings.update(local_conf)
    bind_port = asint(settings.get('port') or 8080)
    bind_host = settings.get('host') or '127.0.0.1'

    def server(app):
        # Import lazily so the factory can be constructed without paste
        # being loaded until the server actually starts.
        from paste import httpserver
        httpserver.serve(app, bind_host, bind_port)
    return server
def test_regenerate_codes(self):
    """regenerate_codes should persist the configured number of codes
    for the given user via replace_codes."""
    class CapturingRecoveryService(RecoveryCodeService):
        # Capture instead of persisting so we can assert on the call.
        def replace_codes(self, user, codes):
            self.saved_user = user
            self.saved_codes = codes

    service = CapturingRecoveryService()
    fake_user = Mock(username='******')
    service.regenerate_codes(fake_user)
    expected_count = asint(
        config.get('auth.multifactor.recovery_code.count', 10))
    assert_equal(service.saved_user, fake_user)
    assert_equal(len(service.saved_codes), expected_count)
def __init__(self, app, config):
    """Wrap *app*, reading gzip and cache settings from *config*.

    :param app: the WSGI application to wrap
    :param config: mapping of deployment settings; reads
        ``gzip.compress_level``, ``gzip.compress``,
        ``gzip.do_not_compress`` and ``deploy.cache_dir``
    """
    self.app = app
    self.compress_level = asint(config.get('gzip.compress_level', '5'))
    # Comma-separated mimetype lists -> sets for fast membership tests.
    self.compress = set(
        aslist(config.get('gzip.compress', ''), ',', strip=True))
    self.do_not_compress = set(
        aslist(config.get('gzip.do_not_compress', ''), ',', strip=True))
    for m in (self.compress | self.do_not_compress):
        if mimetypes.guess_extension(m) is None:
            # Lazy %-args: only formatted if the warning is emitted.
            LOG.warning(
                'Unrecognised mimetype in server configuration: %s', m)
    # BUGFIX: normpath(None) raises when 'deploy.cache_dir' is unset;
    # keep None as-is and only normalize a configured path.
    cache_dir = config.get('deploy.cache_dir', None)
    self.cache_dir = normpath(cache_dir) if cache_dir is not None else None
def amq_conn(self):
    """Build an AMQP connection from config.

    Returns None when AMQP is disabled, a MockAMQ when mocking is
    configured, otherwise a real Connection.
    """
    if not asbool(config.get('amqp.enabled', 'true')):
        return None
    if asbool(config.get('amqp.mock')):
        return MockAMQ(self)
    return Connection(
        hostname=config.get('amqp.hostname', 'localhost'),
        port=asint(config.get('amqp.port', 5672)),
        userid=config.get('amqp.userid', 'testuser'),
        password=config.get('amqp.password', 'testpw'),
        vhost=config.get('amqp.vhost', 'testvhost'))
def diff(self, prev_commit, fmt=None, prev_file=None, **kw):
    '''
    Compute a diff of this blob against its version in a previous commit.

    :param prev_commit: previous commit to compare against
    :param fmt: "sidebyside", or anything else for "unified"
    :param prev_file: previous filename, if different
    :return: dict with keys ``a`` (old blob), ``b`` (new blob), ``diff``
        (HTML table for sidebyside, unified-diff text otherwise, or a
        placeholder message)
    '''
    try:
        path, filename = os.path.split(self._blob.path())
        a_ci = c.app.repo.commit(prev_commit)
        a = a_ci.get_path(prev_file or self._blob.path())
        apath = a.path()
    except Exception:
        # prev commit doesn't have the file
        a = M.repository.EmptyBlob()
        apath = ''
    b = self._blob

    # Binary files get a fixed message instead of a diff.
    if not self._blob.has_html_view:
        diff = "Cannot display: file marked as a binary type."
        return dict(a=a, b=b, diff=diff)

    # could consider making Blob.__iter__ do unicode conversion?
    # py2 unified_diff can handle some unicode but not consistently, so best to do ensure_str (can drop it on py3)
    la = [six.ensure_str(h.really_unicode(line)) for line in a]
    lb = [six.ensure_str(h.really_unicode(line)) for line in b]
    adesc = 'a' + h.really_unicode(apath)
    bdesc = 'b' + h.really_unicode(b.path())

    # Remember the chosen format in the session; fall back to the last
    # remembered one when no explicit fmt is given.
    if not fmt:
        fmt = web_session.get('diformat', '')
    else:
        web_session['diformat'] = fmt
        web_session.save()
    if fmt == 'sidebyside':
        if max(a.size, b.size) > asint(
                tg.config.get('scm.view.max_syntax_highlight_bytes', 500000)):
            # have to check the original file size, not diff size, because difflib._mdiff inside HtmlSideBySideDiff
            # can take an extremely long time on large files (and its even a generator)
            diff = '<em>File too large for side-by-side view</em>'
        else:
            hd = HtmlSideBySideDiff()
            diff = hd.make_table(la, lb, adesc, bdesc)
    else:
        # py2 unified_diff can handle some unicode but not consistently, so best to do str() and ensure_str()
        # (can drop it on py3)
        diff = str('').join(
            difflib.unified_diff(la, lb, six.ensure_str(adesc), six.ensure_str(bdesc)))
    return dict(a=a, b=b, diff=diff)
def index(self, **kw):
    """Serve a repository file: raw download, diff view, or normal display.

    Dispatches on query params: ``format=raw`` downloads the blob
    (rejected if over the configured size limit), ``diff``/``barediff``
    render a comparison against another commit, otherwise the file is
    displayed (with stats suppressed for very large files unless
    ``force`` is passed).
    """
    if kw.pop('format', 'html') == 'raw':
        # Refuse raw downloads beyond the configured byte limit.
        if self._blob.size > asint(
                tg.config.get('scm.download.max_file_bytes', 30 * 1000 * 1000)):
            large_size = self._blob.size
            flash(
                'File is {}. Too large to download.'.format(
                    h.do_filesizeformat(large_size)), 'warning')
            raise exc.HTTPForbidden
        else:
            return self.raw()
    elif 'diff' in kw:
        tg.decorators.override_template(
            self.index, 'jinja:allura:templates/repo/diff.html')
        return self.diff(kw['diff'], kw.pop('diformat', None),
                         kw.pop('prev_file', None))
    elif 'barediff' in kw:
        # Same diff, minimal chrome template (for embedding).
        tg.decorators.override_template(
            self.index, 'jinja:allura:templates/repo/barediff.html')
        return self.diff(kw['barediff'], kw.pop('diformat', None),
                         kw.pop('prev_file', None))
    else:
        force_display = 'force' in kw
        # Files over the view limit skip code stats; template decides
        # what to render based on large_size/force_display.
        if self._blob.size > asint(
                tg.config.get('scm.view.max_file_bytes', 5 * 1000 * 1000)):
            large_size = self._blob.size
            stats = None
        else:
            large_size = False
            stats = utils.generate_code_stats(self._blob)
        return dict(
            blob=self._blob,
            stats=stats,
            force_display=force_display,
            large_size=large_size,
        )
def test_regenerate_codes(self):
    # Verify regenerate_codes hands the configured number of fresh codes
    # to replace_codes for the right user.
    class DummyRecoveryService(RecoveryCodeService):
        # Override persistence to capture the arguments for assertions.
        def replace_codes(self, user, codes):
            self.saved_user = user
            self.saved_codes = codes

    recovery = DummyRecoveryService()
    user = Mock(username='******')
    recovery.regenerate_codes(user)
    assert_equal(recovery.saved_user, user)
    assert_equal(
        len(recovery.saved_codes),
        asint(config.get('auth.multifactor.recovery_code.count', 10)))