def __init__(self, db_path=None, db_default_file=None, db_cache_path=None,
             con=None, dump_yaml_in_index=None, vonly=False):
    """Initialize the projects definition backend.

    Any path not provided falls back to the pecan configuration. When
    vonly is True only the path attributes are set, so the instance can
    be used for validation without touching Elasticsearch.
    """
    self.db_path = db_path if db_path else conf.get('db_path')
    self.db_default_file = (
        db_default_file if db_default_file else conf.get('db_default_file'))
    self.db_cache_path = (
        db_cache_path if db_cache_path else conf.get('db_cache_path'))
    if vonly:
        # Validation-only mode: skip any Elasticsearch interaction.
        return
    # Use a separate index for projects (same as for users) as mapping
    # name/type collisions would occur because commits use a dynamic
    # mapping.
    connector = con if con else index.Connector(index_suffix='projects')
    self.eprojects = EProjects(connector=connector)
    es_info = self.eprojects.es.info()
    self.el_version = es_info.get('version', {}).get('number', '')
    if dump_yaml_in_index:
        YAMLDefinition.__init__(
            self, self.db_path, self.db_default_file, self.db_cache_path)
        issues = self.validate()
        if issues:
            raise RuntimeError(issues)
        self._enrich_projects()
        projects, rid2projects = self._flatten_projects()
        self.eprojects.load(projects, rid2projects)
def __init__(self, db_path=None, db_default_file=None, db_cache_path=None):
    """Load the YAML backend and keep a digest of the loaded files.

    Any path not provided falls back to the pecan configuration; the
    cache path additionally falls back to the db path itself.
    """
    db_cache_path = db_cache_path or conf.get('db_cache_path') or db_path
    self.yback = YAMLBackend(
        db_path or conf.get('db_path'),
        db_default_file=db_default_file or conf.get('db_default_file'),
        db_cache_path=db_cache_path)
    self.yback.load_db()
    # Bug fix: SHA.new() requires bytes on Python 3 — hashing the raw
    # str raises a TypeError, so encode the concatenated hashes first.
    self.hashes_str = SHA.new(
        "".join(self.yback.hashes).encode(errors='ignore')).hexdigest()
    self.default_data, self.data = self.yback.get_data()
    self._merge()
def __init__(self, db_path=None, db_default_file=None, db_cache_path=None):
    """Load the YAML backend and compute a digest of its content.

    Paths not given fall back to the pecan configuration; the cache
    path additionally falls back to the db path itself.
    """
    cache = db_cache_path or conf.get('db_cache_path') or db_path
    self.yback = YAMLBackend(
        db_path or conf.get('db_path'),
        db_default_file=db_default_file or conf.get('db_default_file'),
        db_cache_path=cache)
    self.yback.load_db()
    joined = "".join(self.yback.hashes)
    self.hashes_str = SHA.new(joined.encode(errors='ignore')).hexdigest()
    self.default_data, self.data = self.yback.get_data()
    self._merge()
def __init__(self, db_path=None, db_default_file=None, vonly=False,
             db_cache_path=None):
    """Initialize the contributors definition backend.

    Paths fall back to the pecan configuration. Unless vonly is True,
    a users index connector is opened as well.
    """
    self.db_path = db_path or conf.get('db_path')
    self.db_default_file = db_default_file or conf.get('db_default_file')
    self.db_cache_path = db_cache_path or conf.get('db_cache_path')
    YAMLDefinition.__init__(
        self, self.db_path, self.db_default_file, self.db_cache_path)
    # Enrichment flags: nothing has been resolved yet.
    self.enriched_groups = False
    self.enriched_idents = False
    if vonly:
        # Validation-only mode: do not open the users index.
        return
    self._users = users.Users(index.Connector(index_suffix='users'))
def _gen_context_():
    """Build the template context for the branding/registration page.

    Pulls branding, contact-extra, billing, agreement and locale data
    from the pecan configuration. Returns the context dict.
    """
    context = dict(conf['branding'])
    # Idiom fix: 'not in' instead of 'not x in'.
    if 'brand_home' not in context:
        context['brand_home'] = 'http://www.' + context['brand_domain']
    if conf.get('enable_contact_extra', False):
        context['enable_contact_extra'] = True
        context['contact_extra_required'] = conf.get(
            'contact_extra_required', False)
        context['contact_extra_label'] = conf.get(
            'contact_extra_label', 'Extra')
        context['contact_extra_prompt'] = conf.get(
            'contact_extra_prompt', 'Extra data...')
    context['enable_billing'] = conf.get('enable_billing', True)
    context['fieldset_2_header'] = conf.get(
        'fieldset_2_header', 'Verification')
    context['codes'] = codes()
    context['countries'] = countries()
    context['timezones'] = tzlist()
    agreements = conf.get('agreements', None)
    # An empty container is already falsy — the explicit len() check
    # was redundant.
    if conf.get('enable_agreements', True) and agreements:
        context['agreements'] = {}
        # 'agreement_id' instead of 'id' to avoid shadowing the builtin.
        for agreement_id in agreements:
            descr = agreements[agreement_id].split('@')
            title = descr[0]
            # Fall back to the conventional doc location when no URL
            # part was given after the '@'.
            url = (descr[1] if len(descr) > 1
                   else '/brand/doc/' + agreement_id + '.html')
            context['agreements'][agreement_id] = {'title': title,
                                                   'url': url}
    return context
def callback(self, **kwargs):
    """Handle the OAuth2 provider callback and log the user in."""
    auth_context = kwargs
    # NOTE(review): 'response' is set to kwargs here, not to the pecan
    # response object — confirm this is intended.
    auth_context['response'] = kwargs
    auth_context['calling_back'] = True
    try:
        # Verify the state previously put in the db
        state = auth_context.get('state', None)
        back, provider = db.get_url(state)
        if not back:
            err = 'OAuth callback with forged state, discarding'
            logger.debug(err)
            raise base.UnauthenticatedError(err)
        auth_plugin = self.auth_plugins.get(provider)
        if not auth_plugin:
            msg = 'Unknown OAuth provider: %s' % provider
            logger.error(msg)
            raise base.UnauthenticatedError(msg)
        logger.debug('Callback called by OAuth provider %s' % provider)
        auth_context['back'] = back
        valid_user = auth_plugin.authenticate(**auth_context)
    except base.UnauthenticatedError as e:
        response.status = 401
        auth_methods = [k for k, v in conf.get('auth', {})]
        return render('login.html',
                      dict(back=back,
                           message='Authorization failure: %s' % e,
                           auth_methods=auth_methods))
    logger.info(
        '%s (%s) successfully authenticated with OAuth2.' %
        (valid_user['login'], valid_user['email']))
    common.setup_response(valid_user, back)
def authorize(self, request, uid=None):
    """Make sure the request is authorized.

    Returns the authorized user's uid or raises if unauthorized.
    """
    # Resolve the remote user from the usual proxy headers.
    if not request.remote_user:
        request.remote_user = request.headers.get('Remote-User')
    if not request.remote_user:
        request.remote_user = request.headers.get('X-Remote-User')
    if request.remote_user == '(null)':
        if request.headers.get('Authorization'):
            auth_header = request.headers.get('Authorization').split()[1]
            # Bug fix: b64decode() returns bytes on Python 3 — decode
            # before splitting the "user:password" pair.
            request.remote_user = base64.b64decode(
                auth_header).decode(errors='ignore').split(':')[0]
    if (request.remote_user == "admin" and
            request.headers.get('Admin-Token')):
        sent_admin_token = request.headers.get('Admin-Token')
        # If remote-user is admin and an admin-token is passed
        # authorized if the token is correct
        if sent_admin_token == conf.get('admin_token'):
            return 'admin'
    else:
        # If uid targeted by the request is the same
        # as the requester then authorize
        if uid and uid == request.remote_user:
            return uid
        if uid and uid != request.remote_user:
            raise UnauthorizedException("Admin action only")
    raise UnauthorizedException("unauthorized")
def callback(self, **kwargs):
    """Handle the Github OAuth callback and log the user in."""
    auth_context = kwargs
    auth_context['response'] = kwargs
    auth_context['calling_back'] = True
    try:
        # Verify the state previously put in the db
        state = auth_context.get('state', None)
        back, _ = db.get_url(state)
        if not back:
            err = 'GITHUB callback called with an unknown state.'
            raise base.UnauthenticatedError(err)
        auth_context['back'] = back
        valid_user = self.auth_plugin.authenticate(**auth_context)
    except base.UnauthenticatedError as e:
        response.status = 401
        auth_methods = [k for k, v in conf.get('auth', {})]
        return render('login.html',
                      dict(back=back,
                           message='Authorization failure: %s' % e,
                           auth_methods=auth_methods))
    logger.info(
        '%s (%s) successfully authenticated with github.' %
        (valid_user['login'], valid_user['email']))
    common.setup_response(valid_user, back)
def index(self, **kwargs):
    """Authenticate a client with a Github Personal Access Token."""
    if 'back' not in kwargs:
        logger.error('Client requests authentication without back url.')
        abort(422)
    auth_context = kwargs
    auth_context['response'] = response
    # Load the PAT auth plugin through stevedore.
    auth_plugin = driver.DriverManager(
        namespace='cauth.authentication',
        name='GithubPersonalAccessToken',
        invoke_on_load=True,
        invoke_args=(conf,)).driver
    try:
        valid_user = auth_plugin.authenticate(**auth_context)
    except base.UnauthenticatedError as e:
        response.status = 401
        auth_methods = [k for k, v in conf.get('auth', {})]
        return render('login.html',
                      dict(back=auth_context['back'],
                           message='Authorization failure: %s' % e,
                           auth_methods=auth_methods))
    msg = '%s (%s) authenticated with Github Personal Access Token.'
    logger.info(msg % (valid_user['login'], valid_user['email']))
    common.setup_response(valid_user, auth_context['back'])
def __init__(self, db_path=None, db_default_file=None):
    """Load definitions from the YAML backend and merge them."""
    path = db_path or conf.db_path
    default = db_default_file or conf.get('db_default_file')
    self.yback = YAMLBackend(path, db_default_file=default)
    self.yback.load_db()
    self.default_data, self.data = self.yback.get_data()
    self._merge()
def __init__(self, db_path, db_default_file=None, db_cache_path=None):
    """Read YAML files from a DB path.

    :param db_path: directory where data can be read. This is supposed
        to be user provided data to be verified by the caller and could
        overwrite data from the default file.
    :param db_default_file: path to a trusted file usually computed
        from an already verified data source.
    :param db_cache_path: directory to store cache files; falls back to
        the configured cache path, then to db_path.
    """
    self.db_path = db_path or conf.get('db_path')
    self.db_default_file = db_default_file
    self.db_cache_path = (
        db_cache_path or conf.get('db_cache_path') or self.db_path)
    self.default_data = None
    self.data = []
    # List of hashes
    self.hashes = []
def __init__(self, db_path, db_default_file=None, db_cache_path=None):
    """Class to read YAML files from a DB path.

    db_default_file: path to a trusted file, usually computed from an
        already verified data source.
    db_path: directory where data can be read; user-provided data to be
        verified by the caller, may overwrite data from the default file.
    db_cache_path: directory to store cache files; defaults to the
        configured cache path, then to db_path.
    """
    self.db_path = db_path or conf.get('db_path')
    self.db_default_file = db_default_file
    cache = db_cache_path or conf.get('db_cache_path')
    self.db_cache_path = cache or self.db_path
    self.default_data = None
    self.data = []
    # List of hashes
    self.hashes = []
def __init__(self, db_path=None, db_default_file=None, db_cache_path=None,
             con=None, dump_yaml_in_index=None, vonly=False):
    """Projects backend: resolve paths, connect the ES projects index
    and optionally dump the validated YAML definition into the index.
    """
    self.db_path = db_path or conf.get('db_path')
    self.db_default_file = db_default_file or conf.get('db_default_file')
    self.db_cache_path = db_cache_path or conf.get('db_cache_path')
    if vonly:
        # Validation-only mode: no Elasticsearch interaction.
        return
    # Use a separate index for projects (same as for users) as mapping
    # name/type collisions would occur because commits use a dynamic
    # mapping.
    self.eprojects = EProjects(
        connector=(con or index.Connector(index_suffix='projects')))
    es_info = self.eprojects.es.info()
    self.el_version = es_info.get('version', {}).get('number', '')
    if not dump_yaml_in_index:
        return
    YAMLDefinition.__init__(
        self, self.db_path, self.db_default_file, self.db_cache_path)
    issues = self.validate()
    if issues:
        raise RuntimeError(issues)
    self._enrich_projects()
    projects, rid2projects = self._flatten_projects()
    self.eprojects.load(projects, rid2projects)
def get_status(self):
    """Return instance status: custom text, project/repo counts,
    users-endpoint flag and version."""
    projects = Projects().get_projects(source=['name', 'refs'])
    all_refs = itertools.chain(*[p['refs'] for p in list(projects.values())])
    repo_names = set(ref['name'] for ref in all_refs)
    return {'customtext': index_custom_html,
            'projects': len(projects),
            'repos': len(repo_names),
            'users_endpoint': conf.get('users_endpoint', False),
            'version': rx_version}
def disk_has_space(_popen=None):
    """
    If the disk where repos/binaries live doesn't have enough space,
    fail the health check to prevent failing when the binaries are posted
    """
    run_df = _popen or subprocess.Popen
    # Check every configured storage root; unset paths are skipped.
    for path in (conf.get('repos_root'), conf.get('binary_root')):
        if not path:
            continue
        proc = run_df(['df', path],
                      stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        proc.wait(timeout=30)
        if proc.returncode > 0:
            raise SystemCheckError(
                "failed disk check for %s: %s"
                % (path, proc.stderr.read().decode())
            )
        out = proc.communicate()[0].decode()
        # 'df' output: a header line, then the data line we parse.
        device, size, used, available, percent, mountpoint = \
            out.split('\n')[1].split()
        if int(percent.strip().split('%')[0]) > 85:
            raise SystemCheckError(
                'disk %s almost full. Used: %s%%' % (device, percent))
def disk_has_space(_popen=None):
    """
    If the disk where repos/binaries doesn't have enough space,
    fail the health check to prevent failing when the binaries are
    getting posted
    """
    popen = _popen or subprocess.Popen
    command = ['df', conf.get('repo_path', '/')]
    result = popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    # Bug fix: Popen.returncode stays None until the child has been
    # waited on, so collect the output first (communicate() waits for
    # exit) and only then inspect the return code.
    out, err = result.communicate()
    if result.returncode > 0:
        raise SystemCheckError("failed disk check: %s" % err)
    device, size, used, available, percent, mountpoint = \
        out.split('\n')[1].split()
    if int(percent.strip().split('%')[0]) > 85:
        msg = 'disk %s almost full. Used: %s' % (device, percent)
        raise SystemCheckError(msg)
def disk_has_space(_popen=None):
    """
    If the disk where repos/binaries doesn't have enough space,
    fail the health check to prevent failing when the binaries are
    getting posted
    """
    popen = _popen or subprocess.Popen
    command = ['df', conf.get('repo_path', '/')]
    result = popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    # Bug fix: Popen.returncode is None until the child has terminated;
    # communicate() waits for exit, so it must run before the check.
    out, err = result.communicate()
    if result.returncode > 0:
        raise SystemCheckError("failed disk check: %s" % err)
    device, size, used, available, percent, mountpoint = out.split(
        '\n')[1].split()
    if int(percent.strip().split('%')[0]) > 85:
        msg = 'disk %s almost full. Used: %s' % (device, percent)
        raise SystemCheckError(msg)
def get_status(self):
    """Return instance status: custom text, project/repo counts,
    users-endpoint flag and version."""
    projects_index = Projects()
    projects = projects_index.get_projects(source=['name', 'refs'])
    refs = itertools.chain.from_iterable(
        p['refs'] for p in projects.values())
    unique_repos = {ref['name'] for ref in refs}
    return {
        'customtext': index_custom_html,
        'projects': len(projects),
        'repos': len(unique_repos),
        'users_endpoint': conf.get('users_endpoint', False),
        'version': rx_version,
    }
def callback(self, **kwargs):
    """Handle the OpenID provider callback and log the user in."""
    auth_context = kwargs.copy()
    auth_context['response'] = kwargs
    auth_context['calling_back'] = True
    try:
        back = auth_context['back']
        valid_user = self.auth_plugin.authenticate(**auth_context)
    except base.UnauthenticatedError as e:
        response.status = 401
        auth_methods = [k for k, v in conf.get('auth', {})]
        return render('login.html',
                      dict(back=back,
                           message='Authorization failure: %s' % e,
                           auth_methods=auth_methods))
    logger.info(
        '%s (%s) successfully authenticated with OpenID.' %
        (valid_user['login'], valid_user['email']))
    common.setup_response(valid_user, back)
def callback(self, **kwargs):
    """Handle the OpenID Connect provider callback."""
    auth_context = kwargs
    auth_context['response'] = kwargs
    auth_context['calling_back'] = True
    try:
        # Reject callbacks whose state was not issued by us for this
        # provider.
        state = auth_context.get('state', None)
        back, provider = db.get_url(state)
        if not back or provider != "openid_connect":
            err = 'OpenID Connect callback with forged state, discarding'
            logger.debug(err)
            raise base.UnauthenticatedError(err)
        auth_context['back'] = back
        valid_user = self.auth_plugin.authenticate(**auth_context)
    except base.UnauthenticatedError as e:
        response.status = 401
        auth_methods = [k for k, v in conf.get('auth', {})]
        return render('login.html',
                      dict(back=back,
                           message='Authorization failure: %s' % e,
                           auth_methods=auth_methods))
    logger.info(
        '%s (%s) successfully authenticated with OpenIDConnect.' %
        (valid_user['login'], valid_user['email']))
    common.setup_response(valid_user, back)
# limitations under the License.

import copy
import hashlib

from pecan import abort
from pecan import conf
from pecan import expose

from repoxplorer import index
from repoxplorer.controllers import utils
from repoxplorer.index.commits import Commits
from repoxplorer.index.projects import Projects
from repoxplorer.index.contributors import Contributors

xorkey = conf.get('xorkey') or 'default'


class TopAuthorsController(object):

    def resolv_name(self, commits, authors):
        """Fill in missing author names by looking up their emails.

        Entries whose 'name' is empty are resolved with one bulk
        request against the commits index; the 'email' key is dropped
        from every entry afterwards.
        """
        missing = [v['email'] for v in authors if not v['name']]
        if missing:
            raw_names = commits.get_commits_author_name_by_emails(missing)
        for v in authors:
            v['name'] = v['name'] or raw_names[v['email']]
            del v['email']
class OpenIDConnectEngine(BaseAuthEngine):
    """Expects a Bearer token sent through the 'Authorization' header.

    The token is verified against a JWK, pulled from the well-known
    configuration of the OIDC provider. The claims will be used to
    provision users if authorization is successful."""

    config = conf.get('oidc', {})

    def is_configured(self):
        """An issuer URL in the 'oidc' section enables this engine."""
        return self.config.get('issuer_url', False)

    def _get_issuer_info(self):
        """Fetch the provider's well-known OpenID configuration."""
        issuer_url = self.config.get('issuer_url')
        verify_ssl = self.config.get('verify_ssl', True)
        issuer_info = requests.get(
            urljoin(issuer_url, '.well-known/openid-configuration'),
            verify=verify_ssl)
        if issuer_info.status_code > 399:
            raise UnauthorizedException(
                "Cannot fetch OpenID provider's configuration")
        return issuer_info.json()

    def _get_signing_key(self, jwks_uri, key_id):
        """Return (key, alg) for key_id from the provider's JWKS."""
        verify_ssl = self.config.get('verify_ssl', True)
        certs = requests.get(jwks_uri, verify=verify_ssl)
        if certs.status_code > 399:
            raise UnauthorizedException("Cannot fetch JWKS")
        for k in certs.json()['keys']:
            if k['kid'] == key_id:
                return (jwt.algorithms.RSAAlgorithm.from_jwk(json.dumps(k)),
                        k['alg'])
        raise UnauthorizedException("Key %s not found" % key_id)

    def _get_raw_token(self, request):
        """Extract the raw Bearer token from the Authorization header."""
        if request.headers.get('Authorization', None) is None:
            raise UnauthorizedException('Missing "Authorization" header')
        auth_header = request.headers.get('Authorization', None)
        if not auth_header.lower().startswith('bearer '):
            raise UnauthorizedException('Invalid "Authorization" header')
        token = auth_header[len('bearer '):]
        return token

    def authorize(self, request, uid=None):
        """Verify the request's token; return 'admin' or the user's uid."""
        token = self._get_raw_token(request)
        issuer_info = self._get_issuer_info()
        unverified_headers = jwt.get_unverified_header(token)
        key_id = unverified_headers.get('kid', None)
        if key_id is None:
            raise UnauthorizedException("Missing key id in token")
        jwks_uri = issuer_info.get('jwks_uri')
        if jwks_uri is None:
            raise UnauthorizedException("Missing JWKS URI in config")
        key, algo = self._get_signing_key(jwks_uri, key_id)
        try:
            claims = jwt.decode(token, key, algorithms=algo,
                                issuer=issuer_info['issuer'],
                                audience=self.config['audience'])
        except Exception as e:
            raise UnauthorizedException('Invalid access token: %s' % e)
        if claims['preferred_username'] == self.config.get(
                'admin_username', 'admin'):
            return 'admin'
        if uid and uid == claims['preferred_username']:
            return uid
        if uid and uid != claims['preferred_username']:
            raise UnauthorizedException("Only the admin ")
        raise UnauthorizedException('unauthorized')

    def provision_user(self, request):
        """Create or update the user described by the token's claims."""
        raw_token = self._get_raw_token(request)
        # verified before so it's totally okay
        claims = jwt.decode(raw_token, verify=False)
        # TODO assuming the presence of claims, but a specific scope might be
        # needed.
        # These are expected to be standard though, see
        # https://openid.net/specs/openid-connect-core-1_0.html#StandardClaims
        email = claims['email']
        uid = claims['preferred_username']
        name = claims['name']
        _users = users.Users(index.Connector(index_suffix='users'))
        u = _users.get(uid)
        infos = {'uid': uid,
                 'name': name,
                 'default-email': email,
                 'emails': [{'email': email}]}
        if u:
            _users.update(infos)
        else:
            _users.create(infos)
def is_configured(self):
    """Report whether the users endpoint is enabled in the config."""
    enabled = conf.get('users_endpoint', False)
    return enabled
def get(self, **kwargs):
    """Log the user out: clear the auth cookie, show the login page."""
    response.delete_cookie('auth_pubtkt', domain=conf.app.cookie_domain)
    auth_methods = [k for k, v in conf.get('auth', {})]
    return dict(back='/', message=LOGOUT_MSG, auth_methods=auth_methods)
def run(self, args):
    """Generate the nginx redirect map and the per-project JS assets
    inside a DB transaction, rolling back on any failure."""
    super(GenerateMapCommand, self).run(args)
    out("LOADING ENVIRONMENT")
    self.load_app()
    models.init_model()
    try:
        out("STARTING A TRANSACTION...")
        models.start()
        template = map_template.format(timestamp=timestamp())
        for project in conf.projects:
            p_query = models.Project.query.filter_by(name=project['name'])
            p = p_query.first()
            if p:
                # lets update
                p_kw = dict((k, v) for k, v in project.items()
                            if k != 'docs')
                p_query.update(p_kw)
            else:
                p = models.Project(name=project['name'],
                                   fqdn=project['fqdn'])
            template = template + '\n# redirects for %s\n' % p.name
            redirects = []
            for doc in project.get('docs', []):
                d = p.get_doc(doc['name'])
                if d:
                    for k, v in doc.items():
                        # NOTE(review): 'redirect' is matched but only
                        # hits a 'pass', so every key — including
                        # 'redirect' — is still set on the doc. Confirm
                        # this is intended.
                        if k == 'redirect':
                            pass
                        setattr(d, k, v)
                if not d:
                    d = models.Doc(p, **doc)
                if doc.get('redirect'):
                    line = "{prefix} {redirect};\n".format(
                        prefix=d.prefix_regex or d.url_prefix,
                        redirect=d.redirect_to
                    )
                    redirects.append((d.weight, line))
            # Highest weight first.
            lines = ''.join(
                [v for k, v in sorted(redirects, reverse=True)])
            template += lines
            # Create the JS
            here = os.path.abspath(os.path.dirname(__file__))
            top_path = os.path.abspath(
                os.path.dirname(os.path.dirname(here)))
            public_path = os.path.join(top_path, 'public')
            js_path = os.path.join(public_path, 'js')
            project_js = os.path.join(js_path, "%s.js" % project['name'])
            with open(project_js, 'w') as js_file:
                project_url_part = "/projects/%s/" % project['name']
                project_url = "%s%s" % (conf.ayni_fqdn.strip('/'),
                                        project_url_part)
                t = Template(templates.js)
                contents = t.substitute({
                    'ayni_css_file': conf.ayni_css_file,
                    'project_url': project_url,
                })
                js_file.write(contents)
        models.commit()
        with open(conf.get('map_path', 'ayni.map'), 'w') as f:
            f.write(template)
            f.write(extra_rules(conf.get('extra_redirect_rules', '')))
    except:
        # Bare except kept deliberately: it re-raises after rolling the
        # transaction back, so no exception is swallowed.
        models.rollback()
        out("ROLLING BACK... ")
        raise
    else:
        out("COMMITING... ")
        models.commit()
def _get_app_env():
    """Return the configured application environment ('stdcfg' default)."""
    return conf.get('env', 'stdcfg')
import base64

from pecan import conf
from pecan import abort
from pecan import expose
from pecan import request
from pecan import response
from pecan.rest import RestController

from repoxplorer import index
from repoxplorer.index import users
from repoxplorer.controllers import utils

# Module-level switches resolved once at import time.
endpoint_active = conf.get('users_endpoint', False)
admin_token = conf.get('admin_token')
xorkey = conf.get('xorkey') or 'default'


class UsersController(RestController):

    def _authorize(self, uid=None):
        """Abort when the endpoint is disabled, then resolve the remote
        user from the usual reverse-proxy headers."""
        if not endpoint_active:
            abort(403)
        # Shortcircuit the authorization for testing purpose
        # return
        if not request.remote_user:
            request.remote_user = request.headers.get('Remote-User')
        if not request.remote_user:
            request.remote_user = request.headers.get('X-Remote-User')
def get(self, **kwargs):
    """Serve the login page, defaulting 'back' to the logout URL."""
    back = kwargs.get('back', '/auth/logout')
    logger.info('Client requests the login page.')
    auth_methods = [k for k, v in conf.get('auth', {})]
    return dict(back=back, message='', auth_methods=auth_methods)
# limitations under the License. import base64 from pecan import conf from pecan import abort from pecan import expose from pecan import request from pecan import response from pecan.rest import RestController from repoxplorer import index from repoxplorer.index import users from repoxplorer.controllers import utils endpoint_active = conf.get('users_endpoint', False) admin_token = conf.get('admin_token') xorkey = conf.get('xorkey') or 'default' class UsersController(RestController): def _authorize(self, uid=None): if not endpoint_active: abort(403) # Shortcircuit the authorization for testing purpose # return if not request.remote_user: request.remote_user = request.headers.get('Remote-User') if not request.remote_user: request.remote_user = request.headers.get('X-Remote-User') if request.remote_user == '(null)':
# See the License for the specific language governing permissions and
# limitations under the License.

from pecan import conf
from pecan import abort
from pecan import expose
from pecan import request
from pecan import response
from pecan.rest import RestController

from repoxplorer import index
from repoxplorer.exceptions import UnauthorizedException
from repoxplorer.index import users
from repoxplorer.controllers import utils

# Pick the auth engine once at import time: OpenID Connect when both
# the users endpoint and an 'oidc' section are configured, cauth
# otherwise.
if conf.get('users_endpoint', False) and conf.get('oidc', False):
    from repoxplorer.auth import OpenIDConnectEngine as AuthEngine
else:
    from repoxplorer.auth import CAuthEngine as AuthEngine

AUTH_ENGINE = AuthEngine()
xorkey = conf.get('xorkey') or 'default'


class UsersController(RestController):

    auth = AUTH_ENGINE

    def abort_if_not_active(self):
        """403 unless the configured auth engine is usable."""
        if not self.auth.is_configured():
            abort(403)
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import requests

from pecan import conf

from fuel_plugin.ostf_adapter.storage import engine, models
from fuel_plugin.ostf_adapter.nose_plugin import nose_discovery

# Tests live in 'fuel_health' unless a debug path is configured.
CORE_PATH = conf.debug_tests if conf.get('debug_tests') else 'fuel_health'


def discovery_check(cluster):
    # Get needed information from nailgun via a series of requests to
    # the nailgun API. At this time we need info about the deployment
    # type (ha, non-ha), the type of network management (nova-network,
    # quantum) and attributes that indicate that savanna/murano is
    # installed.
    cluster_deployment_args = _get_cluster_depl_tags(cluster)

    cluster_data = {
        'cluster_id': cluster,
        'deployment_tags': cluster_deployment_args
    }
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import itertools from pecan import expose from pecan import conf from repoxplorer import version from repoxplorer.index.projects import Projects rx_version = version.get_version() index_custom_html = conf.get('index_custom_html', '') class StatusController(object): @expose('json') def version(self): return {'version': rx_version} def get_status(self): projects_index = Projects() projects = projects_index.get_projects(source=['name', 'refs']) num_projects = len(projects) num_repos = len( set([ ref['name'] for ref in itertools.chain( *[p['refs'] for p in projects.values()])
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import copy import logging from pecan import conf from repoxplorer import index from repoxplorer.index import YAMLDefinition from repoxplorer.index import date2epoch from repoxplorer.index import users from datetime import datetime user_endpoint_active = conf.get('users_endpoint', False) logger = logging.getLogger(__name__) contributors_schema = """ $schema: http://json-schema.org/draft-04/schema definitions: group: anyOf: - type: object additionalProperties: false properties: begin-date: type: string end-date: type: string
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import itertools from pecan import expose from pecan import conf from repoxplorer import version from repoxplorer.index.projects import Projects rx_version = version.get_version() index_custom_html = conf.get('index_custom_html', '') class StatusController(object): @expose('json') def version(self): return {'version': rx_version} def get_status(self): projects_index = Projects() projects = projects_index.get_projects(source=['name', 'refs']) num_projects = len(projects) num_repos = len(set([ ref['name'] for ref in itertools.chain(