def decorator(*args, **kwargs):
    """
    Reject the request with a 401 unless the server reports a remote
    user; otherwise record the caller in the log context and run the
    wrapped view.
    """
    user = request.remote_user
    if not user:
        return jsonify({'status': 'error', 'message': 'unauthorized'}), 401
    # Tag every subsequent log record with who is calling and from where.
    ContextFilter.set_field('user_id', user)
    ContextFilter.set_field('source_ip', request.remote_addr)
    return f(*args, **kwargs)
def put(self, group):
    """
    Creates a new playbook for a group.

    :param group: name of the group the playbook belongs to.
    :returns: 201 with the new playbook id on success, 400 on bad input.
    """
    # Serializer so we can handle json and yaml
    serializer = serialize.Serialize(request.args.get(
        'format', 'json'))
    ContextFilter.set_field('user_id', request.remote_user)
    # Gets the formatter info
    current_app.logger.info(
        'Creating a new playbook for group %s by user %s. '
        'Request id: %s' % (
            group, request.remote_user, request.request_id))
    try:
        playbook = serializer.load(request.data)
        # NOTE(review): ValidationError from validate_playbook is not
        # caught here (post() catches it) -- confirm that is intended.
        validate_playbook(playbook)
        playbook["group"] = str(group)
        # FIX: renamed local so it no longer shadows the builtin ``id``.
        playbook_id = g.db.re.playbooks.insert(playbook)
        return jsonify({'status': 'created', 'id': str(playbook_id)}), 201
    except (KeyError, ValueError) as ke:
        # FIX: ``except X as e`` works on Python 2.6+ and Python 3,
        # unlike the old ``except X, e`` form.
        return jsonify(
            {'status': 'bad request', 'message': str(ke)}), 400
def environment_allow_all(username, playbook, maplist):  # pragma: no cover
    """
    Environment check that approves every request.

    Helpful when debugging locally. Don't use this in production.
    """
    # Still record the caller and playbook so log lines stay traceable.
    ContextFilter.set_field('user_id', username)
    ContextFilter.set_field('playbook_id', playbook)
    return True
def get_confirmation(self, group, exchange='re'):
    """
    Gets a confirmation that the release job has been accepted and started.

    group is the name of the group
    exchange is the MQ exchange to emit on. Default: re

    :returns: the job id parsed from the bus response.
    """
    self.logger.info(
        'Listening for response on temp queue %s for request id %s' % (
            self._tmp_q.method.queue, self.request_id))
    # Block on the temporary reply queue until a parsable message arrives.
    for method_frame, header_frame, body in self._channel.consume(
            self._tmp_q.method.queue):
        self.logger.debug('Message received: %s for request id %s' % (
            body, self.request_id))
        try:
            self.logger.info('Parsing bus response for request id %s' % (
                self.request_id))
            job_id = json.loads(body)['id']
            ContextFilter.set_field('deployment_id', job_id)
            self._channel.basic_ack(method_frame.delivery_tag)
            self.logger.info('Got job id of %s for request id %s' % (
                job_id, self.request_id))
            # Send out a notification that the job has been created.
            properties = pika.spec.BasicProperties(app_id='rerest')
            self._channel.basic_publish(
                exchange, 'notification', json.dumps({
                    'slug': 'Started %s' % job_id,
                    'message': "Project %s's job %s has been started." % (
                        group, job_id),
                    'phase': 'created',
                    'target': []
                }),
                properties=properties)
            return job_id
        except ValueError as vex:
            # FIX: ``except X as e`` works on Python 2.6+ and Python 3,
            # unlike the old ``except X, e`` form.
            self.logger.error(
                'Rejecting bus response due to error for '
                'request id %s' % self.request_id)
            self._channel.basic_reject(method_frame.delivery_tag)
            self.logger.error(
                'Could not load JSON message. '
                'Rejecting message. Error: %s for request id %s' % (
                    vex, self.request_id))
        except pika.exceptions.ChannelClosed:
            self.logger.error(
                'The channel has unexpectedly closed. request id %s' % (
                    self.request_id))
def decorator(*args, **kwargs):
    """
    Run the wrapped view only if the configured authorization callable
    approves the remote user; otherwise answer 403.
    """
    # AUTHORIZATION_CALLABLE is a "module.path:attribute" string.
    module_path, attr_name = current_app.config[
        'AUTHORIZATION_CALLABLE'].split(':')
    authorizer = getattr(
        __import__(module_path, fromlist=['True']), attr_name)
    ContextFilter.set_field('user_id', request.remote_user)
    allowed = authorizer(request.remote_user, request.view_args)[0]
    if not allowed:
        current_app.logger.warn(
            'User %s failed authentication for %s' % (
                request.remote_user, request.path))
        return jsonify({'status': 'error', 'message': 'forbidden'}), 403
    current_app.logger.debug(
        'User %s successfully authenticated for %s' % (
            request.remote_user, request.path))
    return f(*args, **kwargs)
def get(self, group=None, id=None):
    """
    Gets a list or single playbook and returns it to the requestor.

    With ``id`` unset, lists playbooks (optionally filtered by group);
    otherwise returns the one matching playbook or 404.
    """
    # Serializer so we can handle json and yaml
    serializer = serialize.Serialize(request.args.get(
        'format', 'json'))
    if id is None:
        # List playbooks
        ContextFilter.set_field('user_id', request.remote_user)
        current_app.logger.info(
            'User %s is listing known playbooks for group %s. '
            'Request id: %s' % (
                request.remote_user, group, request.request_id))
        # Sort by most recently added first
        if group is None:
            cursor = g.db.re.playbooks.find().sort([('_id', -1)])
        else:
            cursor = g.db.re.playbooks.find(
                {"group": str(group)}).sort([('_id', -1)])
        items = []
        for doc in cursor:
            # Expose the Mongo ObjectId as a plain string "id" field.
            doc["id"] = str(doc["_id"])
            del doc["_id"]
            items.append(doc)
        # This must be a Response since it can be YAML or JSON.
        return Response(
            response=serializer.dump({'status': 'ok', 'items': items}),
            status=200,
            mimetype=serializer.mimetype)
    # One playbook
    playbook = g.db.re.playbooks.find_one({
        "_id": ObjectId(id), "group": str(group)})
    if playbook is None:
        return jsonify({'status': 'not found'}), 404
    current_app.logger.debug(
        'Listing known playbook %s for group %s. '
        'Request id: %s' % (
            id, group, request.request_id))
    del playbook["_id"]
    return Response(
        response=serializer.dump({'status': 'ok', 'item': playbook}),
        status=200,
        mimetype=serializer.mimetype)
def post(self, group, id):
    """
    Replaces a playbook for a group.

    :param group: group owning the playbook.
    :param id: ObjectId string of the playbook to replace.
    :returns: 200 on success, 400 on bad id/input, 404 if not found.
    """
    # Serializer so we can handle json and yaml
    serializer = serialize.Serialize(request.args.get(
        'format', 'json'))
    ContextFilter.set_field('user_id', request.remote_user)
    current_app.logger.info(
        'Updating a playbook for group %s by user %s. '
        'Request id: %s' % (
            group, request.remote_user, request.request_id))
    try:
        oid = ObjectId(id)
    except InvalidId:
        return Response(
            response=serializer.dump(
                {'status': 'bad request', 'message': 'Bad id'}),
            status=400,
            mimetype=serializer.mimetype)
    exists = g.db.re.playbooks.find_one({"_id": oid})
    if exists:
        try:
            playbook = serializer.load(request.data)
            playbook["group"] = str(group)
            validate_playbook(playbook)
            g.db.re.playbooks.update({"_id": oid}, playbook)
            return jsonify({
                'status': 'ok', 'id': str(exists['_id'])}), 200
        except (KeyError, ValueError) as ke:
            # FIX: ``except X as e`` works on Python 2.6+ and Python 3,
            # unlike the old ``except X, e`` form.
            return jsonify({
                'status': 'bad request', 'message': str(ke)}), 400
        except ValidationError:
            current_app.logger.error(
                "The playbook does not conform to the spec")
            return jsonify({
                'status': 'bad request',
                'message': ('The playbook does not '
                            'conform to the spec.')}), 400
    # FIX: previously this path fell off the end and returned None,
    # which Flask turns into a 500. Mirror delete()'s not-found reply.
    return jsonify({'status': 'not found'}), 404
def environment_flat_files(username, playbook, maplist):
    """
    Opens a flat file when checking for environments.

    :param username: remote user being authorized.
    :param playbook: playbook id, recorded for log context.
    :param maplist: iterable of group keys mapped to allowed environments.

    NOTE(review): this chunk ends right after building ``allowed_envs``
    with no return statement and no flat-file access despite the
    docstring -- presumably the remainder of the function is outside
    this view; confirm against the full source file.
    """
    # Tag log records with the caller and playbook being evaluated.
    ContextFilter.set_field('user_id', username)
    ContextFilter.set_field('playbook_id', playbook)
    try:
        # Both config keys must exist; a missing one denies everything.
        envcfg = current_app.config['ENVIRONMENT_FLAT_FILES']
        groupcfg = current_app.config['GROUP_ENVIRONMENT_MAPPING']
    except KeyError:
        current_app.logger.warn(
            'No ENVIRONMENT_FLAT_FILES set in config. Denying everything.')
        return False
    # Collect every environment the user's group memberships map to.
    allowed_envs = []
    for m in maplist:
        try:
            allowed_envs += groupcfg[m]
        except Exception, ex:
            # If the group doesn't exist, that's fine, add nothing
            current_app.logger.debug(
                'Environment %s has no mapping. Skipping...' % m)
def delete(self, group, id):
    """
    Deletes a playbook.

    No ``format`` query parameter here since this endpoint neither
    accepts nor returns a playbook body.
    """
    try:
        oid = ObjectId(id)
    except InvalidId:
        current_app.logger.error(
            "The playbook ID given is invalid: %s" % str(id))
        return jsonify({'status': 'bad request', 'message': 'Bad id'}), 400
    ContextFilter.set_field('user_id', request.remote_user)
    current_app.logger.info(
        'Deleting playbook %s for group %s by user %s. '
        'Request id: %s' % (
            id, group, request.remote_user, request.request_id))
    doc = g.db.re.playbooks.find_one({"_id": oid})
    if not doc:
        return jsonify({'status': 'not found'}), 404
    g.db.re.playbooks.remove({"_id": oid})
    return jsonify({'status': 'gone'}), 410
def decorator(*args, **kwargs):
    """
    Two-stage authorization wrapper: first verify group access via the
    configured AUTHORIZATION_CALLABLE, then verify the user may touch
    every host in the playbook via AUTHORIZATION_ENVIRONMENT_CALLABLE.
    Answers 403 if either check fails.
    """
    # First check access to the requested group
    mod, meth = current_app.config['AUTHORIZATION_CALLABLE'].split(':')
    check_auth = getattr(__import__(mod, fromlist=['True']), meth)
    auth_check = check_auth(request.remote_user, request.view_args)
    if auth_check[0]:
        ContextFilter.set_field('user_id', request.remote_user)
        current_app.logger.debug(
            'User %s successfully authenticated for %s' % (
                request.remote_user, request.path))
        # Now check for environment permissions
        emod, emeth = current_app.config[
            'AUTHORIZATION_ENVIRONMENT_CALLABLE'].split(':')
        check_env_auth = getattr(
            __import__(emod, fromlist=['True']), emeth)
        # FIX: bind the playbook id once with .get() -- the log lines
        # below used request.view_args['id'] directly and would raise
        # KeyError on routes without an id.
        playbook_id = request.view_args.get('id', None)
        if check_env_auth(
                request.remote_user,
                playbook_id,
                auth_check[1]):
            current_app.logger.info(
                'User %s has access to all hosts listed in %s' % (
                    request.remote_user, playbook_id))
            return f(*args, **kwargs)
        current_app.logger.info(
            'User %s does not have permission to all hosts in %s' % (
                request.remote_user, playbook_id))
        return jsonify({
            'status': 'error',
            'message': ('Your user does not have access to all the '
                        'hosts in the given playbook.')}), 403
    current_app.logger.warn(
        'User %s failed authentication for %s' % (
            request.remote_user, request.path))
    return jsonify({'status': 'error', 'message': 'forbidden'}), 403
def get(self):
    """
    Gets a list of groups and returns it to the requestor.

    Each item carries the group name and how many playbooks it holds.
    """
    # Serializer so we can handle json and yaml
    serializer = serialize.Serialize(request.args.get(
        'format', 'json'))
    # List playbooks
    ContextFilter.set_field('user_id', request.remote_user)
    current_app.logger.info(
        'User %s is listing known groups. Request id: %s' % (
            request.remote_user, request.request_id))
    items = [
        {'name': grp,
         'count': int(g.db.re.playbooks.find({'group': grp}).count())}
        for grp in g.db.re.playbooks.distinct('group')]
    # This must be a Response since it can be YAML or JSON.
    return Response(
        response=serializer.dump({'status': 'ok', 'items': items}),
        status=200,
        mimetype=serializer.mimetype)
def get(self, dpid):
    """
    Get the status of a deployment with a state id of ``dpid``.

    Responses:
      404 -- no such deployment
      200 -- completed successfully
      202 -- still in progress
      400 -- finished with errors
      500 -- record is in an inconsistent state
    """
    # FIX: renamed from ``filter`` so it no longer shadows the builtin.
    query = {
        '_id': ObjectId(str(dpid))
    }
    # Limit fields returned just to:
    projection = {
        "created": 1,
        "ended": 1,
        "failed": 1,
        "active_step": 1,
        "failed_step": 1,
        '_id': 0
    }
    record = g.db.re.state.find_one(
        query,
        projection
    )
    ContextFilter.set_field('deployment_id', str(dpid))
    if record is None:
        # 404: "no such deployment" - the deployment requested does not
        # exist
        response = {
            "status": "not found",
            "deployment": dpid
        }
        status_code = 404
        current_app.logger.info("Deployment status: Not found")
    elif ((record['failed'] is not None) and
            (record['failed'] is not True) and
            (record['ended'] is not None)):
        # 200: "completed" - the deployment requested exists and
        # finished successfully (failed is False AND ended is not None)
        duration = record['ended'] - record['created']
        response = {
            "created": record['created'],
            "deployment": dpid,
            "duration": int(duration.total_seconds()),
            "ended": record['ended'],
            "status": "completed",
        }
        status_code = 200
        current_app.logger.info("Deployment status: Completed without error")
    elif record['ended'] is None:
        # 202: "currently running step frob:Nicate" - the deployment
        # requested exists, but is still being processed (ended is None).
        # NOTE(review): ``dt`` is assumed to be datetime.datetime --
        # confirm against this module's imports.
        duration = dt.utcnow() - record['created']
        response = {
            "active_step": record['active_step'],
            "created": record['created'],
            "deployment": dpid,
            "duration": int(duration.total_seconds()),
            "status": "in progress",
        }
        status_code = 202
        current_app.logger.info("Deployment status: In progress")
    elif (record['failed'] is True) and (record['ended'] is not None):
        # 400: "failed" - the deployment requested exists but finished
        # with errors
        duration = record['ended'] - record['created']
        response = {
            "created": record['created'],
            "deployment": dpid,
            "duration": int(duration.total_seconds()),
            "ended": record['ended'],
            "failed_step": record['failed_step'],
            "status": "failed",
        }
        status_code = 400
        current_app.logger.info("Deployment status: Failed")
    else:
        # FIX: previously no branch matched when e.g. ``failed`` was
        # None while ``ended`` was set, leaving ``response`` unbound and
        # raising NameError (a bare 500). Report the state explicitly.
        response = {
            "status": "unknown",
            "deployment": dpid
        }
        status_code = 500
        current_app.logger.error("Deployment status: Unknown state")
    return jsonify(response), status_code
def put(self, group, id):
    """
    Creates a new deployment.

    :param group: group whose playbook is being released.
    :param id: playbook id to deploy.
    :returns: 201 with the confirmation id, 404 if the bus does not
              know the group, 500 when job creation fails.
    """
    try:
        ContextFilter.set_field('user_id', request.remote_user)
        ContextFilter.set_field('source_ip', request.remote_addr)
        current_app.logger.info(
            'Starting release for %s as %s for user %s' % (
                group, request.request_id, request.remote_user))
        mq_data = current_app.config['MQ']
        jc = mq.JobCreator(
            mq_data,
            logger=current_app.logger,
            request_id=request.request_id
        )
        ContextFilter.set_field('playbook_id', id)
        current_app.logger.info(
            'Creating job for group %s, playbook %s' % (
                group, id))
        # Dynamic variables are optional; a missing or empty body
        # simply means {}.
        try:
            dynamic = json.loads(request.data)
            # If we got nothing then raise (to catch)
            if dynamic is None:
                raise ValueError('No data')
        except ValueError:
            current_app.logger.info(
                'No data sent in request for dynamic variables.')
            dynamic = {}
        current_app.logger.info(
            "Received dynamic keys: %s" % (
                str(dynamic)))
        jc.create_job(group, id, dynamic=dynamic)
        confirmation_id = jc.get_confirmation(group)
        current_app.logger.debug(
            'Confirmation id received for request id %s' % (
                request.request_id))
        if confirmation_id is None:
            current_app.logger.debug(
                'Confirmation for %s was None meaning the '
                'group does not exist. request id %s' % (
                    group, request.request_id))
            current_app.logger.info(
                'Bus could not find group for request id %s' % (
                    request.request_id))
            return jsonify({
                'status': 'error', 'message': 'group not found'}), 404
        current_app.logger.debug(
            'Confirmation for %s/%s is %s. request id %s' % (
                group, id, confirmation_id, request.request_id))
        current_app.logger.info(
            'Created release as %s for request id %s' % (
                confirmation_id, request.request_id))
        return jsonify({'status': 'created', 'id': confirmation_id}), 201
    except KeyError as kex:
        # FIX: ``except X as e`` works on Python 2.6+ and Python 3,
        # unlike the old ``except X, e`` form.
        current_app.logger.error(
            'Error creating job for %s/%s. Missing '
            'something in the MQ config section? %s: %s. '
            'Request id: %s' % (
                group, id, type(kex).__name__, kex, request.request_id))
        # FIX: previously fell off the end here so Flask received None;
        # return an explicit 500 error body instead.
        return jsonify({
            'status': 'error', 'message': 'error creating job'}), 500
def ldap_search(username, params):
    """
    Searches ldap for a user and then matches and tries to match up an
    ldap field to a lookup.

    :param username: remote user to look up in LDAP.
    :param params: view args; ``params['group']`` is the group being
                   requested.
    :returns: (True, keys) when the user's LDAP groups grant access.

    NOTE(review): this chunk is truncated -- the body of the trailing
    ``else`` and the ``except`` clause for the outer ``try`` are not
    visible here; the reconstructed nesting below should be confirmed
    against the full source file.
    """
    ContextFilter.set_field('user_id', username)
    try:
        # Imported lazily so the module loads without python-ldap
        # installed when another authorization backend is configured.
        import ldap
        import ldap.filter
        cfg = current_app.config['AUTHORIZATION_CONFIG']
        conn = ldap.initialize(cfg['LDAP_URI'])
        conn.simple_bind_s(
            cfg.get('LDAP_USER', ''), cfg.get('LDAP_PASSWORD', ''))
        # Escape the username before splicing it into the LDAP filter.
        search_results = conn.search_s(
            str(cfg['LDAP_SEARCH_BASE']),
            ldap.SCOPE_SUBTREE,
            '(%s=%s)' % (
                str(cfg['LDAP_MEMBER_ID']),
                ldap.filter.escape_filter_chars(username, 1)),
            [str(cfg['LDAP_FIELD_MATCH'])]
        )
        current_app.logger.debug(
            'LDAP search result: %s' % str(search_results))
        allowed_groups = []
        has_access = False
        if len(search_results) >= 1:
            # Flatten the matched field values from every result entry.
            keys = []
            for search_result in search_results:
                keys += search_result[1][cfg['LDAP_FIELD_MATCH']]
            for key in keys:
                try:
                    allowed_groups += cfg['LDAP_LOOKUP_TABLE'][key]
                    # Using * means the user will have access to everything
                    if ('*' in allowed_groups or
                            params['group'] in allowed_groups):
                        current_app.logger.debug(
                            'User %s successfully authenticated for group'
                            ' %s via ldap group %s.' % (
                                username, params['group'], key))
                        # This is the ONLY place that should set True
                        has_access = True
                except KeyError, ke:
                    current_app.logger.debug(
                        'There is no configured info for ldap group '
                        'for %s. Moving on ...' % ke)
            current_app.logger.debug(
                'User %s has access to these groups: %s' % (
                    username, allowed_groups))
            if has_access:
                return (True, keys)
            current_app.logger.warn(
                'User %s attempted to access %s though the user is not'
                ' in the correct group.' % (
                    username, params['group']))
        # NOTE(review): else-branch body truncated in this chunk.
        else:
def no_authorization(username, params):  # pragma nocover
    """
    Authorization stub that approves every user.

    Helpful when debugging locally. Don't use this in production.
    """
    # Still record who is calling so log context stays useful.
    ContextFilter.set_field('user_id', username)
    return True, []
""" The Flask App. """ import os import logging from flask import Flask, json from rerest.views import make_routes from rerest.contextfilter import ContextFilter CONFIG_FILE = os.environ.get('REREST_CONFIG', 'settings.json') app = Flask('rerest') ContextFilter.set_field('app_component', 'rerest') app.config.update(json.load(open(CONFIG_FILE, 'r'))) log_handler = logging.FileHandler(app.config.get('LOGFILE', 'rerest.log')) log_level = app.config.get('LOGLEVEL', None) if not log_level: log_level = 'INFO' log_handler.setLevel(logging.getLevelName(log_level)) # ADDITION OF NEW FIELDS: # # If you add new fields to the logging format string you must add them # to the rerest.contextfilter.ContextFilter class's 'FIELDS' variable # as well. Failure to do so will result in KeyError's during logging log_handler.setFormatter(logging.Formatter(