def process():
    """Handle a single JSON-RPC (v1.1-style) service request.

    Reads the JSON body from the current Flask ``request``, dispatches it via
    ``ServiceRunner().invokeMethod`` and returns a JSON response.  Service-level
    failures (``CausedError``) are fossilized into the ``error`` field of the
    response instead of propagating.

    :return: a Flask response object with mimetype ``application/json``
    """
    # JSON-RPC envelope; exactly one of 'error'/'result' ends up populated.
    responseBody = {'version': '1.1', 'error': None, 'result': None}
    requestBody = None
    try:
        # init/clear fossil cache
        clearCache()
        # read request
        try:
            requestBody = request.get_json()
            Logger.get('rpc').info(
                'json rpc request. request: {0}'.format(requestBody))
        except BadRequest:
            # get_json() rejects non-JSON mime types
            raise RequestError('ERR-R1', 'Invalid mime-type.')
        if not requestBody:
            raise RequestError('ERR-R2', 'Empty request.')
        # echo the client-supplied request id, per JSON-RPC convention
        if 'id' in requestBody:
            responseBody['id'] = requestBody['id']
        # run request
        responseBody['result'] = ServiceRunner().invokeMethod(
            str(requestBody['method']), requestBody.get('params', []))
    except CausedError as e:
        try:
            errorInfo = fossilize(e)
        except NonFossilizableException as e2:
            # catch Exceptions that are not registered as Fossils
            # and log them
            errorInfo = {'code': '', 'message': str(e2)}
            Logger.get('dev').exception('Exception not registered as fossil')
        # NoReport errors (i.e. not logged in) shouldn't be logged
        if not isinstance(e, NoReportError) and not getattr(
                e, '_disallow_report', False):
            Logger.get('rpc').exception(
                'Service request failed. '
                'Request text:\r\n{0}\r\n\r\n'.format(requestBody))
        if requestBody:
            # attach sanitized request info so the client can show diagnostics
            params = requestBody.get('params', [])
            Sanitization._escapeHTML(params)
            errorInfo["requestInfo"] = {
                'method': str(requestBody['method']),
                'params': params,
                'origin': str(requestBody.get('origin', 'unknown'))
            }
            Logger.get('rpc').debug('Arguments: {0}'.format(
                errorInfo['requestInfo']))
        responseBody['error'] = errorInfo
    try:
        jsonResponse = dumps(responseBody, ensure_ascii=True)
    except UnicodeError:
        Logger.get('rpc').exception('Problem encoding JSON response')
        # This is to avoid exceptions due to old data encodings (based on iso-8859-1)
        responseBody['result'] = fix_broken_obj(responseBody['result'])
        jsonResponse = encode(responseBody)
    return app.response_class(jsonResponse, mimetype='application/json')
def _getAnswer(self):
    """Search avatars (and, for an event, its persons) and merge the results.

    Results are fossilized/serialized dicts, deduplicated by email (later
    entries win) and sorted by lowercased name, then email.
    """
    avatar_criteria = {
        'surName': self._surName,
        'name': self._name,
        'organisation': self._organisation,
        'email': self._email
    }
    matching_avatars = search_avatars(avatar_criteria, self._exactMatch, self._searchExt)
    matching_persons = []
    if self._event:
        # map EventPerson columns to the corresponding search values
        column_values = {
            EventPerson.first_name: self._name,
            EventPerson.last_name: self._surName,
            EventPerson.email: self._email,
            EventPerson.affiliation: self._organisation
        }
        filters = [unaccent_match(column, value, exact=self._exactMatch)
                   for column, value in column_values.iteritems()]
        matching_persons = self._event.persons.filter(*filters).all()
    sorted_avatars = sorted(matching_avatars,
                            key=lambda av: (av.getStraightFullName(), av.getEmail()))
    avatar_fossils = fossilize(sorted_avatars)
    person_fossils = map(serialize_event_person, matching_persons)
    # deduplicate by (unicode) email; event persons override avatar entries
    by_email = {}
    for entry in chain(avatar_fossils, person_fossils):
        by_email[to_unicode(entry['email'])] = entry
    return sorted(by_email.values(),
                  key=lambda x: (to_unicode(x['name']).lower(), to_unicode(x['email'])))
def _getAnswer(self):
    """Search groups and return their fossilized legacy representations.

    Each fossil is tagged with ``isGroup=True`` so the client can tell
    groups apart from user entries.
    """
    proxies = GroupProxy.search(self._group, exact=self._exactMatch)
    legacy_groups = []
    for proxy in proxies:
        legacy_groups.append(proxy.as_legacy_group)
    fossils = fossilize(legacy_groups, IGroupFossil)
    for fossil in fossils:
        fossil["isGroup"] = True
    return fossils
def _getAnswer(self):
    """Return merged avatar + event-person search results, one entry per email.

    Entries are sorted by lowercased name and then email; event-person
    entries replace avatar entries that share the same email.
    """
    # avatar directory search
    users = search_avatars({'surName': self._surName,
                            'name': self._name,
                            'organisation': self._organisation,
                            'email': self._email},
                           self._exactMatch, self._searchExt)
    # event-person search (only when scoped to an event)
    if self._event:
        lookup = {EventPerson.first_name: self._name,
                  EventPerson.last_name: self._surName,
                  EventPerson.email: self._email,
                  EventPerson.affiliation: self._organisation}
        conditions = [unaccent_match(col, val, exact=self._exactMatch)
                      for col, val in lookup.iteritems()]
        event_persons = self._event.persons.filter(*conditions).all()
    else:
        event_persons = []
    combined = chain(fossilize(sorted(users,
                                      key=lambda av: (av.getStraightFullName(), av.getEmail()))),
                     map(serialize_event_person, event_persons))
    deduped = {to_unicode(item['email']): item for item in combined}
    return sorted(deduped.values(),
                  key=lambda x: (to_unicode(x['name']).lower(), to_unicode(x['email'])))
if error is None and request.method == 'POST': logger.info('API request: %s?%s', path, query) if is_response: return result serializer = Serializer.create(dformat, query_params=queryParams, pretty=pretty, typeMap=typeMap, **hook.serializer_args) if error: if not serializer.schemaless: # if our serializer has a specific schema (HTML, ICAL, etc...) # use JSON, since it is universal serializer = Serializer.create('json') result = fossilize(error) else: if serializer.encapsulate: result = fossilize( HTTPAPIResult(result, path, query, ts, complete, extra), IHTTPAPIExportResultFossil) del result['_fossil'] try: data = serializer(result) serializer.set_headers(responseUtil) return responseUtil.make_response(data) except: logger.exception('Serialization error in request %s?%s', path, query) raise
def handler(prefix, path):
    """Entry point for the legacy HTTP export API.

    Parses the query string / POST form, authenticates the caller (API key
    with optional signature, OAuth bearer token, or session cookie + CSRF
    token), resolves the matching ``HTTPAPIHook``, serves the result (from
    cache when allowed) and serializes it in the requested format.

    :param prefix: URL prefix the request was routed with
    :param path: remainder of the request path
    :return: a Flask response object
    :raises NotFound: if no hook matches or there is neither result nor error
    :raises BadRequest: on OAuth errors
    """
    path = posixpath.join('/', prefix, path)
    clearCache()  # init fossil cache
    logger = Logger.get('httpapi')
    if request.method == 'POST':
        # Convert POST data to a query string
        queryParams = [(key, [x.encode('utf-8') for x in values])
                       for key, values in request.form.iterlists()]
        query = urllib.urlencode(queryParams, doseq=1)
        # we only need/keep multiple values so we can properly validate the signature.
        # the legacy code below expects a dict with just the first value.
        # if you write a new api endpoint that needs multiple values get them from
        # ``request.values.getlist()`` directly
        queryParams = {key: values[0] for key, values in queryParams}
    else:
        # Parse the actual query string
        queryParams = dict((key, value.encode('utf-8'))
                           for key, value in request.args.iteritems())
        query = request.query_string
    # Well-known control parameters (short and long aliases)
    apiKey = get_query_parameter(queryParams, ['ak', 'apikey'], None)
    cookieAuth = get_query_parameter(queryParams, ['ca', 'cookieauth'], 'no') == 'yes'
    signature = get_query_parameter(queryParams, ['signature'])
    timestamp = get_query_parameter(queryParams, ['timestamp'], 0, integer=True)
    noCache = get_query_parameter(queryParams, ['nc', 'nocache'], 'no') == 'yes'
    pretty = get_query_parameter(queryParams, ['p', 'pretty'], 'no') == 'yes'
    onlyPublic = get_query_parameter(queryParams, ['op', 'onlypublic'], 'no') == 'yes'
    onlyAuthed = get_query_parameter(queryParams, ['oa', 'onlyauthed'], 'no') == 'yes'
    scope = 'read:legacy_api' if request.method == 'GET' else 'write:legacy_api'
    try:
        oauth_valid, oauth_request = oauth.verify_request([scope])
        if not oauth_valid and oauth_request and oauth_request.error_message != 'Bearer token not found.':
            raise BadRequest('OAuth error: {}'.format(
                oauth_request.error_message))
        elif g.get(
                'received_oauth_token'
        ) and oauth_request.error_message == 'Bearer token not found.':
            raise BadRequest('OAuth error: Invalid token')
    except ValueError:
        # XXX: Dirty hack to workaround a bug in flask-oauthlib that causes it
        # not to properly urlencode request query strings
        # Related issue (https://github.com/lepture/flask-oauthlib/issues/213)
        oauth_valid = False

    # Get our handler function and its argument and response type
    hook, dformat = HTTPAPIHook.parseRequest(path, queryParams)
    if hook is None or dformat is None:
        raise NotFound

    # Disable caching if we are not just retrieving data (or the hook requires it)
    if request.method == 'POST' or hook.NO_CACHE:
        noCache = True

    ak = error = result = None
    ts = int(time.time())
    typeMap = {}
    status_code = None
    is_response = False
    try:
        used_session = None
        if cookieAuth:
            used_session = session
            if not used_session.user:  # ignore guest sessions
                used_session = None

        if apiKey or oauth_valid or not used_session:
            if not oauth_valid:
                # Validate the API key (and its signature)
                ak, enforceOnlyPublic = checkAK(apiKey, signature, timestamp,
                                                path, query)
                if enforceOnlyPublic:
                    onlyPublic = True
                # Create an access wrapper for the API key's user
                user = ak.user if ak and not onlyPublic else None
            else:  # Access Token (OAuth)
                at = load_token(oauth_request.access_token.access_token)
                user = at.user if at and not onlyPublic else None
            # Get rid of API key in cache key if we did not impersonate a user
            if ak and user is None:
                cacheKey = normalizeQuery(
                    path, query,
                    remove=('_', 'ak', 'apiKey', 'signature', 'timestamp',
                            'nc', 'nocache', 'oa', 'onlyauthed'))
            else:
                cacheKey = normalizeQuery(path, query,
                                          remove=('_', 'signature', 'timestamp',
                                                  'nc', 'nocache', 'oa', 'onlyauthed'))
                if signature:
                    # in case the request was signed, store the result under a different key
                    cacheKey = 'signed_' + cacheKey
        else:
            # We authenticated using a session cookie.
            token = request.headers.get(
                'X-CSRF-Token', get_query_parameter(queryParams, ['csrftoken']))
            if used_session.csrf_protected and used_session.csrf_token != token:
                raise HTTPAPIError('Invalid CSRF token', 403)
            user = used_session.user if not onlyPublic else None
            # cache per-user since results may depend on the session user
            userPrefix = 'user-{}_'.format(used_session.user.id)
            cacheKey = userPrefix + normalizeQuery(
                path, query,
                remove=('_', 'nc', 'nocache', 'ca', 'cookieauth', 'oa',
                        'onlyauthed', 'csrftoken'))

        # Bail out if the user requires authentication but is not authenticated
        if onlyAuthed and not user:
            raise HTTPAPIError('Not authenticated', 403)

        addToCache = not hook.NO_CACHE
        cache = GenericCache('HTTPAPI')
        cacheKey = RE_REMOVE_EXTENSION.sub('', cacheKey)
        if not noCache:
            obj = cache.get(cacheKey)
            if obj is not None:
                result, extra, ts, complete, typeMap = obj
                addToCache = False
        if result is None:
            g.current_api_user = user
            # Perform the actual exporting
            res = hook(user)
            if isinstance(res, current_app.response_class):
                # The hook built a full response itself — return it untouched later.
                addToCache = False
                is_response = True
                result, extra, complete, typeMap = res, {}, True, {}
            elif isinstance(res, tuple) and len(res) == 4:
                result, extra, complete, typeMap = res
            else:
                result, extra, complete, typeMap = res, {}, True, {}
        if result is not None and addToCache:
            ttl = api_settings.get('cache_ttl')
            if ttl > 0:
                cache.set(cacheKey, (result, extra, ts, complete, typeMap), ttl)
    except HTTPAPIError as e:
        error = e
        if e.getCode():
            status_code = e.getCode()

    if result is None and error is None:
        # TODO: usage page
        raise NotFound
    else:
        if ak and error is None:
            # Commit only if there was an API key and no error
            norm_path, norm_query = normalizeQuery(path, query,
                                                   remove=('signature', 'timestamp'),
                                                   separate=True)
            uri = to_unicode('?'.join(filter(None, (norm_path, norm_query))))
            ak.register_used(request.remote_addr, uri, not onlyPublic)
            db.session.commit()
        else:
            # No need to commit stuff if we didn't use an API key (nothing was written)
            # XXX do we even need this?
            db.session.rollback()

        # Log successful POST api requests
        if error is None and request.method == 'POST':
            logger.info('API request: %s?%s', path, query)
        if is_response:
            return result
        serializer = Serializer.create(dformat, query_params=queryParams,
                                       pretty=pretty, typeMap=typeMap,
                                       **hook.serializer_args)
        if error:
            if not serializer.schemaless:
                # if our serializer has a specific schema (HTML, ICAL, etc...)
                # use JSON, since it is universal
                serializer = Serializer.create('json')
            result = fossilize(error)
        else:
            if serializer.encapsulate:
                result = fossilize(
                    HTTPAPIResult(result, path, query, ts, complete, extra),
                    IHTTPAPIExportResultFossil)
                del result['_fossil']
        try:
            data = serializer(result)
            response = current_app.make_response(data)
            content_type = serializer.get_response_content_type()
            if content_type:
                response.content_type = content_type
            if status_code:
                response.status_code = status_code
            return response
        except Exception:
            logger.exception('Serialization error in request %s?%s', path, query)
            raise
# No need to commit stuff if we didn't use an API key (nothing was written) # XXX do we even need this? db.session.rollback() # Log successful POST api requests if error is None and request.method == 'POST': logger.info('API request: %s?%s', path, query) if is_response: return result serializer = Serializer.create(dformat, query_params=queryParams, pretty=pretty, typeMap=typeMap, **hook.serializer_args) if error: if not serializer.schemaless: # if our serializer has a specific schema (HTML, ICAL, etc...) # use JSON, since it is universal serializer = Serializer.create('json') result = fossilize(error) else: if serializer.encapsulate: result = fossilize(HTTPAPIResult(result, path, query, ts, complete, extra), IHTTPAPIExportResultFossil) del result['_fossil'] try: data = serializer(result) serializer.set_headers(responseUtil) return responseUtil.make_response(data) except: logger.exception('Serialization error in request %s?%s', path, query) raise
def _getAnswer(self):
    """Look up groups matching the query and return them fossilized.

    Each fossil gets ``isGroup=True`` so callers can distinguish group
    entries from user entries.
    """
    group_proxies = GroupProxy.search(self._group, exact=self._exactMatch)
    fossils = fossilize([proxy.as_legacy_group for proxy in group_proxies],
                        IGroupFossil)
    for entry in fossils:
        entry["isGroup"] = True
    return fossils
def _getAnswer(self):
    """Search avatars and abstract submitters, optionally filtered by
    abstract title and/or accepted track.

    Depending on which of ``self._abstract`` / ``self._track`` are set, the
    event-person query is joined against ``Abstract`` (submitter match) and
    ``Track`` (accepted track, matched with a word-boundary regex).  Results
    are deduplicated by email and sorted by lowercased name, then email.
    When ``self._eventPerson`` is set, only abstract submitters are returned.
    """
    event_persons = []
    event_abstract_submitters = []
    criteria = {
        'surName': self._surName,
        'name': self._name,
        'organisation': self._organisation,
        'email': self._email,
        'abstract': self._abstract,
        'track': self._track
    }
    users = search_avatars(criteria, self._exactMatch, self._searchExt)
    # NOTE(review): ``users2`` is an alias, not a copy — both names refer to
    # the same list object returned by search_avatars.
    users2 = users
    if self._event:
        fields = {
            EventPerson.first_name: self._name,
            EventPerson.last_name: self._surName,
            EventPerson.email: self._email,
            EventPerson.affiliation: self._organisation
        }
        criteria = [
            unaccent_match(col, val, exact=self._exactMatch)
            for col, val in fields.iteritems()
        ]
        if not self._abstract:
            if not self._track:
                # no abstract/track filters: plain event-person search
                event_persons = self._event.persons.filter(*criteria).all()
                event_abstract_submitters = event_persons
            else:
                # track filter only: join submitters' abstracts and their
                # accepted track, matching the track title by word boundary
                event_abstract_submitters = EventPerson.query.join(
                    Abstract,
                    (Abstract.submitter_id == EventPerson.user_id) &
                    (Abstract.event_id == self._event.id)).join(
                        Track,
                        (Abstract.accepted_track_id == Track.id)).with_entities(
                            EventPerson.id, Abstract.title.label('abstract'),
                            Track.title.label('track'), EventPerson.email,
                            EventPerson.last_name.label('full_name'),
                            EventPerson.first_name, EventPerson.last_name,
                            EventPerson.title.label('title'),
                            EventPerson.affiliation, EventPerson.phone,
                            EventPerson.address, EventPerson.user_id).filter(
                                Track.title.op("~*")(
                                    r'[[:<:]]{}[[:>:]]'.format(
                                        self._track))).filter(
                                            *criteria).all()
        elif not self._track:
            # abstract filter only: join submitters' abstracts and match the
            # abstract title (unaccented)
            event_abstract_submitters = EventPerson.query.join(
                Abstract,
                (Abstract.submitter_id == EventPerson.user_id) &
                (Abstract.event_id == self._event.id)).with_entities(
                    EventPerson.id, Abstract.title.label('abstract'),
                    EventPerson.email,
                    EventPerson.last_name.label('full_name'),
                    EventPerson.first_name, EventPerson.last_name,
                    EventPerson.title.label('title'),
                    EventPerson.affiliation, EventPerson.phone,
                    EventPerson.address, EventPerson.user_id).filter(
                        unaccent_match(
                            Abstract.title, self._abstract,
                            self._exactMatch)).filter(*criteria).all()
        else:
            # both filters: join abstracts and tracks, match both titles
            event_abstract_submitters = EventPerson.query.join(
                Abstract,
                (Abstract.submitter_id == EventPerson.user_id) &
                (Abstract.event_id == self._event.id)).join(
                    Track,
                    (Abstract.accepted_track_id == Track.id)).with_entities(
                        EventPerson.id, Abstract.title.label('abstract'),
                        Track.title.label('track'), EventPerson.email,
                        EventPerson.last_name.label('full_name'),
                        EventPerson.first_name, EventPerson.last_name,
                        EventPerson.title.label('title'),
                        EventPerson.affiliation, EventPerson.phone,
                        EventPerson.address, EventPerson.user_id).filter(
                            unaccent_match(
                                Abstract.title, self._abstract,
                                self._exactMatch)).filter(
                                    Track.title.op("~*")(
                                        r'[[:<:]]{}[[:>:]]'.format(
                                            self._track))).filter(
                                                *criteria).all()
    fossilized_users = fossilize(
        sorted(users,
               key=lambda av: (av.getStraightFullName(), av.getEmail())))
    fossilized_abstract_submitters = map(serialize_event_person,
                                         event_abstract_submitters)
    # Copy abstract/track info from the raw query rows onto the serialized
    # submitters (row positions: [0]=id, [1]=abstract title, [2]=track title).
    for submitter in fossilized_abstract_submitters:
        for event_submitter in event_abstract_submitters:
            if (self._abstract or self._track) and submitter['id'] == event_submitter[0]:
                submitter['abstract'] = event_submitter[1]
                if self._track:
                    submitter['track'] = event_submitter[2]
    # NOTE(review): the commented-out id comparison below means every
    # fossilized user receives the abstract/track of *any* users2 entry whose
    # user has an ``abstract`` attribute — this looks like a latent bug;
    # confirm intended behavior before re-enabling or removing the check.
    for usre in fossilized_users:
        for usre2 in users2:
            if hasattr(usre2.user, 'abstract'
                       ):  #and int(usre['id']) == int(usre2.user.id):
                usre['abstract'] = usre2.user.abstract
                usre['track'] = usre2.user.track
    if self._eventPerson:
        # restrict the result to abstract submitters only
        unique_users = {
            to_unicode(user['email']): user
            for user in fossilized_abstract_submitters
        }
    else:
        unique_users = {
            to_unicode(user['email']): user
            for user in chain(fossilized_users, fossilized_abstract_submitters)
        }
    return sorted(unique_users.values(),
                  key=lambda x: (to_unicode(x['name']).lower(),
                                 to_unicode(x['email'])))