def db_collection_update(res_id, collection_name):
    """Update documents in a sandboxed collection, enforcing the size quota.

    Expects a JSON body with 'query' and 'update' (both required) plus
    optional 'upsert' and 'multi' flags.  Raises MWSServerError 400 on a
    missing spec/document or an OperationFailure, 403 when the worst-case
    growth would exceed the collection quota.
    """
    query = update = None
    if request.json:
        query = request.json.get('query')
        update = request.json.get('update')
        upsert = request.json.get('upsert', False)
        multi = request.json.get('multi', False)
    if query is None or update is None:
        error = 'update requires spec and document arguments'
        raise MWSServerError(400, error)

    # Check quota before touching the collection.
    size = get_collection_size(res_id, collection_name)
    with UseResId(res_id):
        # Worst-case size increase = update size * docs affected.
        # It would be nice if we were able to make a more conservative
        # estimate of the space difference that an update will cause
        # (especially if it results in smaller documents).
        db = get_db()
        affected = db[collection_name].find(query).count()
        req_size = len(BSON.encode(update)) * affected
        if size + req_size > current_app.config['QUOTA_COLLECTION_SIZE']:
            raise MWSServerError(403, 'Collection size exceeded')
        try:
            db[collection_name].update(query, update, upsert, multi=multi)
            return empty_success()
        except OperationFailure as e:
            # str(e) instead of e.message: consistent with the other
            # handlers in this file and forward-compatible with Python 3.
            raise MWSServerError(400, str(e))
def verify(script_name):
    """Run a verification script and forward its results to the grading server.

    Requires 'course_id' and 'problem_id' request values.  Raises
    MWSServerError 400 when they are missing, 404 when the script cannot be
    imported, 500 when the script itself throws, and propagates the grading
    server's status code on a non-200 response.
    """
    res_id = _get_res_id()
    user_id = _get_user_id()
    if 'course_id' not in request.values or 'problem_id' not in request.values:
        raise MWSServerError(400, "Course or Problem not specified.")
    else:
        course_id = request.values['course_id']
        problem_id = request.values['problem_id']
    try:
        # Verification scripts live under a fixed package; the script name
        # selects the module to import dynamically.
        module = 'webapps.ivs.verify.scripts.{0}'.format(script_name)
        module = import_module(module)
    except ImportError as e:
        raise MWSServerError(404, str(e))
    try:
        results = module.run(res_id, request)
    except Exception as e:
        # Log the failure with a traceback before surfacing a 500 so the
        # script bug is diagnosable server-side.
        _logger.error('Verification script {0} threw exception {1}'.format(
            script_name, str(e)))
        _logger.error('Traceback: {0}'.format(traceback.format_exc()))
        raise MWSServerError(500, type(e).__name__, str(e))
    server_url = current_app.config.get('GRADING_SERVER_URL')
    # add grading credentials
    results.update({
        'api_key': current_app.config.get('GRADING_API_KEY'),
        'api_secret': current_app.config.get('GRADING_API_SECRET')
    })
    post_url = '{0}/api/v1/grade/{1}/{2}/{3}'.format(server_url, course_id,
                                                     problem_id, user_id)
    response = requests.post(post_url, data=results)
    if response.status_code != 200:
        raise MWSServerError(response.status_code, response.text)
    # Relay the grading server's JSON payload back to the client verbatim.
    return jsonify(**(json.loads(response.text)))
def db_collection_insert(res_id, collection_name):
    """Insert a document (or list of documents), enforcing the size quota.

    Returns a pretty-printed summary of how many documents were inserted.
    """
    parse_get_json()
    document = request.json.get('document')
    if document is None:
        raise MWSServerError(400, "no object passed to insert!")
    validate_document_or_list(document)
    incoming_size = calculate_document_size(document)

    with UseResId(res_id) as db:
        # Refuse the insert up front if it would push the collection
        # past its quota.
        quota = current_app.config['QUOTA_COLLECTION_SIZE']
        current_size = db[collection_name].size()
        if current_size + incoming_size > quota:
            raise MWSServerError(403, 'Collection size exceeded')

        try:
            inserted = db[collection_name].insert(document)
        except (DuplicateKeyError, OperationFailure) as e:
            raise MWSServerError(400, str(e))

        if isinstance(inserted, list):
            pretty_response = pretty_bulk_insert.format(len(inserted))
        else:
            pretty_response = pretty_insert.format(1)
        return to_json({'pretty': pretty_response})
def db_collection_save(res_id, collection_name):
    """Save a single document into a sandboxed collection, within quota."""
    parse_get_json()
    document = request.json.get('document')
    if document is None:
        raise MWSServerError(400,
                             "'document' argument not found in the save request.")
    validate_document(document)
    incoming_size = calculate_document_size(document)

    with UseResId(res_id) as db:
        # Quota check: reject before the write if it could overflow.
        quota = current_app.config['QUOTA_COLLECTION_SIZE']
        if db[collection_name].size() + incoming_size > quota:
            raise MWSServerError(403, 'Collection size exceeded')
        try:
            db[collection_name].save(document)
            return empty_success()
        except (InvalidId, TypeError, InvalidDocument, DuplicateKeyError) as e:
            raise MWSServerError(400, str(e))
def db_collection_insert(res_id, collection_name):
    """Insert a document (or list of documents) within the collection quota.

    Raises MWSServerError 400 on a missing 'document' argument or an invalid
    document, 403 when the quota would be exceeded.
    """
    # Guard against a missing JSON body: request.json is None when no JSON
    # payload was sent, and `'document' in None` would raise a TypeError
    # (resolves the old TODO; the client now gets a clean 400 instead of 500).
    if request.json and 'document' in request.json:
        document = request.json['document']
    else:
        error = '\'document\' argument not found in the insert request.'
        raise MWSServerError(400, error)

    # Check quota
    size = get_collection_size(res_id, collection_name)

    # Handle inserting both a list of docs or a single doc
    if isinstance(document, list):
        req_size = sum(len(BSON.encode(d)) for d in document)
    else:
        req_size = len(BSON.encode(document))
    if size + req_size > current_app.config['QUOTA_COLLECTION_SIZE']:
        raise MWSServerError(403, 'Collection size exceeded')

    # Insert document
    with UseResId(res_id):
        try:
            get_db()[collection_name].insert(document)
            return empty_success()
        except InvalidDocument as e:
            # str(e) for consistency with the other handlers in this file.
            raise MWSServerError(400, str(e))
def _get_res_id():
    """Return the resource id bound to the current Flask session.

    Raises MWSServerError 400 when the session id is absent and 500 when no
    client record maps that session to a resource.
    """
    try:
        session_id = session['session_id']
    except KeyError:
        raise MWSServerError(400, "Invalid request (missing session)")
    clients = get_db()[CLIENTS_COLLECTION]
    record = clients.find_one({'session_id': session_id},
                              {'res_id': 1, '_id': 0})
    if not record:
        raise MWSServerError(500, "Resource id not associated with session")
    return record['res_id']
def validate_document(document):
    """Reject *document* with MWSServerError 400 unless it is a dict that
    can be BSON-encoded.
    """
    if not isinstance(document, dict):
        message = u"Could not validate '{0}', expected a document".format(
            document)
        raise MWSServerError(400, message)
    try:
        BSON.encode(document)
    except (InvalidDocument, TypeError, InvalidId, BSONError,
            InvalidBSON, InvalidStringData) as e:
        raise MWSServerError(400, str(e))
def init(script_name):
    """Run a named initializer script against the session's resource.

    Raises MWSServerError 404 when the script module does not exist and 500
    when the script itself throws.
    """
    res_id = _get_res_id()
    module_path = 'webapps.ivs.initializers.scripts.{0}'.format(script_name)
    try:
        module = import_module(module_path)
    except ImportError as e:
        raise MWSServerError(404, str(e))
    try:
        module.run(res_id, request)
    except Exception as e:
        # Log the full traceback server-side before surfacing the 500.
        _logger.error('Init script {0} threw exception {1}'.format(
            script_name, str(e)))
        _logger.error('Traceback: {0}'.format(traceback.format_exc()))
        raise MWSServerError(500, type(e).__name__, str(e))
    return jsonify(success=True, msg='Collection initialized successfully')
def _get_user_id():
    """Return the user id carried by the signed 'mws-track-id' cookie.

    In DEBUG mode the id is trusted straight from the request values.
    Raises MWSServerError 400 when the cookie is missing and 403 when its
    signature does not verify.
    """
    if current_app.config.get('DEBUG'):
        # Debug deployments skip cookie verification entirely.
        return request.values.get('user_id')
    track_id = request.cookies.get('mws-track-id')
    if track_id is None:
        raise MWSServerError(400, "Invalid request (missing cookie)")
    signer = Signer(current_app.config.get('EDX_SHARED_KEY'))
    try:
        return signer.unsign(track_id)
    except (BadSignature, TypeError) as e:
        _logger.exception(e)
        raise MWSServerError(403, "Invalid request (invalid cookie)")
def to_json(result):
    """Serialize *result* to JSON and return it with a 200 status.

    Raises MWSServerError 500 when the result cannot be serialized.
    """
    try:
        return dumps(result), 200
    except (ValueError, TypeError):
        # dumps raises TypeError (not ValueError) for objects it cannot
        # serialize; catching it here keeps the client from seeing an
        # unhandled 500 traceback.
        error = 'Error in find while trying to convert the results to ' + \
                'JSON format.'
        raise MWSServerError(500, error)
def db_cursor_next(res_id, collection_name):
    """Fetch the next batch of results from a previously opened cursor.

    The request supplies 'cursor_id', the count already 'retrieved', and an
    optional 'drain_cursor' flag.  Sets 'empty_cursor' in the response and
    kills the server-side cursor when the results are exhausted.
    """
    parse_get_json()
    result = {}
    batch_size = current_app.config['CURSOR_BATCH_SIZE']
    with UseResId(res_id, db=get_keepalive_db()) as db:
        coll = db[collection_name]
        cursor_id = int(request.json.get('cursor_id'))
        retrieved = request.json.get('retrieved', 0)
        drain_cursor = request.json.get('drain_cursor', False)
        # NOTE(review): when drain_cursor is set, batch_size becomes -1 and
        # range(-1) below is empty, so no documents are returned — confirm
        # whether recreate_cursor gives -1 a special meaning or this is a
        # latent bug.
        batch_size = (-1 if drain_cursor
                      else current_app.config['CURSOR_BATCH_SIZE'])
        cursor = recreate_cursor(coll, cursor_id, retrieved, batch_size)
        try:
            result['result'] = []
            for i in range(batch_size):
                try:
                    result['result'].append(cursor.next())
                except StopIteration:
                    result['empty_cursor'] = True
                    break
        except OperationFailure as e:
            # BUG FIX: this previously `return`ed the MWSServerError object,
            # handing the exception instance back as a response body instead
            # of raising a 400.
            raise MWSServerError(400, 'Cursor not found')
        # kill cursor on server if all results are returned
        if result.get('empty_cursor'):
            kill_cursor(coll, long(cursor_id))
        return to_json(result)
def db_collection_update(res_id, collection_name):
    """Update documents, enforcing the quota, and return a pretty summary.

    Expects 'query' and 'update' in the JSON body (required) plus optional
    'upsert' and 'multi'.  Raises MWSServerError 400 on missing arguments or
    driver errors, 403 when the worst-case growth exceeds the quota.
    """
    parse_get_json()
    query = request.json.get('query')
    update = request.json.get('update')
    upsert = request.json.get('upsert', False)
    multi = request.json.get('multi', False)
    if query is None or update is None:
        error = 'update requires spec and document arguments'
        raise MWSServerError(400, error)

    with UseResId(res_id) as db:
        # Check quota
        coll = db[collection_name]
        # Computation of worst case size increase - update size * docs affected
        # It would be nice if we were able to make a more conservative estimate
        # of the space difference that an update will cause. (especially if it
        # results in smaller documents)
        # TODO: Make this more intelligent. I'm not sure that this even makes sense.
        affected = coll.find(query).count()
        req_size = calculate_document_size(update) * affected
        size = db[collection_name].size()
        if size + req_size > current_app.config['QUOTA_COLLECTION_SIZE']:
            raise MWSServerError(403, 'Collection size exceeded')

        # Attempt Update
        try:
            res = db[collection_name].update(query, update, upsert,
                                             multi=multi)
            _logger.info("res: {0}".format(res))
            # An upsert reports no matches; otherwise 'n' counts matched docs.
            n_matched = 0 if res.get('upserted') else res.get('n')
            n_upserted = 1 if res.get('upserted') else 0
            n_modified = res.get('nModified', 0)
            if n_upserted:
                # Include the new document's _id in the upsert message.
                _id = res.get('upserted')[0].get('_id')
                pretty_response = pretty_upsert.format(n_matched, n_upserted,
                                                       n_modified, _id)
            else:
                pretty_response = pretty_update.format(n_matched, n_modified)
            return to_json({'pretty': pretty_response})
        except (DuplicateKeyError, InvalidDocument, InvalidId,
                TypeError, OperationFailure) as e:
            raise MWSServerError(400, str(e))
def parse_get_json():
    """Populate request.json from the body or the 'data' GET parameter.

    Sets request.json to an empty dict when no data was supplied.
    Raises MWSServerError 400 when the payload cannot be parsed.
    """
    try:
        payload = request.data or request.args['data']
        request.json = loads(payload) if payload else {}
    except (InvalidId, TypeError, ValueError) as e:
        raise MWSServerError(400, str(e))
def db_collection_aggregate(res_id, collection_name):
    """Run the aggregation pipeline given in the request body."""
    parse_get_json(request)
    try:
        with UseResId(res_id):
            collection = get_db()[collection_name]
            output = collection.aggregate(request.json)
            return to_json(output)
    except OperationFailure as e:
        raise MWSServerError(400, e.message)
def get_collection_size(res_id, collection_name):
    """Return the byte size of a sandboxed collection, 0 if it doesn't exist.

    Raises MWSServerError 500 for any collstats failure other than a
    missing namespace.
    """
    internal_name = get_internal_coll_name(res_id, collection_name)
    try:
        return get_db().command({'collstats': internal_name})['size']
    except OperationFailure as e:
        # A collection that was never written to has no stats entry yet.
        if 'ns not found' in e.message:
            return 0
        raise MWSServerError(500, e.message)
def db_collection_aggregate(res_id, collection_name):
    """Run the aggregation pipeline supplied in the request body."""
    parse_get_json()
    with UseResId(res_id) as db:
        try:
            output = db[collection_name].aggregate(request.json)
        except (InvalidId, TypeError, InvalidDocument,
                OperationFailure) as e:
            raise MWSServerError(400, str(e))
        return to_json(output)
def db_collection_save(res_id, collection_name):
    """Save a document, within quota, and return a pretty result summary.

    Documents without an '_id' are inserted; documents with one are upserted
    by '_id'.  Raises MWSServerError 400 on a missing or invalid document and
    403 when the quota would be exceeded.
    """
    parse_get_json()
    document = request.json.get('document')
    if document is None:
        raise MWSServerError(400,
                             "'document' argument not found in the save request.")
    validate_document(document)
    req_size = calculate_document_size(document)

    # Get database
    with UseResId(res_id) as db:
        # Check quota
        size = db[collection_name].size()
        if size + req_size > current_app.config['QUOTA_COLLECTION_SIZE']:
            raise MWSServerError(403, 'Collection size exceeded')

        # Save document
        try:
            if "_id" not in document:
                # No _id: plain insert; the driver assigns an id.
                res = db[collection_name].insert(document)
                if res:
                    res_len = len(res) if isinstance(res, list) else 1
                    pretty_response = pretty_insert.format(res_len)
            else:
                # _id present: upsert by _id (third argument True).
                res = db[collection_name].update({"_id": document["_id"]},
                                                 document, True)
                n_matched = 0 if res.get('upserted') else 1
                n_upserted = 1 if res.get('upserted') else 0
                n_modified = res.get('nModified', 0)
                if n_upserted:
                    _id = res.get('upserted')[0].get('_id')
                    pretty_response = pretty_upsert.format(n_matched,
                                                           n_upserted,
                                                           n_modified, _id)
                else:
                    pretty_response = pretty_update.format(n_matched,
                                                           n_modified)
            return to_json({'pretty': pretty_response})
        except (InvalidId, TypeError, InvalidDocument,
                DuplicateKeyError) as e:
            raise MWSServerError(400, str(e))
def db_collection_count(res_id, collection_name):
    """Return the number of documents matching the request's 'query'."""
    parse_get_json()
    spec = request.json.get('query')
    with UseResId(res_id) as db:
        try:
            matched = db[collection_name].find(spec).count()
            return to_json({'count': matched})
        except InvalidDocument as e:
            raise MWSServerError(400, str(e))
def db_collection_save(res_id, collection_name):
    """Save a single document into a sandboxed collection, within quota.

    Raises MWSServerError 400 on a missing or invalid document, 403 when the
    quota would be exceeded.
    """
    # Guard against a missing JSON body: request.json is None when no JSON
    # payload was sent, and `'document' in None` would raise a TypeError
    # (resolves the old TODO; the client now gets a clean 400 instead of 500).
    if request.json and 'document' in request.json:
        document = request.json['document']
    else:
        error = '\'document\' argument not found in the save request.'
        raise MWSServerError(400, error)

    # Check quota
    size = get_collection_size(res_id, collection_name)
    req_size = len(BSON.encode(document))
    if size + req_size > current_app.config['QUOTA_COLLECTION_SIZE']:
        raise MWSServerError(403, 'Collection size exceeded')

    # Save document
    with UseResId(res_id):
        try:
            get_db()[collection_name].save(document)
            return empty_success()
        except InvalidDocument as e:
            # str(e) for consistency with the other handlers in this file.
            raise MWSServerError(400, str(e))
def db_collection_remove(res_id, collection_name):
    """Remove documents matching 'constraint'; options.justOne limits the
    removal to a single document.

    Returns a pretty summary of how many documents were removed.
    """
    parse_get_json()
    constraint = request.json.get('constraint') if request.json else {}
    # BUG FIX: previously `options` could be None (no JSON body) or False
    # (no 'options' key / explicit false), and `options.get('justOne')`
    # then raised AttributeError -> unhandled 500.  Normalize to a dict.
    options = request.json.get('options') if request.json else None
    if not options:
        options = {}
    multi = not options.get('justOne')
    with UseResId(res_id) as db:
        collection = db[collection_name]
        try:
            res = collection.remove(constraint, multi=multi)
            pretty_response = pretty_remove.format(res.get('n'))
        except (InvalidDocument, InvalidId, TypeError,
                OperationFailure) as e:
            raise MWSServerError(400, str(e))
    return to_json({'pretty': pretty_response})
def db_collection_remove(res_id, collection_name):
    """Remove documents matching 'constraint'; 'just_one' removes at most
    one document.
    """
    parse_get_json()
    spec = request.json.get('constraint') if request.json else {}
    single = request.json and request.json.get('just_one', False)
    with UseResId(res_id) as db:
        coll = db[collection_name]
        try:
            if single:
                # find_and_modify(remove=True) deletes exactly one match.
                coll.find_and_modify(spec, remove=True)
            else:
                coll.remove(spec)
        except (InvalidDocument, InvalidId, TypeError,
                OperationFailure) as e:
            raise MWSServerError(400, str(e))
    return empty_success()
def db_collection_find(res_id, collection_name):
    """Open a find cursor and return the first batch of results.

    The response includes 'count' (only available on the first batch) and a
    stringified 'cursor_id' the client uses with db_cursor_next to page
    through the remainder.
    """
    parse_get_json()
    result = {}
    batch_size = current_app.config['CURSOR_BATCH_SIZE']
    with UseResId(res_id, db=get_keepalive_db()) as db:
        limit = request.json.get('limit', 0)
        coll = db[collection_name]
        query = request.json.get('query')
        projection = request.json.get('projection')
        skip = request.json.get('skip', 0)
        sort = request.json.get('sort', {})
        sort = sort.items()

        cursor = coll.find(spec=query, fields=projection, skip=skip,
                           limit=limit)
        cursor.batch_size(batch_size)
        if len(sort) > 0:
            cursor.sort(sort)

        # count is only available before cursor is read so we include it
        # in the first response
        result['count'] = cursor.count(with_limit_and_skip=True)

        num_to_return = min(limit, batch_size) if limit else batch_size

        try:
            result['result'] = []
            for i in range(num_to_return):
                try:
                    result['result'].append(cursor.next())
                except StopIteration:
                    break
        except OperationFailure as e:
            # BUG FIX: this previously `return`ed the MWSServerError object,
            # handing the exception instance back as a response body instead
            # of raising a 400.
            raise MWSServerError(400, 'Cursor not found')

        # cursor_id is too big as a number, use a string instead
        result['cursor_id'] = str(cursor.cursor_id)
        # close the Cursor object, but keep the cursor alive on the server
        del cursor
        return to_json(result)
def db_collection_find(res_id, collection_name):
    """Run a find and return every matching document in one response."""
    parse_get_json()
    spec = request.json.get('query')
    projection = request.json.get('projection')
    skip = request.json.get('skip', 0)
    limit = request.json.get('limit', 0)
    ordering = request.json.get('sort', {}).items()
    with UseResId(res_id) as db:
        try:
            cursor = db[collection_name].find(spec, projection, skip, limit)
            if len(ordering) > 0:
                cursor.sort(ordering)
            documents = list(cursor)
        except (InvalidId, TypeError, OperationFailure) as e:
            raise MWSServerError(400, str(e))
        return to_json({'result': documents})
def parse_get_json(request):
    """Populate request.json from the first GET parameter's key.

    Raises MWSServerError 400 when the parameter is missing or is not
    valid JSON.
    """
    try:
        request.json = loads(request.args.keys()[0])
    except (IndexError, ValueError):
        # IndexError: a request with no GET parameters at all previously
        # escaped as an unhandled 500; treat it as the same 400.
        raise MWSServerError(400, 'Error parsing JSON data',
                             'Invalid GET parameter data')