def api_view_lexeme_collections():
    """Return a JSON list of complete lexeme collection objects.

    Query parameters:
        count: maximum number of collections to return (default 10).
        type:  'sentence' for sentence lexemes; anything else defaults
               to word lexemes.
        tags:  comma-separated tag list; every returned collection must
               carry ALL of the given tags.
    """
    # Parse the count parameter; a missing or malformed value falls back
    # to 10.  Non-positive values also fall back, because passing 0 to
    # Mongo's limit() means "no limit" and would dump every document.
    try:
        count = int(request.args.get('count'))
    except (TypeError, ValueError):
        count = 10
    if count < 1:
        count = 10

    # Pick the lexeme type and its backing Mongo collection.
    # NOTE(review): sentence lexemes are stored in `paragraphs` and word
    # lexemes in `sentences` -- a collection is named for the larger
    # unit its lexemes build up to; confirm against the schema.
    typ_param = request.args.get('type')
    if typ_param == 'sentence':
        typ = 'sentence'
        db_collection = MONGO.db.paragraphs
    else:  # default is word
        typ = 'word'
        db_collection = MONGO.db.sentences

    # Parse the tag filter into a list of tag strings (empty = no filter).
    tags_param = request.args.get('tags')
    tags = tags_param.split(',') if tags_param else []

    # Query for complete collections, newest first, ANDing all tags.
    if tags:
        query = {"$and": [{'complete': True}, {'tags': {"$all": tags}}]}
    else:
        query = {'complete': True}
    lcs = db_collection.find(query).sort("_id", -1).limit(count)

    # Check for no results.
    if lcs.count() < 1:
        return 'ERROR: No complete lexeme collections could be found', 503

    # Wrap each BSON document in the matching LexemeCollection type and
    # accumulate the JSON views.
    json_list = []
    for lc_bson_object in lcs:
        if typ == 'word':
            lexc = WordCollection()
        elif typ == 'sentence':
            lexc = SentenceCollection()
        lexc.import_json(lc_bson_object)
        json_list.append(lexc.view('json'))

    return json.dumps(json_list), 200
def get_inc_lex_collections():
    """Return a single incomplete lexeme collection from a GET http request."""
    # Decide which lexeme type was requested and which Mongo collection
    # backs it (defaulting to word lexemes).
    typ_param = request.args.get('type')
    if typ_param == 'sentence':
        typ = 'sentence'
        db_collection = MONGO.db.paragraphs
    else:  # default is word
        typ = 'word'
        db_collection = MONGO.db.sentences

    # Find an incomplete collection that nobody has checked out yet:
    # the 'key' field must either be absent or be the empty string.
    lc_bson = db_collection.find_one({
        'complete': False,
        '$or': [{'key': {'$exists': False}}, {'key': ""}],
    })

    # Nothing available to hand out.
    if lc_bson is None:
        # 503 Service Unavailable
        return 'ERROR: No incomplete sentences are available.', 503

    # Build the matching LexemeCollection and load the query result into it.
    lexc = WordCollection() if typ == 'word' else SentenceCollection()
    lexc.import_json(lc_bson)

    # Check out a fresh key for this collection.
    key = check_out()

    # Stamp the key onto the document so concurrent requests cannot be
    # handed the same collection.
    db_collection.update({'_id': lc_bson['_id']},
                         {'$set': {'key': key}},
                         upsert=False)

    # Ship the collection's JSON view together with its checkout key.
    prejson = {'lexemecollection': lexc.view('json'), 'key': key}
    return json.dumps(prejson), 200
def api_append_to_lexeme_collection():
    """Endpoint for continuing or completing an incomplete lexeme collection.

    In order for this to work, the client must verify that it was the one
    who originally sent the request for the incomplete LC, by passing back
    the key that was sent with it.

    Form parameters:
        addition: the lexeme text to append (required).
        key:      checkout key issued with the collection (required).
        type:     'sentence' or word lexeme (required; non-'sentence'
                  defaults to word).
        complete: 'true' to finish the collection (optional, defaults
                  to continuing).
    """
    try:
        addition = request.form["addition"]
        key = request.form["key"]
        typ_param = request.form["type"]
    except KeyError:
        return "ERROR: lexeme type, key, or addition is missing", 400

    # 'complete' is optional; anything other than the literal 'true'
    # (including absence) means "continue".
    try_to_complete = request.form.get("complete", "false") == 'true'

    # Pick the lexeme type and its backing Mongo collection.
    if typ_param == 'sentence':
        typ = 'sentence'
        db_collection = MONGO.db.paragraphs
    else:  # default is word
        typ = 'word'
        db_collection = MONGO.db.sentences

    # Reject timed-out keys; absence from LC_MAP means the key expired.
    if key not in LC_MAP:
        return "ERROR: This lexeme collection has timed out", 408

    # Strip any whitespace from the new addition.
    addition = addition.strip()

    # The addition is assumed to be a single lexeme; wrap it in the
    # matching class, and build the collection object used later.
    if typ == 'word':
        new_lexeme = Word(addition)
        lexc = WordCollection()
    elif typ == 'sentence':
        new_lexeme = Sentence(addition)
        lexc = SentenceCollection()

    # Validate it as an ordinary or ending lexeme, depending on the
    # complete parameter.
    if try_to_complete:
        if not new_lexeme.is_valid_end():
            return ('ERROR: ' + new_lexeme.get_text() +
                    ' is not a valid ending ' + new_lexeme.type()), 400
    else:
        if not new_lexeme.is_valid():
            return ('ERROR: ' + new_lexeme.get_text() +
                    ' is not a valid ' + new_lexeme.type()), 400

    # Look up the checked-out document by the key passed in.
    lc_bson_to_be_completed = db_collection.find_one({"key": key})
    if lc_bson_to_be_completed is None:
        # this should never happen
        return 'ERROR: No lexeme collection matching your key was found in the db', 500

    # Merge the stored data with the new lexeme.
    lexc.import_json(lc_bson_to_be_completed)
    lexc.append(new_lexeme)

    # Whole-collection validation only applies when completing the LC.
    if try_to_complete and not lexc.validate():
        # with proper validation on all API behaviors this should never
        # happen either
        return 'ERROR: The overall lexeme collection is not valid', 400

    if try_to_complete:
        # Replace the working document with a finished one.  Use .get so
        # a document that lacks a 'tags' field cannot raise KeyError and
        # surface as an unhandled 500.
        db_collection.insert(
            {"lexemes": lexc.view("string"),
             "complete": try_to_complete,
             "tags": lc_bson_to_be_completed.get('tags', [])})
        db_collection.remove({"_id": lc_bson_to_be_completed['_id']})
    else:
        # Save progress and release the checkout key so the collection
        # can be handed out again.
        db_collection.update(
            {"_id": lc_bson_to_be_completed['_id']},
            {'$set': {"complete": try_to_complete,
                      "lexemes": lexc.view("string")},
             '$unset': {"key": ""}},
            upsert=False)

    # Retire the key from the timeout map.
    del LC_MAP[key]

    # return 200 OK
    return "Successfully appended to the lexeme collection", 200