def find_word(word, sample='all'):
    if sample == 'all':
        return list(collection.find({'word': word}))
    else:
        connections = list(collection.find({'word': word}))
        connection_count = len(connections)
        sample_size = min(sample, connection_count)
        return random.sample(connections, sample_size)
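# The snippets in this listing assume a module-level PyMongo `collection` plus the
# Flask helpers they reference. A minimal sketch of that assumed setup follows;
# the connection string, database, and collection names are placeholders, not taken
# from the original source, and genNavbar()/the HTML templates used later are
# application-specific and not reproduced here.
import datetime
import json
import random

from flask import Flask, request, jsonify, make_response, render_template
from pymongo import MongoClient

app = Flask(__name__)
client = MongoClient("mongodb://localhost:27017")      # assumed connection string
collection = client["example_db"]["example_collection"]  # placeholder names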
def getData():
    args = request.args
    query = {}
    if ("test" in args):
        query["test"] = args["test"]
    if ("type" in args):
        query["type"] = args["type"]
    if ("cardName" in args):
        query["cardName"] = args["cardName"]
    if ("subtest" in args):
        query["subtest"] = args["subtest"]
    if ("epochEnd" in args or "epochStart" in args):
        query["datetime"] = {}
        if ("epochEnd" in args):
            query["datetime"]["$lte"] = float(args["epochEnd"])
        if ("epochStart" in args):
            query["datetime"]["$gte"] = float(args["epochStart"])
    if ("sheetId" in args):
        query["sheetId"] = args["sheetId"]
    data = collection.find(query, projection={
        "_id": False,
        "sheetId": True,
        "data": True,
        "datetime": True,
        "test": True,
        "subtest": True,
        "type": True
    }).sort("datetime", 1)
    return json.dumps(list(data)), 200
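# A hedged illustration of the filter getData() assembles (parameter values here are
# hypothetical, not from the original source): for query-string arguments
# test=throughput, cardName=card-A, epochStart=1609459200, epochEnd=1612137600,
# the resulting Mongo filter is:
example_query = {
    "test": "throughput",
    "cardName": "card-A",
    "datetime": {"$lte": 1612137600.0, "$gte": 1609459200.0},
}
# which is then passed to collection.find(example_query, projection=...) and sorted
# ascending by "datetime".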
def unlock_graph():
    locked_nodes = list(collection.find({'lock': True}))
    for node in locked_nodes:
        collection.update_one(
            {
                'word': node['word'],
                'connection': node['connection']
            },
            {'$set': {'lock': False}})
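# Design note (an alternative sketch, not the original author's approach): when the
# intent is simply "clear every lock", PyMongo can do this in a single round trip
# instead of one update_one() call per locked document:
#     collection.update_many({'lock': True}, {'$set': {'lock': False}})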
def list_by_keywords(keyword, page, perpage):
    skip = (page - 1) * perpage
    resultlist = collection.find({"keywords": keyword.capitalize()}).skip(skip).limit(perpage)
    ret = []
    for i in resultlist:
        tmp = {}
        tmp['headline'] = i['headline']
        tmp['keywords'] = i['keywords']
        tmp['description'] = i['description']
        tmp['url'] = i['url']
        tmp['sha1'] = i['sha1']
        tmp['viewtime'] = i['viewtime']
        ret.append(tmp)
    return jsonify(ret)
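# Usage note (values are hypothetical): pages are 1-based, so
# list_by_keywords("python", page=3, perpage=10) skips the first 20 matches and
# returns documents 21-30. Skip/limit pagination still walks past the skipped
# documents server-side, so it gets slower for deep pages.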
def getSimpleData():
    # `card`, `test`, `subtest`, `type`, and `label` are not defined in this snippet;
    # they are assumed to come from the enclosing scope (e.g. request parameters).
    query = {
        "card": card,
        "test": test,
        "subtest": subtest,
        "type": type,
        "label": label
    }
    data = collection.find(query, projection={
        "_id": False,
        "datetime": True,
        "value": True
    }).sort("datetime", 1)
    return json.dumps(list(data)), 200
def getIncludeDates():
    start = [int(i) for i in request.cookies.get("start").split("-")]
    end = [int(i) for i in request.cookies.get("end").split("-")]
    query = {}
    query["test"] = request.cookies.get("test")
    query["type"] = request.cookies.get("type")
    query["cardName"] = {"$in": request.cookies.get("cards").split(",")}
    query["subtest"] = request.cookies.get("subtest")
    query["datetime"] = {}
    query["datetime"]["$lte"] = int(
        (datetime.datetime(year=end[0], month=end[1], day=end[2]) -
         datetime.datetime.utcfromtimestamp(0)).total_seconds())
    query["datetime"]["$gte"] = int(
        (datetime.datetime(year=start[0], month=start[1], day=start[2]) -
         datetime.datetime.utcfromtimestamp(0)).total_seconds())
    labels = request.cookies.get("labels").split(",")
    tempData = collection.find(query, projection={
        "_id": False,
        "datetime": True,
        "cardName": True,
        "sheetId": True,
        "data": True
    }).sort("datetime", 1)
    data = []
    for d in tempData:
        currLabels = list(d["data"].keys())
        for lab in currLabels:
            if lab in labels:
                data.append(d)
    response = make_response(
        render_template(
            "includeDate.html",
            result=[
                [
                    datetime.datetime.utcfromtimestamp(
                        i["datetime"]).date().isoformat() + " : " + i["cardName"],
                    i["sheetId"]
                ] for i in data
            ],  # Pass sheet ID because it is more reliable at getting the correct datapoints than date alone
            navbar=genNavbar(request, "exclude"),
            curr="include"))
    return response, 200
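# A minimal sketch of the cookie-to-epoch conversion getIncludeDates() performs,
# assuming the "start"/"end" cookies hold "YYYY-MM-DD" strings (the helper name is
# illustrative, not from the original source):
def date_cookie_to_epoch(value):
    year, month, day = (int(part) for part in value.split("-"))
    delta = datetime.datetime(year, month, day) - datetime.datetime.utcfromtimestamp(0)
    return int(delta.total_seconds())

# date_cookie_to_epoch("2021-01-01") == 1609459200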
def mongo_query(search: Dict):
    results = collection.find(search)
    for r in results:
        print(r)
def mongo_read():
    results = collection.find()
    for r in results:
        print(r)
def view_note():
    notes = []
    for note in collection.find():
        note['_id'] = str(note['_id'])
        notes.append(note)
    return jsonify(notes), 200
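# Why the str() conversion above matters: bson.ObjectId is not JSON serializable, so
# jsonify() would raise a TypeError on the raw documents. An alternative sketch (not
# the original approach) is to exclude the field in the projection instead:
#     notes = list(collection.find({}, projection={"_id": False}))
#     return jsonify(notes), 200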