def search(request):
    """Search accounts by exact username and return a sanitized JSON list.

    Each result gains a portrait URL and a 'followed' flag relative to the
    requesting user; internal/sensitive fields are stripped before dumping.
    """
    name = request.GET["username"]
    users = Account.objects(username=name).as_pymongo()
    if users.count() == 0:
        return HttpResponse(ju.dumps([]))
    # Fields that must never leak to the client (same set the original
    # deleted one-by-one).
    private_fields = (
        'buylog', '_types', 'is_active', 'is_superuser', 'is_staff',
        'last_login', '_cls', 'password', 'friends', 'date_joined',
    )
    users = list(users)
    for user in users:
        user["portrait"] = endpoint + "users/getPortrait?id=" + str(user['_id'])
        for field in private_fields:
            del user[field]
        user_id = user['_id']  # renamed from `id` to avoid shadowing the builtin
        user['followed'] = str(user_id) in request.user.friends
        del user['_id']
    return HttpResponse(ju.dumps(users))
def get_followed(request, userID): # Dummy
    """Return the merchants this user follows, each with a display address.

    NOTE(review): `result['followed']` looks like a {vendor_id: date}
    mapping — confirm against the code that writes that field.
    """
    try:
        collection = db.user
        result = collection.find_one({"userID":userID})
        merchants = result['followed']
        data = []
        merchant = db.merchants
        for x,y in merchants.items():
            mm = merchant.find_one({"vendor_id":int(x)})
            try:
                # Prefer the structured area, then free-text address,
                # then fall back to the merchant's web address.
                if 'address' in mm and 'area' in mm['address']:
                    address = mm['address']['area']
                elif 'address' in mm:
                    address = mm['address']['text']
                else:
                    address = mm['web_address']
            except:
                # Any malformed/missing merchant document degrades to ''.
                address = ''
            temp = {
                "vendor_name": mm['vendor_name'],
                "address": address,
                "cat": mm['cat'],
                "rating": mm['rating'],
                "date": y
            }
            data.append(temp)
        return HttpResponse(dumps({"data":data}), content_type='application/json')
    except Exception, e:  # Python 2 exception syntax
        # NOTE(review): the "sucess" typo is part of the wire format;
        # fixing it would break clients that key on it.
        return HttpResponse(dumps({"sucess":0, "error": "exception: "+str(e)}), content_type='application/json')
def sendmessage(): print "------------datetime-----------" print datetime.now() ts = int(time.time()) if not request.json["sender"] or not request.json["receiver"] or not request.json["message"]: return dumps({"status": "failed", "data": 0}) accounts = app.data.driver.db["messages"] message = { "sender": ObjectId(request.json["sender"]), "receiver": ObjectId(request.json["receiver"]), "seen": False, "message": request.json["message"], "timestamp": ts, "message_created": datetime.now(), } # accounts.update({'timestamp':1425368551},{'$set':{'seen':True}}) data = accounts.insert(message) if data: return dumps({"status": "ok", "data": data}) return dumps({"status": "failed", "data": 0})
def convert_id(data):
    """Stringify the '_id' field of a document (or every document in a
    list) and return the JSON serialization."""
    if not isinstance(data, list):
        data['_id'] = str(data['_id'])
        return dumps(data)
    for doc in data:
        doc['_id'] = str(doc['_id'])
    return dumps(data)
def get_message(message_id):
    """Look up a message by its ObjectId and return it as a JSON response,
    or an error payload when no such message exists."""
    message = app.db.messages.find_one({'_id': ObjectId(message_id)})
    if message is not None:
        return json_response(json_util.dumps(message))
    error_text = "Message with id '%s' does not exist" % message_id
    return json_response(json_util.dumps({'error': error_text}))
def addFavourite(): user_id = request.json['user_id'] fav_details =[] fav = request.json['property_id'] owner_id = request.json['owner_id'] dict = {"property_id":fav,"owner_id":owner_id} favourites = getCollection('favourites') results = favourites.find({"user_id":user_id},{'_id': False}) for record in results: user = record['user_id'] print user if user == user_id: fav_details = record['FavDetails'] fav_details.append(dict) user = { "user_id":user_id, "FavDetails":fav_details } if len(fav_details) == 0: favourites.insert_one(user) else: updateId = favourites.update_one({"user_id":user_id},{"$set": {"FavDetails":fav_details}},upsert = True) print updateId if updateId: return dumps(user) else: return dumps([{"error":"Error Occured"}])
def deleteFavourite(user_id,property_id,owner_id=""): favourites = getCollection('favourites') search_criteria = {} if 'user_id' in request.args: search_criteria['user_id'] = user_id if 'property_id' in request.args: search_criteria['FavDetails.property_id'] = property_id if 'owner_id' in request.args: search_criteria['FavDetails.owner_id'] = owner_id results=favourites.find(search_criteria,{'_id':False}) dict = {"property_id":property_id,"owner_id":owner_id} for record in results: user = record['user_id'] print user if user == user_id: fav_details = record['FavDetails'] if len(fav_details) == 0: favourites.remove({"user_id":record['user_id']}) if dict in fav_details: fav_details.remove(dict) updateId = favourites.update_one({"user_id":user_id},{"$set": {"FavDetails":fav_details}},upsert = True) if updateId: return dumps([{"Status" : "OK"}]) else: return dumps([{"error":"Error Occured"}])
def getTrendingProperty():
    """Return the active listing with the highest view_count as JSON,
    or an empty list when nothing matches."""
    postings = getCollection('postings')
    trending = (postings
                .find({'is_rented_or_cancel': False}, {'_id': False})
                .sort('view_count', pymongo.DESCENDING)
                .limit(1))
    if trending.count() > 0:
        return dumps(trending)
    return dumps([])
def saveSearch():
    """Persist a saved-search document built from the JSON request body."""
    body = request.json
    search = {
        "id": body['id'],
        "user": body['user'],
        "name": body['name'],
        "frequency": body['frequency'],
        "keyword": body['keyword'],
        "city": body['city'],
        "zipcode": toInt(body['zipcode']),
        "minrent": toInt(body['minrent']),
        "maxrent": toInt(body['maxrent']),
        "staticmapurl": body['staticmapurl'],
        "propertyType": body['propertyType'],
    }
    # The has* flags arrive in assorted truthy spellings; normalise them all
    # through strToBool, preserving the original key order.
    for flag in ('haskeyword', 'hascity', 'haszipcode',
                 'hasminrent', 'hasmaxrent', 'haspropertyType'):
        search[flag] = strToBool(str(body[flag]))
    searches = getCollection('searches')
    insertedId = searches.insert_one(search)
    if insertedId:
        return dumps([search])
    return dumps([{"error": "Error Occured"}])
def getNewsList(request):
    # Page through news items authored by the requesting user's friends.
    # `num` is a zero-based page index; pages hold 15 items each.
    num = request.GET['num']
    allFriends = Account.objects(pk__in=request.user.friends).all()
    number = News.objects(author__in=allFriends).count()
    if int(num)*15>number:
        # Requested page is past the end: return an empty list.
        result=[]
        return HttpResponse(dumps(result))
    result = News.objects(author__in=allFriends)[int(num)*15:int(num)*15 + 15].order_by("-time").as_pymongo()
    result = list(result)
    for news in result:
        # Strip mongoengine bookkeeping fields.
        del(news['_types'])
        del(news['_cls'])
        # Replace raw ids with API endpoints the client can follow.
        news['picture'] = endpoint + "news/getPicture?id=" + str(news['_id'])
        news['voice'] = endpoint + "news/getVoice?id=" + str(news['_id'])
        uid = news['author']
        # NOTE(review): dereferences author/good by re-querying News and
        # reading the referenced document back — presumably to resolve
        # DBRefs from as_pymongo(); confirm this is intentional.
        user = News.objects(author=uid).first().author
        gid = news['good']
        good = News.objects(good=gid).first().good
        news['good'] = endpoint + "goods/getGoods?id=" + str(good.pk)
        news['author'] = {"portrait": endpoint + "users/getPortrait?id=" + str(user.pk), "name": user.username}
        news['comments'] = endpoint + "news/getComments?id=" + str(news['_id'])
        # news['_id'] = endpoint + "news/getNewsDetail?id=" + str(news['_id'])
        del(news['_id'])
        news['time']=str(news['time'])
    result = dumps(result)
    return HttpResponse(result)
def get_recipes():
    """Return all recipes as JSON, or only those matching the JSON
    'filter' request header when one is supplied."""
    response.content_type = 'application/json'
    search_criteria = request.get_header('filter')
    if search_criteria is None:
        recipes = db_utils.get_all_recipes()
    else:
        recipes = db_utils.find_recipes(loads(search_criteria))
    return dumps([recipe.__dict__ for recipe in recipes])
def organizations(organization_id=None):
    """Return one organization's name by id, or the dumped list of all
    organizations when no id is given."""
    if organization_id is None:
        return dumps(Organization.dump_list())
    org = Organization.objects(id=organization_id).first()
    return dumps({"organization_name": org.organization_name})
def test_regex(self):
    # Both re.compile patterns and bson.Regex round-trip through json_util.
    for regex_instance in (
            re.compile("a*b", re.IGNORECASE),
            Regex("a*b", re.IGNORECASE)):
        res = self.round_tripped({"r": regex_instance})["r"]
        self.assertEqual("a*b", res.pattern)
    res = self.round_tripped({"r": Regex("a*b", re.IGNORECASE)})["r"]
    self.assertEqual("a*b", res.pattern)
    self.assertEqual(re.IGNORECASE, res.flags)

    # All unicode-era flags survive the round trip.
    unicode_options = re.I|re.M|re.S|re.U|re.X
    regex = re.compile("a*b", unicode_options)
    res = self.round_tripped({"r": regex})["r"]
    self.assertEqual(unicode_options, res.flags)

    # Some tools may not add $options if no flags are set.
    res = json_util.loads('{"r": {"$regex": "a*b"}}')['r']
    self.assertEqual(0, res.flags)

    self.assertEqual(
        Regex('.*', 'ilm'),
        json_util.loads(
            '{"r": {"$regex": ".*", "$options": "ilm"}}')['r'])

    # Check order.
    self.assertEqual(
        '{"$regex": ".*", "$options": "mx"}',
        json_util.dumps(Regex('.*', re.M | re.X)))
    self.assertEqual(
        '{"$regex": ".*", "$options": "mx"}',
        json_util.dumps(re.compile(b'.*', re.M | re.X)))
def dumps_bson(bson_obj, pretty=False):
    """Serialize a BSON document to UTF-8 JSON; sorted, indented output
    when `pretty` is set.

    Returns None for a falsy/empty input (original behaviour preserved).
    """
    if not bson_obj:
        return None
    kwargs = {"ensure_ascii": False, "encoding": "utf-8"}
    if pretty:
        kwargs.update(indent=2, sort_keys=True)
    return json_util.dumps(bson_obj, **kwargs)
async def intake(warehouse, product, quantity):
    """Record an intake of `quantity` units of `product` into `warehouse`.

    Creates an inventory row for a new (warehouse, product) pair or adds to
    the existing quantity; returns the affected inventory _id (JSON-dumped),
    or an error dict when the warehouse is unknown.
    """
    print("Intaking product {} into warehouse {}. Quantity: {}".format(
        product, warehouse, quantity
    ))
    warehouse_data = warehouses.find_one({'name': warehouse})
    if warehouse_data is None:
        return {'error': 'No warehouse with name {}'.format(warehouse)}
    # Resolve the product document over RPC from the product service.
    product_client = HARPCClient(broker_url=broker_url,
                                 service_name=product_service,
                                 timeout=5)
    product_data = loads(await product_client.get_by_name(name=product))
    intake_data = {
        'warehouse_id': warehouse_data['_id'],
        'product_id': product_data['_id']
    }
    result = inventories.find_one(intake_data)
    if result is None:
        print('Inserting new intake')
        intake_data['quantity'] = quantity
        inserted = inventories.insert_one(intake_data)
        return dumps(inserted.inserted_id)
    else:
        print('Updating existing intake')
        # Accumulate onto the stored quantity for this pair.
        quantity += result['quantity']
        inventories.update_one(
            {'_id': result['_id']},
            {'$set': {'quantity': quantity}}
        )
        return dumps(result['_id'])
def friendSuggestions():
    """Suggest up to four people near the caller who are not already friends.

    First restricts candidates to the caller's state; if that yields fewer
    than four, retries without the location filter. Excludes the caller,
    existing friends, and people with pending requests either way.
    """
    try:
        resultUsers = []
        peoples = app.data.driver.db['people']
        #data = peoples.find({ "location.state": {"$regex": user_data['location']['state'], "$options" :"$i" }})
        data = peoples.find({"$and": [
            {"location.state": {"$regex": request.json['location'], "$options": "$i"}},
            {"friends": {"$nin": request.json['friends']}},
            {"username": {"$ne": request.json['username']}},
            {"notifications.friendid": {"$ne": ObjectId(request.json['_id'])}},
            {"send_add_requests": {"$nin": [ObjectId(request.json['_id'])]}}
        ]}, {"_id": 1, "name": 1, "picture": 1}).limit(4)
        for temp in data:
            resultUsers.append(filterIdFields(temp, _id=True))
        if(len(resultUsers) >= 4):
            return dumps({'data': resultUsers, 'status': 200})
        # Not enough nearby matches: drop the location constraint.
        data2 = peoples.find({"$and": [
            {"friends": {"$nin": request.json['friends']}},
            {"username": {"$ne": request.json['username']}},
            {"notifications.friendid": {"$ne": ObjectId(request.json['_id'])}},
            {"send_add_requests": {"$nin": [ObjectId(request.json['_id'])]}}
        ]}, {"_id": 1, "name": 1, "picture": 1}).limit(4)
        for temp in data2:
            resultUsers.append(filterIdFields(temp, _id=True))
        if(len(resultUsers) >= 4):
            return dumps({'data': resultUsers, 'status': 200})
        return json.dumps({'data': 'no users found', 'status': False})
    except Exception as e:
        # BUG FIX: the original built this response but never returned it,
        # so the view returned None on any exception.
        return jsonify(status="")
def _execute_command(self, command):
    """Emulate a subset of MongoDB database commands ('count', 'filemd5').

    `command` is round-tripped through json_util so BSON types become
    plain JSON before inspection.
    """
    response = dict()
    command = json.loads(dumps(command))
    print 'DATABASE COMMAND', command
    keys = command.keys()
    if 'count' in keys:
        result = self.collection.count()
        print 'COUNT RESULT', result
        # TODO finish
    elif 'filemd5' in keys:
        # GridFS md5: gather the file's chunks in order (by n) and hash.
        collection = '%s.chunks' % command['root']
        file_id = loads(json.dumps(command['filemd5']))
        chunks = list()
        n = 0
        chunk = self.execute(collection, Database._QUERY, {
            'files_id': file_id,
            'n': n
        })
        while len(chunk) > 0:
            chunks.append(json.loads(dumps(chunk)))
            n += 1
            chunk = self.execute(collection, Database._QUERY, {
                'files_id': file_id,
                'n': n
            })
        if len(chunks) > 0:
            # NOTE(review): hashes the JSON dump of the chunk documents,
            # not the raw chunk bytes — verify against the caller's checks.
            filemd5 = hashlib.md5(dumps(chunks)).hexdigest()
            response.update(md5=filemd5)
        else:
            raise Exception(u'No chunks found for file with id %s' % file_id)
    return response
def test_document_etag_ignore_fields(self):
    # Ignored fields must not affect the computed etag.
    test = {'key1': 'value1', 'key2': 'value2'}
    ignore_fields = ["key2"]
    test_without_ignore = {'key1': 'value1'}
    challenge = dumps(test_without_ignore, sort_keys=True).encode('utf-8')
    with self.app.test_request_context():
        self.assertEqual(hashlib.sha1(challenge).hexdigest(),
                         document_etag(test, ignore_fields))

    # Ignored fields that are absent from the document are simply skipped.
    test = {'key1': 'value1', 'key2': 'value2'}
    ignore_fields = ["key3"]
    test_without_ignore = {'key1': 'value1', 'key2': 'value2'}
    challenge = dumps(test_without_ignore, sort_keys=True).encode('utf-8')
    with self.app.test_request_context():
        self.assertEqual(hashlib.sha1(challenge).hexdigest(),
                         document_etag(test, ignore_fields))

    # Nested fields can be ignored using dotted notation.
    test = {'key1': 'value1', 'dict': {'key2': 'value2', 'key3': 'value3'}}
    ignore_fields = ['dict.key2']
    test_without_ignore = {'key1': 'value1', 'dict': {'key3': 'value3'}}
    challenge = dumps(test_without_ignore, sort_keys=True).encode('utf-8')
    with self.app.test_request_context():
        self.assertEqual(hashlib.sha1(challenge).hexdigest(),
                         document_etag(test, ignore_fields))
def get_product(p_id=None):
    """Fetch a single product by Mongo ObjectId.

    Returns (payload, status). NOTE(review): the 400 branch returns a Flask
    jsonify response while 404/200 return json_util strings — kept as the
    original contract.
    """
    if p_id is None:  # fixed: `is None` instead of `== None`
        return jsonify({
            'status': 400,
            'error': 'No product ID supplied',
            'result': {
                'message': 'No product ID supplied'
            }
        }), 400
    product = db['products'].find_one({'_id': objectid.ObjectId(p_id)})
    if product is None:
        return json_util.dumps({
            'status': 404,
            'error': 'Product not found!',
            'result': {
                'message': 'Product not found',
            }
        }), 404
    return json_util.dumps({
        'status': 200,
        'error': None,
        'result': {
            'message': 'Successfully returned product',
            'data': product
        }
    }), 200
def releaseProject(msg, ws):
    u"""Run the release command for the project named in the message.

    Sends a failure frame over the websocket when the project is unknown or
    has no builds; otherwise runs the project's release_action build step.
    """
    project = msg.data.split(':')[1]
    # NOTE(review): return value unused; call kept in case it has side effects.
    projects = getProjectsList()
    project = getProject(project)
    if not project:  # guard clause replaces the original `if project: pass`
        return ws.send_str(dumps({
            'type': 'release',
            'status': 'fail',
            'data': {
                'reason': 'No project with this name'
            }
        }))
    logpath = getLogPath(project)
    if not os.path.isdir(logpath):
        return ws.send_str(dumps({
            'type': 'release',
            'status': 'fail',
            'data': {
                'reason': 'No builds'
            }
        }))
    builds = os.listdir(logpath)
    # NOTE(review): reverse sort then [-1] selects the *smallest* name —
    # presumably the oldest build; confirm this is intended (== min(builds)).
    lastBuild = sorted(builds, reverse=True)[-1]
    step = {
        "name": "release",
        "description": "Deploy as release",
        "cmd": project['release_action']
    }
    print(lastBuild, step)
    runBuildStep(project, step, lastBuild, {'lastBuild': lastBuild})
def rmkdir(file): try: st=os.stat(base+file) r = requests.get(url + 'mkdir?file=%s&source=%s&group=%s&ctime=%s' % (file,source,group,st.st_ctime), headers=header) print dumps(r.json(),indent=2) except Exception as e: print str(e)
def _junction_frame(junction_id):
    """Fetch one junction by id, round-trip it through extended JSON, and
    return the frame produced by process() (None when nothing matched)."""
    cursor = db.junctions.find({"_id": junction_id})
    json_str = json_util.dumps(cursor)
    junctions = json_util.loads(json_str)
    junctions = sorted(junctions, key=lambda k: k['route'])
    frame = None
    for junction in junctions:
        frame = process(junction["_id"])
    return frame


def run():
    """Plot STT quantile curves for a low/medium/high-traffic junction trio.

    The three identical fetch/sort/process sequences of the original are
    factored into _junction_frame.
    """
    d1 = _junction_frame("30/7/1")
    d2 = _junction_frame("13/2/1")
    d3 = _junction_frame("17/6/1")
    #d3.plot()
    d = df({
        "Low - 30/7/1": [d1["STT"].quantile(i/100) for i in range(1, 99, 10)],
        "Medium - 13/2/1": [d2["STT"].quantile(i/100) for i in range(1, 99, 10)],
        "High - 17/6/1": [d3["STT"].quantile(i/100) for i in range(1, 99, 10)]
    })
    d.plot(ylim=[0, 600])
    plt.show()
def user(request):
    """Token-gated endpoint returning raw connector data for permitted users.

    Requires one of the connector_raw scopes; researcher-scoped tokens may
    see other users' data, everyone else only their own.
    """
    decrypted = booleanize(request.REQUEST.get('decrypted', False))
    accepted_scopes = set(['connector_raw.all_data', 'connector_raw.all_data_researcher'])
    auth = authorization_manager.authenticate_token(request)
    if 'error' in auth:
        # Invalid token: audit-log and reject.
        log.error(audit.message(request, auth))
        return HttpResponse(json.dumps(auth), status=401)
    auth_scopes = set([x for x in auth['scope']])
    if len(accepted_scopes & auth_scopes) == 0:
        # Valid token but no accepted scope: audit-log and reject.
        log.error(audit.message(request, {'error':'token not authorized for any accepted scope %s'%str(list(accepted_scopes))}))
        return HttpResponse(json.dumps({'error':'token not authorized for any accepted scope %s'%str(list(accepted_scopes))}), status=401)
    is_researcher = False
    for s in auth_scopes:
        if s == 'connector_raw.all_data_researcher':
            is_researcher = True
    users_to_return = buildUsersToReturn(auth['user'], request, is_researcher = is_researcher)
    roles = []
    try:
        roles = [x.role for x in UserRole.objects.get(user=auth['user']).roles.all()]
    except:
        # Users without a UserRole record simply get an empty role list.
        pass
    own_data = False
    if len(users_to_return) == 1 and users_to_return[0] == auth['user'].username:
        own_data = True
    return userBuild(request, users_to_return, decrypted = decrypted, own_data = own_data, roles = roles)
def test_uuid(self):
    # UUID values serialize per the selected representation and round-trip.
    doc = {'uuid': uuid.UUID('f47ac10b-58cc-4372-a567-0e02b2c3d479')}
    self.round_trip(doc)
    self.assertEqual(
        '{"uuid": {"$uuid": "f47ac10b58cc4372a5670e02b2c3d479"}}',
        json_util.dumps(doc))
    self.assertEqual(
        '{"uuid": '
        '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "03"}}',
        json_util.dumps(
            doc, json_options=json_util.STRICT_JSON_OPTIONS))
    self.assertEqual(
        '{"uuid": '
        '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "04"}}',
        json_util.dumps(
            doc, json_options=json_util.JSONOptions(
                strict_uuid=True, uuid_representation=STANDARD)))
    self.assertEqual(
        doc, json_util.loads(
            '{"uuid": '
            '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "03"}}'))
    for uuid_representation in ALL_UUID_REPRESENTATIONS:
        options = json_util.JSONOptions(
            strict_uuid=True, uuid_representation=uuid_representation)
        self.round_trip(doc, json_options=options)
        # Ignore UUID representation when decoding BSON binary subtype 4.
        self.assertEqual(doc, json_util.loads(
            '{"uuid": '
            '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "04"}}',
            json_options=options))
def post(self):
    """Create a new blog document with a unique slug derived from its name."""
    blog = loads(self.request.body.decode("utf-8"))
    # .get() tolerates a body with no 'name' key at all (was a KeyError).
    if not blog.get('name'):
        self.write(dumps({'status':-1,'error':'name is mandatory'}))
        return
    #create a slug for the blog
    slug = slugify(blog['name'])
    #make sure slug is unique in the blog collection by suffixing -0, -1, ...
    # the following request will return all slugs in the collection
    blog_slugs = self._db['blog'].distinct('slug')
    nslug = slug
    i = 0
    while nslug in blog_slugs:
        nslug = '{}-{}'.format(slug, i)
        i += 1
    blog['slug'] = nslug
    try:
        self._db['blog'].insert(blog)
        self.write({'status':0,'error':'','slug':blog['slug']})
    except Exception as e:
        self.write(dumps({'status':-2,'error':str(e)}))
def JsonEncode(str,transform=False):
    """Strip or stringify '_id' on a cursor/list/dict of Mongo documents.

    transform=False deletes '_id'; transform=True replaces it with its
    $oid hex string. NOTE: the parameter shadows the builtin `str` and the
    code relies on Python 2's dict.has_key.
    """
    if isinstance(str,pymongo.cursor.Cursor) or isinstance(str,list) or isinstance(str,pymongo.command_cursor.CommandCursor):
        result = []
        for s in str:
            if s.has_key('_id'):
                if transform == False:
                    del s['_id']
                else:
                    try:
                        # round-trip through json_util to get the $oid hex
                        s['_id'] = json.loads(dumps(s['_id']))['$oid']
                    except Exception:
                        pass
            result.append(s)
    elif isinstance(str,dict):
        if str.has_key('_id'):
            if transform == False:
                del str['_id']
            else:
                try:
                    str['_id'] = json.loads(dumps(str['_id']))['$oid']
                except Exception:
                    pass
        result = str
    elif str is None:
        result = None
    elif len(str) == 0:
        result = str
    # NOTE(review): a non-None, non-empty input of any other type leaves
    # `result` unbound (UnboundLocalError) — callers apparently never pass one.
    return result
def dump(str):
    """Normalise Mongo values for JSON: ObjectId -> $oid hex string,
    datetime -> millisecond-precision text.

    Accepts a cursor/list of documents, a single document, None, or any
    empty sized object. NOTE: parameter name `str` shadows the builtin but
    is kept for backward compatibility with callers.
    """
    result = None
    if isinstance(str, pymongo.cursor.Cursor) or isinstance(str, list) \
            or isinstance(str, pymongo.command_cursor.CommandCursor):
        result = []
        for _s in str:
            if type(_s) == dict:
                s = {}
                for (k, v) in _s.items():
                    # fixed: compare against the types directly instead of
                    # allocating a fresh ObjectId()/utcnow() per comparison
                    if type(v) == ObjectId:
                        s[k] = json.loads(dumps(v))['$oid']
                    elif type(v) == datetime.datetime:
                        # trim microseconds to milliseconds
                        s[k] = v.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
                    else:
                        s[k] = v
            else:
                s = _s
            result.append(s)
    elif isinstance(str, dict):
        for (k, v) in str.items():
            if type(v) == ObjectId:
                str[k] = json.loads(dumps(v))['$oid']
            elif type(v) == datetime.datetime:
                str[k] = v.strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]
        result = str
    elif str is None:
        result = None
    elif len(str) == 0:
        result = str
    return result
def userBuild(request, users_to_return, decrypted = False, own_data = False, roles = []):
    """Build the user-list response for the raw-data endpoint.

    Returns the distinct users from device_inventory filtered down to
    `users_to_return` (or everyone when it contains 'all'), wrapped with
    timing/status metadata. NOTE: mutable default `roles=[]` is kept for
    interface compatibility; it is never mutated here.
    """
    _start_time = time.time()
    pretty = booleanize(request.REQUEST.get('pretty', False))
    response = {'meta': {}}
    db = db_wrapper.DatabaseHelper()
    # Removed: unused local `collection`, the no-op `if decrypted: pass`,
    # and the unreachable trailing `return HttpResponse('hello decrypted')`.
    response['results'] = [
        x['user']
        for x in db.execute_named_query(
            NAMED_QUERIES["get_unique_users_in_device_inventory"], None)
        if x['user'] in users_to_return or 'all' in users_to_return
    ]
    response['meta']['execution_time_seconds'] = time.time() - _start_time
    response['meta']['status'] = {'status': 'OK', 'code': 200, 'desc': ''}
    log.info(audit.message(request, response['meta']))
    if pretty:
        return render_to_response('pretty_json.html',
                                  {'response': json.dumps(response, indent=2)})
    return HttpResponse(json.dumps(response),
                        content_type="application/json",
                        status=response['meta']['status']['code'])
def user_exist(request):
    """Check whether a user with the posted email exists.

    On success returns the user's id/name/email (and registers a new push
    regId if unseen); otherwise success='0' with a reason string.
    """
    try:
        result = dict()
        data = json.loads(request.body)
        email = data['email']
        doc = db.user.find_one({"email": email})
        if doc is not None:
            result['success'] = '1'
            result['userID'] = doc['userID']
            result['name'] = doc['fname'] + ' ' + doc['lname']
            result['email'] = doc['email']
            if 'cname' in doc:
                result['cname'] = doc['cname']
            else:
                result['cname'] = ""
            regid = data['regId']
            # Register this device's push id if we haven't seen it before.
            if regid not in doc['regId']:
                db.user.update_one({"email": email}, {"$push": {"regId": regid}})
            return HttpResponse(dumps(result), content_type="application/json")
        else:
            result['success']='0'
            result['reason'] = "NO INFORMATION FOUND FOR GIVEN EMAIL : "+ email
            return HttpResponse(dumps(result), content_type="application/json")
    except Exception, e:  # Python 2 exception syntax
        return HttpResponse(dumps({"success": 0, "exception": str(e)}))
def create(roomname, x, y):
    # Create a game room document in meetain.rooms and report its roomID.
    # Any failure (connection, insert, ...) collapses to a generic "NO".
    try:
        connection = MongoClient()
        db = connection.meetain.rooms
        new_room = {
            "roomname": roomname,
            "roomID": roomID(),  # getting unique ID
            "player1ID": "",
            "player1name": "",
            "player2ID": "",
            "player2name": "",
            "player3ID": "",
            "player3name": "",
            "player4ID": "",
            "player4name": "",
            "playerleft": "",
            "numberofplayers": 0,
            "gamestatus": "",
            "x": x,
            "y": y,
            "time": datetime.now(),
            "gamemode": "1",
            "player1timestamp": "",
            "player2timestamp": "",
            "player3timestamp": "",
            "player4timestamp": ""
        }
        # create new room record
        db.insert(new_room)  # added to the database
        connection.close()
        # logging
        return dumps({"status": "OK", "roomID": new_room['roomID']})
    except:
        return dumps({"status": "NO", "roomID": None})
def find_song(id):
    """Fetch one song by ObjectId and return its JSON serialization.

    NOTE: parameter name `id` (shadows the builtin) is kept for callers.
    """
    document = mongo.db.songs.find_one({'_id': ObjectId(id)})
    return dumps(document)
def getNewsSummariesForUser(user_id, page_num):
    """Return one page of news for a user, ranked by interest/preference.

    The ranked digest order is cached in Redis per user; a cache miss
    rebuilds the full ranking and re-caches it.
    """
    page_num = int(page_num)
    begin_index = (page_num - 1) * NEWS_LIST_BATCH_SIZE
    end_index = page_num * NEWS_LIST_BATCH_SIZE

    db = mongodb_client.get_db()
    user_profile = db['user_preference_model'].find_one({"userId": user_id})
    interests = []
    if 'interest' in user_profile:
        # Get interest for the user
        interests = user_profile['interest']

    # The final list of news to be returned.
    sliced_news = []

    if redis_client.get(user_id) is not None:
        news_digests = pickle.loads(redis_client.get(user_id))
        # If begin_index is out of range, this will return empty list;
        # If end_index is out of range (begin_index is within the range), this
        # will return all remaining news ids.
        sliced_news_digests = news_digests[begin_index:end_index]
        print sliced_news_digests
        db = mongodb_client.get_db()
        sliced_news = list(db[NEWS_TABLE_NAME].find(
            {'digest': {
                '$in': sliced_news_digests
            }}))
    else:
        # sort by publishedAt
        total_news = list(db[NEWS_TABLE_NAME].find().sort([
            ('publishedAt', -1)
        ]).limit(NEWS_LIMIT))

        # Get preference for the user
        preference = news_recommendation_service_client.getPreferenceForUser(
            user_id)
        for news in total_news:
            news['level'] = 0
            if preference is not None and len(preference) > 0:
                # Earlier preference classes rank higher: 17, 16, 15, ...
                level = 17
                for item in preference:
                    if news['class'] == item:
                        news['level'] = level
                    level = level - 1
        # sort by preference; explicit interests trump everything (level 18)
        for news in total_news:
            for interest in interests:
                if news['class'] == interest:
                    news['level'] = 18
        total_news.sort(key=lambda x: x['level'], reverse=True)

        total_news_digests = map(lambda x: x['digest'], total_news)
        redis_client.set(user_id, pickle.dumps(total_news_digests))
        redis_client.expire(user_id, USER_NEWS_TIME_OUT_IN_SECONDS)

        sliced_news = total_news[begin_index:end_index]

    for news in sliced_news:
        # Remove text field to save bandwidth.
        del news['text']
        if news['publishedAt'].date() == datetime.today().date():
            news['time'] = 'today'
    return json.loads(dumps(sliced_news))
def queryMongoRSID(db, rsid):
    """Look up a dbSNP-151 record by rsid and return it as plain JSON types."""
    record = db.dbsnp151.find_one({"id": rsid})
    return json.loads(json_util.dumps(record))
#!/usr/local/bin/python from pydrill.client import PyDrill import json from bson.json_util import dumps from bson import json_util #print "Content-type: application/json\n\n"; print "Content-type: text/html\n\n" #print """<p>hehy</p>""" #drill = PyDrill(host='localhost', port=8047) from pymongo import MongoClient drill = PyDrill(host='localhost', port=8047) if not drill.is_active(): raise ImproperlyConfigured('Please run Drill first') yelp_reviews = drill.query(''' select avg(t.latitude) as latitude, avg(t.longitude) as longitude from `mongo.274_BI`.`yelp_dataset`t where true=repeated_contains(categories,'Bar')and t.stars>3.5 and t.city='Montreal' ''') print dumps(yelp_reviews) # pandas dataframe
def queryInfos(request):
    """Dump every document in dongcang.redirect as a JSON HTTP response."""
    redirects = myclient['dongcang']["redirect"]
    result = redirects.find()
    # unicode_escape round-trip keeps non-ASCII characters readable in output
    payload = json_util.dumps(result).encode('utf-8').decode('unicode_escape')
    return HttpResponse(payload, content_type="application/json")
def cursor_to_dict_converter(cursor):
    """Round-trip a Mongo cursor through extended JSON, yielding plain
    Python dicts/lists."""
    serialized = dumps(cursor)
    return loads(serialized)
def run_test(self):
    # Exercises one BSON corpus case file: valid cases must round-trip
    # between BSON and canonical/relaxed/degenerate extended JSON; error
    # cases must raise.
    for valid_case in case_spec.get('valid', []):
        description = valid_case['description']
        if description in _TESTS_TO_SKIP:
            continue

        # Special case for testing encoding UUID as binary subtype 0x04.
        if description.startswith('subtype 0x04'):
            encode_extjson = to_extjson_uuid_04
            encode_bson = to_bson_uuid_04
        else:
            encode_extjson = to_extjson
            encode_bson = to_bson

        # cB/cEJ: canonical BSON and extended JSON; rEJ/dEJ: optional
        # relaxed/degenerate variants.
        cB = binascii.unhexlify(
            valid_case['canonical_bson'].encode('utf8'))
        cEJ = valid_case['canonical_extjson']
        rEJ = valid_case.get('relaxed_extjson')
        dEJ = valid_case.get('degenerate_extjson')
        lossy = valid_case.get('lossy')

        decoded_bson = decode_bson(cB)

        if not lossy:
            # Make sure we can parse the legacy (default) JSON format.
            legacy_json = json_util.dumps(
                decoded_bson, json_options=json_util.LEGACY_JSON_OPTIONS)
            self.assertEqual(decode_extjson(legacy_json), decoded_bson)

        if deprecated:
            if 'converted_bson' in valid_case:
                converted_bson = binascii.unhexlify(
                    valid_case['converted_bson'].encode('utf8'))
                self.assertEqual(encode_bson(decoded_bson), converted_bson)
                self.assertJsonEqual(
                    encode_extjson(decode_bson(converted_bson)),
                    valid_case['converted_extjson'])
            # Make sure we can decode the type.
            self.assertEqual(decoded_bson, decode_extjson(cEJ))
            if test_key is not None:
                self.assertIsInstance(decoded_bson[test_key],
                                      _DEPRECATED_BSON_TYPES[bson_type])
            continue

        # Jython can't handle NaN with a payload from
        # struct.(un)pack if endianness is specified in the format string.
        if not (sys.platform.startswith("java") and
                description == 'NaN with payload'):
            # Test round-tripping canonical bson.
            self.assertEqual(encode_bson(decoded_bson), cB)
            self.assertJsonEqual(encode_extjson(decoded_bson), cEJ)

        # Test round-tripping canonical extended json.
        decoded_json = decode_extjson(cEJ)
        self.assertJsonEqual(encode_extjson(decoded_json), cEJ)
        if not lossy:
            self.assertEqual(encode_bson(decoded_json), cB)

        # Test round-tripping degenerate bson.
        if 'degenerate_bson' in valid_case:
            dB = binascii.unhexlify(
                valid_case['degenerate_bson'].encode('utf8'))
            self.assertEqual(encode_bson(decode_bson(dB)), cB)

        # Test round-tripping degenerate extended json.
        if dEJ is not None:
            decoded_json = decode_extjson(dEJ)
            self.assertJsonEqual(encode_extjson(decoded_json), cEJ)
            if not lossy:
                self.assertEqual(encode_bson(decoded_json), cB)

        # Test round-tripping relaxed extended json.
        if rEJ is not None:
            self.assertJsonEqual(to_relaxed_extjson(decoded_bson), rEJ)
            decoded_json = decode_extjson(rEJ)
            self.assertJsonEqual(to_relaxed_extjson(decoded_json), rEJ)

    # Malformed BSON must raise InvalidBSON.
    for decode_error_case in case_spec.get('decodeErrors', []):
        with self.assertRaises(InvalidBSON):
            decode_bson(
                binascii.unhexlify(
                    decode_error_case['bson'].encode('utf8')))

    # Malformed extended JSON must raise a type-appropriate error.
    for parse_error_case in case_spec.get('parseErrors', []):
        if bson_type == '0x13':
            self.assertRaises(DecimalException, Decimal128,
                              parse_error_case['string'])
        elif bson_type == '0x00':
            description = parse_error_case['description']
            if description in _NON_PARSE_ERRORS:
                decode_extjson(parse_error_case['string'])
            else:
                try:
                    decode_extjson(parse_error_case['string'])
                    raise AssertionError('exception not raised for test '
                                         'case: ' + description)
                except (ValueError, KeyError, TypeError, InvalidId):
                    pass
        elif bson_type == '0x05':
            try:
                decode_extjson(parse_error_case['string'])
                raise AssertionError('exception not raised for test '
                                     'case: ' + description)
            except (TypeError, ValueError):
                pass
        else:
            raise AssertionError('cannot test parseErrors for type ' +
                                 bson_type)
def get_portfolio():
    """Return the portfolio collection as plain Python objects (no _id)."""
    serialized = dumps(mongo.db.portfolio.find({}, {'_id': False}))
    return json.loads(serialized)
def api_user_by_id(user_id):
    """Find users whose numeric id matches and dump them as JSON."""
    matches = USERS.find({"id": int(user_id)})
    return dumps(matches)
def Display_songs():
    """Serialize every song document to a JSON string response."""
    return dumps(mongo.db.songs.find())
def admin_current_orders():
    """Dump all orders still marked 'current' as JSON."""
    current = mongo.db.orders.find({"order_status": "current"})
    return dumps(current)
def send_portfolio(sender_id):
    """Message each portfolio entry (pretty-printed JSON) to the sender."""
    for holding in mongo.db.portfolio.find({}, {'_id': False}):
        send_message(sender_id, dumps(holding, indent=4))
def shipsData():
    """Dump the ships collection as JSON."""
    return dumps(mongo.db.ships.find())
def admin_past_orders():
    """Dump all orders marked 'past' as JSON."""
    past = mongo.db.orders.find({"order_status": "past"})
    return dumps(past)
def rocketsData():
    """Dump the rockets collection as JSON."""
    return dumps(mongo.db.rockets.find())
# Script: load topic data from Atlas and prepare the words-list collection.
from pymongo import MongoClient
from bson.json_util import dumps
import json

client = MongoClient(
    # SECURITY(review): credentials are hard-coded in the URI; move them to
    # configuration/environment instead of source control.
    'mongodb+srv://xinyi:[email protected]/test?retryWrites=true'
)
db = client.EnglishMore
try:
    contacts = db.topicData.find()
    lala = dumps(contacts)
except Exception as e:
    # BUG FIX: the original dumped str(Exception) — the exception *class* —
    # instead of the caught instance, losing the actual error message.
    lala = dumps({'error': str(e)})

# (duplicate `import json` removed — already imported above)
with open('gre_words2.json') as json_file:
    data = json.load(json_file)
keys = list(data.keys())

import pymongo
myclient = pymongo.MongoClient(
    'mongodb+srv://xinyi:[email protected]/test?retryWrites=true'
)
mydb = myclient.EnglishMore
mycol = mydb['wordsList']
def missionsData():
    """Dump the missions collection as JSON."""
    return dumps(mongo.db.missions.find())
def roadsterData():
    """Dump the roadster collection as JSON."""
    return dumps(mongo.db.roadster.find())
def launchesData():
    """Dump the launches collection as JSON."""
    return dumps(mongo.db.launches.find())
def payloadsData():
    """Dump the payloads collection as JSON."""
    return dumps(mongo.db.payloads.find())
def historyData():
    """Dump the history collection as JSON."""
    return dumps(mongo.db.history.find())
def launchpadsData():
    """Dump the launchpads collection as JSON."""
    return dumps(mongo.db.launchpads.find())
def coresData():
    """Dump the cores collection as JSON."""
    return dumps(mongo.db.cores.find())
def infoData():
    """Dump the info collection as JSON."""
    return dumps(mongo.db.info.find())
def actions():
    """Return all records matching the 'ticket' query argument as JSON."""
    matches = MyBase.MData.find({"ticket": request.args["ticket"]})
    return Response(response=dumps(matches), status=200,
                    mimetype='application/json')
def dragonsData():
    """Dump the dragons collection as JSON."""
    return dumps(mongo.db.dragons.find())
def get_issues():
    """Return every document in the collection as a JSON array string."""
    # list() replaces the original manual append loop; dumps still handles
    # BSON types such as ObjectId in the documents.
    return dumps(list(collection.find()))
def capsulesData():
    """Dump the capsules collection as JSON."""
    return dumps(mongo.db.capsules.find())
# Script: enumerate arbitrage markets from the cryptowatch DB and iterate
# the configured trading pairs.
import pymongo
import bson.json_util as json_util
import json
import numpy as np

pairs = ['ethbtc', 'xrpbtc', 'bchbtc', 'ltcbtc', 'etcbtc', 'eosbtc',
         'adabtc', 'zecbtc', 'omgbtc', 'dashbtc', 'trxbtc', 'ontbtc',
         'bttbtc', 'iostbtc', 'zilbtc', 'btmbtc', 'elabtc', 'neobtc',
         'qtumbtc', 'nasbtc', 'elfbtc', 'hcbtc', 'bsvbtc']

# Fixed: the original rebound `db` from client to database; use two names.
client = pymongo.MongoClient('localhost', 27017)
db = client['cryptowatch']
col = db['arbitrage']

res = col.find({'market': {'$exists': True}})
res = json.loads(json_util.dumps(res))

# Collect distinct market names preserving first-seen order; a seen-set
# replaces the original O(n^2) `not in list` membership test.
markets = []
seen = set()
for doc in res:
    market = doc['market']
    if market not in seen:
        seen.add(market)
        markets.append(market)

# Map each market name to its positional index (was a range(len(...)) loop).
market_position = {market: idx for idx, market in enumerate(markets)}

for pair in pairs:
    res = col.find({'pair': pair})
    res = json_util.dumps(res)
def get_one_issues(id):
    """Fetch a single issue by ObjectId and return its JSON string.

    NOTE: parameter name `id` (shadows the builtin) is kept for callers.
    """
    issue = collection.find_one({'_id': ObjectId(id)})
    return dumps(issue)