def test_datetime(self):
    # only millis, not micros
    self.round_trip({"date": datetime.datetime(2009, 12, 9, 15, 49, 45,
                                                191000, utc)})

    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000+0000"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000+00:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000Z"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    # No explicit offset
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    # Localtime behind UTC
    jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000-0800"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000-08:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    # Localtime ahead of UTC
    jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000+0100"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000+01:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])

    dtm = datetime.datetime(1, 1, 1, 1, 1, 1, 0, utc)
    jsn = '{"dt": {"$date": -62135593139000}}'
    self.assertEqual(dtm, json_util.loads(jsn)["dt"])
    jsn = '{"dt": {"$date": {"$numberLong": "-62135593139000"}}}'
    self.assertEqual(dtm, json_util.loads(jsn)["dt"])
def get_photos(fromtimestamp, totimestamp, access_token):
    photos = []
    fields = 'album,from,id,name,created_time,name_tags,place,source,link,updated_time,tags'
    url = 'https://graph.facebook.com/me/photos?limit=1000&access_token=%s' % access_token
    #if fromtimestamp is not None:
    #    url += '&since=%s' % fromtimestamp
    #url += '&until=%s' % totimestamp
    url += '&fields=%s' % fields
    url += '&type=tagged'
    print url
    f = urllib2.urlopen(url)
    json_string = f.read()
    for photo in loads(json_string)['data']:
        photos.append(photo)
    url = url[0:(len(url) - len('tagged'))]
    url += 'uploaded'
    print url
    f.close()
    f = urllib2.urlopen(url)
    json_string = f.read()
    for photo in loads(json_string)['data']:
        photos.append(photo)
    f.close()
    return photos
def anonymizeCallLog(self, document):
    try:
        if len(document['name']) > 0:
            document['name'] = json.loads(document['name'])["ONE_WAY_HASH"]
        document['numbertype'] = json.loads(document['numbertype'])["ONE_WAY_HASH"]
        document['number'] = json.loads(document['number'])["ONE_WAY_HASH"]
    except KeyError:
        pass
def test_regex(self):
    res = self.round_tripped({"r": re.compile("a*b", re.IGNORECASE)})["r"]
    self.assertEqual("a*b", res.pattern)
    if PY3:
        # re.UNICODE is a default in python 3.
        self.assertEqual(re.IGNORECASE | re.UNICODE, res.flags)
    else:
        self.assertEqual(re.IGNORECASE, res.flags)

    all_options = re.I | re.L | re.M | re.S | re.U | re.X
    regex = re.compile("a*b", all_options)
    res = self.round_tripped({"r": regex})["r"]
    self.assertEqual(all_options, res.flags)

    # Some tools may not add $options if no flags are set.
    res = json_util.loads('{"r": {"$regex": "a*b"}}')['r']
    expected_flags = 0
    if PY3:
        expected_flags = re.U
    self.assertEqual(expected_flags, res.flags)

    self.assertEqual(
        Regex('.*', 'ilm'),
        json_util.loads(
            '{"r": {"$regex": ".*", "$options": "ilm"}}',
            compile_re=False)['r'])
def collection_page(dbname, collname):
    rc = app.rc
    try:
        coll = rc.client[dbname][collname]
    except (KeyError, AttributeError):
        abort(404)
    status = status_id = None
    if request.method == 'POST':
        form = request.form
        if 'shutdown' in form:
            return shutdown()
        elif 'cancel' in form:
            body = json_util.loads(form['body'].strip())
            status = 'canceled'
            status_id = str(body['_id'])
        elif 'save' in form:
            body = json_util.loads(form['body'].strip())
            coll.save(body)
            status = 'saved ✓'
            status_id = str(body['_id'])
        elif 'add' in form:
            body = json_util.loads(form['body'].strip())
            added = insert_one(coll, body)
            status = 'added ✓'
            status_id = str(added.inserted_id)
        elif 'delete' in form:
            body = json_util.loads(form['body'].strip())
            deled = delete_one(coll, body)
    return render_template('collection.html', rc=rc, dbname=dbname,
                           len=len, str=str, status=status,
                           status_id=status_id, objectid=objectid,
                           collname=collname, coll=coll,
                           json_util=json_util, min=min)
def test_binary(self):
    bin_type_dict = {"bin": Binary(b("\x00\x01\x02\x03\x04"))}
    md5_type_dict = {
        "md5": Binary(b(" n7\x18\xaf\t/\xd1\xd1/\x80\xca\xe7q\xcc\xac"),
                      MD5_SUBTYPE)}
    custom_type_dict = {"custom": Binary(b("hello"), USER_DEFINED_SUBTYPE)}

    self.round_trip(bin_type_dict)
    self.round_trip(md5_type_dict)
    self.round_trip(custom_type_dict)

    # PYTHON-443 ensure old type formats are supported
    json_bin_dump = json_util.dumps(bin_type_dict)
    self.assertTrue('"$type": "00"' in json_bin_dump)
    self.assertEqual(
        bin_type_dict,
        json_util.loads('{"bin": {"$type": 0, "$binary": "AAECAwQ="}}'))

    json_bin_dump = json_util.dumps(md5_type_dict)
    self.assertTrue('"$type": "05"' in json_bin_dump)
    self.assertEqual(
        md5_type_dict,
        json_util.loads('{"md5": {"$type": 5, "$binary":'
                        ' "IG43GK8JL9HRL4DK53HMrA=="}}'))

    json_bin_dump = json_util.dumps(custom_type_dict)
    self.assertTrue('"$type": "80"' in json_bin_dump)
    self.assertEqual(
        custom_type_dict,
        json_util.loads('{"custom": {"$type": 128, "$binary":'
                        ' "aGVsbG8="}}'))

    # Handle mongoexport where subtype >= 128
    self.assertEqual(
        128,
        json_util.loads('{"custom": {"$type": "ffffff80", "$binary":'
                        ' "aGVsbG8="}}')["custom"].subtype)
    self.assertEqual(
        255,
        json_util.loads('{"custom": {"$type": "ffffffff", "$binary":'
                        ' "aGVsbG8="}}')["custom"].subtype)
def test_regex(self):
    for regex_instance in (
            re.compile("a*b", re.IGNORECASE),
            Regex("a*b", re.IGNORECASE)):
        res = self.round_tripped({"r": regex_instance})["r"]

        self.assertEqual("a*b", res.pattern)
        res = self.round_tripped({"r": Regex("a*b", re.IGNORECASE)})["r"]
        self.assertEqual("a*b", res.pattern)
        self.assertEqual(re.IGNORECASE, res.flags)

    unicode_options = re.I | re.M | re.S | re.U | re.X
    regex = re.compile("a*b", unicode_options)
    res = self.round_tripped({"r": regex})["r"]
    self.assertEqual(unicode_options, res.flags)

    # Some tools may not add $options if no flags are set.
    res = json_util.loads('{"r": {"$regex": "a*b"}}')['r']
    self.assertEqual(0, res.flags)

    self.assertEqual(
        Regex('.*', 'ilm'),
        json_util.loads(
            '{"r": {"$regex": ".*", "$options": "ilm"}}')['r'])

    # Check order.
    self.assertEqual(
        '{"$regex": ".*", "$options": "mx"}',
        json_util.dumps(Regex('.*', re.M | re.X)))
    self.assertEqual(
        '{"$regex": ".*", "$options": "mx"}',
        json_util.dumps(re.compile(b'.*', re.M | re.X)))
def put(self, _id=None, history=None):
    if history:
        abort(400)
    if _id:
        data = request.get_json()
        if not data:
            abort(400)
        root_args = self.root_parser.parse_args()
        if 'admin' in data:
            if data['admin'] == "true":
                data['admin'] = True
            else:
                data['admin'] = False
        user = models.User.objects.get_or_404(id=_id)
        user.update(**data)
        user = models.User.objects.get_or_404(id=_id)
        json = loads(user.to_json())
        json['timestamp'] = json['timestamp'].strftime('%Y-%m-%dT%H:%M:%SZ')
        json = JSONEncoder().encode(json)
        response = jsonify(loads(json))
        response.status_code = 200
        return response
    else:
        abort(400)
def test_partial_record1():
    locate_name = ['comments']
    bson_raw_id_data = '{"_id": {"$oid": "56b8da51f9fcee1b00000006"}}'
    array_bson_raw_data = '[{ \
        "_id": {"$oid": "56b8f344f9fcee1b00000018"}, \
        "updated_at": {"$date": "2016-02-08T19:57:56.678Z"}, \
        "created_at": {"$date": "2016-02-08T19:57:56.678Z"}}]'
    collection_name = 'a_inserts'
    schema_engine = get_schema_engine(collection_name)
    node = schema_engine.locate(locate_name)
    bson_object_id = loads(bson_raw_id_data)
    bson_data = loads(array_bson_raw_data)
    whole_bson = node.json_inject_data(bson_data,
                                       bson_object_id.keys()[0],
                                       bson_object_id.values()[0])
    print whole_bson
    tables = Tables(schema_engine, whole_bson)
    tables.load_all()
    print "tables.tables.keys()", tables.tables.keys()
    table_name = 'a_insert_comments'
    comments_t = tables.tables[table_name]
    print "sql_column_names", comments_t.sql_column_names
    assert(comments_t.sql_columns['updated_at'])
    id_oid = comments_t.sql_columns['id_oid'].values[0]
    print "id_oid", id_oid
    assert(id_oid == "56b8f344f9fcee1b00000018")
    assert(len(comments_t.sql_columns['id_oid'].values) == 1)
    parent_id_oid = comments_t.sql_columns['a_inserts_id_oid'].values[0]
    print "parent_id_oid", parent_id_oid
    assert(parent_id_oid == "56b8da51f9fcee1b00000006")
    # both tables: a_inserts, comments should be available
    assert(len(tables.tables.keys()) == 2)
def main():
    #fout = open("allids", "w")
    client = MongoClient()
    db = client.nba_main
    db2 = client.nba_stream_id
    #org_coll = db.TEST_2014626  # read from
    #new_coll = db.NEW_TEST      # write to
    colls = db.collection_names()
    value = []
    for col in colls:
        if 'system.' in col or 'twitter_' in col:
            continue
        cur = db[col].find({}, {'entities.user_mentions.id_str': 1,
                                'in_reply_to_user_id_str': 1,
                                'user.id_str': 1,
                                'id_str': 1,
                                '_id': 0})
        for tweet in cur:
            entry = dumps(tweet)
            tweetid = loads(entry)['id_str']
            mentionid = loads(entry)['entities']['user_mentions']
            for id in mentionid:
                if id['id_str'] not in value:
                    value.append(id['id_str'])
                    db2[col].insert({'UserID': id['id_str'], 'TweetId': tweetid})
            replyid = loads(entry)['in_reply_to_user_id_str']
            if replyid not in value and replyid:
                value.append(replyid)
                db2[col].insert({'UserID': replyid, 'TweetId': tweetid})
            userid = loads(entry)['user']['id_str']
            if userid not in value:
                value.append(userid)
                db2[col].insert({'UserID': userid, 'TweetId': tweetid})
def show_foucs_users():
    if not is_login():
        return jsonify({
            'status': 401,
            'data': 'user not log in'
        })
    user = get_current_user()
    focus_users = loads(user)['focus_users']
    if not focus_users:
        return jsonify({
            'status': 200,
            'data': ""
        })
    else:
        return_users = []
        for single_user_id in focus_users:
            single_user = User.get_user(single_user_id)
            if single_user != 'null':
                single_user = loads(single_user)
                del single_user['focus_stories']
                del single_user['focus_users']
                del single_user['phone']
                del single_user['password']
                return_users.append(single_user)
        return jsonify({
            'status': 200,
            'data': convert_id(return_users)
        })
def collect_plane():
    if is_login():
        user = get_current_user()
        user_id = loads(user)['_id']
        collect_story_id = ObjectId(json.loads(request.data)['story_id'])
        story = Story.get_story_by_id(collect_story_id)
        if story == 'null':
            return jsonify({
                'status': 403,
                'data': 'invalid story id'
            })
        story = loads(story)
        story['total_collections'] += 1
        result = User.add_focus_story(user_id, collect_story_id)
        if result == '':
            result_story = Story.update_story(story)
            return jsonify({
                'status': 200,
                'data': 'success'
            })
        else:
            return jsonify({
                'status': 403,
                'data': result
            })
    else:
        return jsonify({
            'status': 401,
            'data': 'user not log in'
        })
def to_service(self, round, team, service, flag, flag_id):
    team = json_util.loads(json.dumps(team))
    service = json_util.loads(json.dumps(service))
    self.db.flags.insert_one({
        'round': round,
        'team': team,
        'service': service,
        'flag': flag,
        'flag_id': flag_id,
        'stolen': False,
        'timestamp': time.time()
    })
    path = self.path_to_checkers + service['name'] + '/' + self.filename_checkers
    action = ''
    try:
        action = 'check'
        self.checker.check(team['host'], path)
        action = 'put'
        self.checker.put(team['host'], path, flag, flag_id)
        action = 'get'
        self.checker.get(team['host'], path, flag, flag_id)
        self.update_scoreboard(team, service, 101)
    except Exception as error:
        code, message = error.args
        print(error)
        Message.fail(team['name'] + ' ' + service['name'] + ' ' + action +
                     ' => error (message: ' + str(message) + ')')
        self.update_scoreboard(team, service, code, message)
def show_foucs_stories():
    if not is_login():
        return jsonify({
            'status': 401,
            'data': 'user not log in'
        })
    user = get_current_user()
    focus_stories = loads(user)['focus_stories']
    if not focus_stories:
        return jsonify({
            'status': 200,
            'data': ""
        })
    else:
        return_stories = []
        for single_story_id in focus_stories:
            single_story = Story.get_story_by_id(single_story_id)
            if single_story != 'null':
                single_story = loads(single_story)
                del single_story['paragraph_ids']
                del single_story['current_owner']
                return_stories.append(single_story)
        return jsonify({
            'status': 200,
            'data': convert_id(return_stories)
        })
def test_complete_partial_record4():
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s')
    object_id_bson_raw_data = '{ \
        "_id": { "$oid": "56b8da59f9fcee1b00000007" } \
        }'
    array_bson_raw_data = '{ \
        "comments.0": { \
            "_id": {"$oid": "56b8f344f9fcee1b00000018"}, \
            "updated_at": "2016-02-08T19:57:56.678Z", \
            "created_at": "2016-02-08T19:57:56.678Z"} \
        }'
    dbname = 'rails4_mongoid_development'
    db_schemas_path = '/'.join(['test_data', 'schemas', dbname])
    schemas = get_schema_engines_as_dict(db_schemas_path)
    schema_engine = schemas['posts']
    bson_data = loads(array_bson_raw_data)
    object_id_bson_data = loads(object_id_bson_raw_data)
    partial_inserts_list = get_tables_data_from_oplog_set_command(
        schema_engine, bson_data, object_id_bson_data)
    tables = partial_inserts_list[0].tables
    assert(tables['post_comments'].sql_columns['posts_id_oid'].values[0]
           == "56b8da59f9fcee1b00000007")
    assert(tables['post_comments'].sql_columns['id_oid'].values[0]
           == "56b8f344f9fcee1b00000018")
    assert(tables['post_comments'].sql_columns['idx'].values[0] == 1)
    assert(len(tables) == 1)
def get(self, machine_id):
    args = self.reqparse.parse_args()
    if not loads(MachineModel.get_machine(DEFAULT_DB, machine_id)):
        return {"message": "invalid machine id"}, 400
    try:
        begin_date = args["begin_date"]
        end_date = args["end_date"]
    except ValueError:
        return {"message": "invalid time args"}, 400
    modules = args["module"].split(",")
    if not modules:
        return {"message": "invalid module args"}, 400
    module_info = {}
    step = self._get_best_step(begin_date, end_date)
    for module in modules:
        module_info[module] = loads({
            "cpu": CpuModel.get_cpu,
            "average_load": AverageLoadModel.get_average_load,
            "memory": MemoryModel.get_memory,
            "net": NetModel.get_net,
            "disk": DiskModel.get_disk
        }.get(module)(config.MONGO_DATABASE[step], machine_id,
                      begin_date, end_date))
    return module_info, 200
def post(self):
    data = request.get_json()
    if not data:
        abort(400)
    root_args = self.root_parser.parse_args()
    if 'location' in data:
        location_args = self.location_parser.parse_args(req=root_args)
    if 'details' in data:
        details_args = self.details_parser.parse_args(req=root_args)
    majorAttraction = models.MajorAttraction.objects.get_or_404(
        id=data['majorAttraction'])
    data['majorAttraction'] = majorAttraction
    attractionDetails = models.AttractionDetails(**data['details'])
    data['details'] = attractionDetails
    minorAttraction = models.MinorAttraction(**data)
    minorAttraction.save()
    minorAttraction.url = APP_URL + "/api/minorAttractions/" + \
        str(minorAttraction.id)
    minorAttraction.reviews_url = \
        APP_URL + "/api/minorAttractions/" + \
        str(minorAttraction.id) + "/reviews"
    minorAttraction.save()
    json = loads(minorAttraction.to_json())
    json['majorAttraction_url'] = minorAttraction.majorAttraction.url
    json = JSONEncoder().encode(json)
    response = jsonify(loads(json))
    response.status_code = 201
    response.headers['Location'] = '/api/minorAttractions/' + \
        str(minorAttraction.id)
    return response
def test_uuid(self):
    doc = {'uuid': uuid.UUID('f47ac10b-58cc-4372-a567-0e02b2c3d479')}
    self.round_trip(doc)
    self.assertEqual(
        '{"uuid": {"$uuid": "f47ac10b58cc4372a5670e02b2c3d479"}}',
        json_util.dumps(doc))
    self.assertEqual(
        '{"uuid": '
        '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "03"}}',
        json_util.dumps(doc, json_options=json_util.STRICT_JSON_OPTIONS))
    self.assertEqual(
        '{"uuid": '
        '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "04"}}',
        json_util.dumps(doc, json_options=json_util.JSONOptions(
            strict_uuid=True, uuid_representation=STANDARD)))
    self.assertEqual(
        doc, json_util.loads(
            '{"uuid": '
            '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "03"}}'))
    for uuid_representation in ALL_UUID_REPRESENTATIONS:
        options = json_util.JSONOptions(
            strict_uuid=True, uuid_representation=uuid_representation)
        self.round_trip(doc, json_options=options)
        # Ignore UUID representation when decoding BSON binary subtype 4.
        self.assertEqual(doc, json_util.loads(
            '{"uuid": '
            '{"$binary": "9HrBC1jMQ3KlZw4CssPUeQ==", "$type": "04"}}',
            json_options=options))
def post(self):
    data = request.get_json()
    if not data:
        abort(400)
    root_args = self.root_parser.parse_args()
    if 'admin' in data:
        if data['admin'] == "true":
            data['admin'] = True
        else:
            data['admin'] = False
    user = models.User(**data)
    user.save()
    user.url = APP_URL + "/api/users/" + str(user.id)
    user.history_url = APP_URL + "/api/users/" + str(user.id) + "/history"
    user.save()
    json = loads(user.to_json())
    json['timestamp'] = json['timestamp'].strftime('%Y-%m-%dT%H:%M:%SZ')
    json = JSONEncoder().encode(json)
    response = jsonify(loads(json))
    response.status_code = 201
    response.headers['Location'] = '/api/users/' + str(user.id)
    return response
def put(self, _id=None, reviews=None):
    if reviews:
        abort(400)
    if _id:
        data = request.get_json()
        if not data:
            abort(400)
        root_args = self.root_parser.parse_args()
        if data['location']:
            location_args = self.location_parser.parse_args(req=root_args)
        if data['details']:
            details_args = self.details_parser.parse_args(req=root_args)
        minorAttraction = models.MinorAttraction.objects.get_or_404(id=_id)
        majorAttraction = models.MajorAttraction.objects.get_or_404(
            id=data['majorAttraction'])
        data['majorAttraction'] = majorAttraction
        minorAttraction.update(**data)
        minorAttraction = models.MinorAttraction.objects.get_or_404(id=_id)
        json = loads(minorAttraction.to_json())
        json['majorAttraction_url'] = minorAttraction.majorAttraction.url
        json = JSONEncoder().encode(json)
        response = jsonify(loads(json))
        response.status_code = 200
        return response
    else:
        abort(400)
def cursorToArray(cursor, decrypted=False, probe='', is_researcher=False,
                  map_to_users=False):
    array = []
    for row in cursor:
        if 'timestamp' in row:
            row['timestamp'] = int(time.mktime(row['timestamp'].timetuple()))
        if 'timestamp_added' in row:
            row['timestamp_added'] = int(
                time.mktime(row['timestamp_added'].timetuple()))
        array.append(row)
    if 'ExperienceSamplingProbe' in probe:
        for doc in array:
            doc['answer'] = json.loads(base64.b64decode(doc['answer']))
    if 'EpidemicProbe' in probe:
        for doc in array:
            doc['data'] = json.loads(base64.b64decode(doc['data']))
    if 'BluetoothProbe' not in probe:
        return array
    if decrypted:
        anonymizer = Anonymizer()
        return anonymizer.deanonymizeDocument(array, probe)
    if is_researcher and map_to_users:
        deviceInventory = device_inventory.DeviceInventory()
        for doc in array:
            try:
                user_temp = deviceInventory.mapBtToUser(
                    doc['bt_mac'], doc['timestamp'], use_mac_if_empty=False)
                if user_temp is not None:
                    doc['scanned_user'] = user_temp
                else:
                    doc['scanned_user'] = ''
            except KeyError:
                doc['scanned_user'] = ''
    return array
def run():
    cursor = db.junctions.find({"_id": "30/7/1"})
    json_str = json_util.dumps(cursor)
    junctions = json_util.loads(json_str)
    junctions = sorted(junctions, key=lambda k: k['route'])
    d1 = None
    for junction in junctions:
        d1 = process(junction["_id"])

    cursor = db.junctions.find({"_id": "13/2/1"})
    json_str = json_util.dumps(cursor)
    junctions = json_util.loads(json_str)
    junctions = sorted(junctions, key=lambda k: k['route'])
    d2 = None
    for junction in junctions:
        d2 = process(junction["_id"])

    cursor = db.junctions.find({"_id": "17/6/1"})
    json_str = json_util.dumps(cursor)
    junctions = json_util.loads(json_str)
    junctions = sorted(junctions, key=lambda k: k['route'])
    d3 = None
    for junction in junctions:
        d3 = process(junction["_id"])
    #d3.plot()

    d = df({
        "Low - 30/7/1": [d1["STT"].quantile(i/100) for i in range(1, 99, 10)],
        "Medium - 13/2/1": [d2["STT"].quantile(i/100) for i in range(1, 99, 10)],
        "High - 17/6/1": [d3["STT"].quantile(i/100) for i in range(1, 99, 10)]
    })
    d.plot(ylim=[0, 600])
    plt.show()
def test_complete_partial_record():
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s')
    object_id_bson_raw_data = '{ \
        "_id": { "$oid": "56b8da59f9fcee1b00000007" } \
        }'
    array_bson_raw_data = '{ \
        "comments": [{ \
            "_id": {"$oid": "56b8f344f9fcee1b00000018"}, \
            "updated_at": { "$date" : "2016-02-08T19:57:56.678Z"}, \
            "created_at": { "$date" : "2016-02-08T19:57:56.678Z"} \
        }] }'
    sample_data = {
        'post_comments': {
            'id_oid': [str(loads('{ "$oid": "56b8f344f9fcee1b00000018" }'))],
            'updated_at': [loads('{ "$date" : "2016-02-08T19:57:56.678Z"}')],
            'created_at': [loads('{ "$date" : "2016-02-08T19:57:56.678Z"}')]
        }
    }
    dbname = 'rails4_mongoid_development'
    db_schemas_path = '/'.join(['test_data', 'schemas', dbname])
    schemas = get_schema_engines_as_dict(db_schemas_path)
    schema_engine = schemas['posts']
    bson_data = loads(array_bson_raw_data)
    object_id_bson_data = loads(object_id_bson_raw_data)
    partial_inserts_list = get_tables_data_from_oplog_set_command(
        schema_engine, bson_data, object_id_bson_data)
    tables = partial_inserts_list[0].tables
    print tables
    for table_name in tables:
        assert(True == tables[table_name].compare_with_sample(sample_data))
def test_partial_record2():
    locate_name = ['comments', 'items', 'indices']
    bson_raw_id_data = '{"_id": {"$oid": "56b8da51f9fcee1b00000006"}}'
    array_bson_raw_data = '[21, 777]'
    collection_name = 'a_inserts'
    schema_engine = get_schema_engine(collection_name)
    node = schema_engine.locate(locate_name)
    bson_object_id = loads(bson_raw_id_data)
    bson_data = loads(array_bson_raw_data)
    whole_bson = node.json_inject_data(bson_data,
                                       bson_object_id.keys()[0],
                                       bson_object_id.values()[0])
    print whole_bson
    tables = Tables(schema_engine, whole_bson)
    tables.load_all()
    print "tables.tables.keys()", tables.tables.keys()
    table_name = 'a_insert_comment_item_indices'
    indices_t = tables.tables[table_name]
    print "sql_column_names", indices_t.sql_column_names
    assert(indices_t.sql_columns['indices'])
    # verify parent ids
    parent_id_oid = indices_t.sql_columns['a_inserts_id_oid'].values[0]
    # Old bug fixed: intermediate parent id columns are no longer emitted
    assert('a_inserts_comments_id_oid' not in indices_t.sql_columns.keys())
    assert('a_inserts_comments_items_id_oid' not in indices_t.sql_columns.keys())
    assert('a_inserts_comments_items_indices_id_oid' not in indices_t.sql_columns.keys())
    print "parent_id_oid", parent_id_oid
    assert(parent_id_oid == "56b8da51f9fcee1b00000006")
    # all four tables (a_inserts, comments, items, indices) should be available
    assert(len(tables.tables.keys()) == 4)
def load_data_from_mongoexport(res_id, export_location, collection_name,
                               remove_id=False):
    """
    This file should come from mongoexport, with or without the --jsonArray
    flag. That is to say, it should either be a series of documents, each on
    its own line, or a single array of documents. All documents will be
    inserted into the given collection.
    """
    export_location = _data_file_path(export_location)
    with open(export_location) as export:
        first_char = export.read(1)
        export.seek(0, SEEK_SET)
        if first_char == '[':
            # Data is already in an array
            documents = loads(export.read())
        else:
            # Each line of data is an object
            documents = []
            for line in export:
                documents.append(loads(line))
    if remove_id:
        _remove_id(documents)
    with UseResId(res_id) as db:
        db[collection_name].insert(documents)
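A small usage sketch for the function above. The resource id, file name, and collection name here are made up for illustration; only the call signature comes from the code itself.

# Hypothetical call: import a dump produced with
#   mongoexport --db shop --collection orders --out orders.json
# into the sandbox identified by res_id, stripping the exported _id fields.
load_data_from_mongoexport(res_id='res-123',
                           export_location='orders.json',
                           collection_name='orders',
                           remove_id=True)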
def get_friends(access_token):
    #maxPage = 4
    #friends = json.load(urllib2.urlopen('https://graph.facebook.com/me/friends?limit=1000&access_token=%s' % access_token))
    #Friends = []
    #while "next" in friends["paging"]:
    #    if maxPage == 0:
    #        break
    #    maxPage = maxPage - 1
    #    Friends = Friends + friends["data"]
    #    friends = json.load(urllib2.urlopen(friends["paging"]["next"]))
    #return Friends
    friends = []
    url = 'https://graph.facebook.com/me/friends?limit=1000&access_token=%s' % access_token
    print url
    f = urllib2.urlopen(url)
    json_string = f.read()
    for friend in loads(json_string)['data']:
        url = 'https://graph.facebook.com/%s?limit=1000&access_token=%s' % (friend['id'], access_token)
        f.close()
        f = urllib2.urlopen(url)
        json_string_2 = f.read()
        friends.append(loads(json_string_2))
    f.close()
    return friends
def simple(self):
    """ Return a JS-free table of shows. """
    airing = loads(self.api.shows.by_group('airing'))
    now = dt.utcnow()
    positions = [
        ['translator', 'translated'],
        ['editor', 'edited'],
        ['timer', 'timed'],
        ['typesetter', 'typeset']
    ]
    tbl_airing = []
    for show in sorted(airing, key=lambda k: k['airtime']):
        encoded = "Yes" if show['progress']['encoded'] else "No"
        eta = (show['airtime'] - loads(dumps(now))).total_seconds() / 60
        row_class = {
            True: 'subbing',
            False: {
                True: 'airing_1',
                False: {
                    True: 'airing_3',
                    False: {
                        True: 'airing_6',
                        False: {
                            True: 'airing_12',
                            False: ''
                        }.get(eta <= 720)
                    }.get(eta <= 360)
                }.get(eta <= 180)
            }.get(eta <= 60)
        }.get(eta < 0)
        row = '<tr class="%s">' % row_class
        row += '<td class="title">%s</td>' % show['titles']['english']
        row += '<td>%d (of %d)</td>' % (show['episodes']['current'] + 1,
                                        show['episodes']['total'])
        row += '<td>%s</td>' % show['airtime']
        row += '<td><a href="%s"><i class="icon-black ' % show['link']
        row += 'icon-info-sign"></i></a></td>'
        for p in positions:
            row += '<td class="staff-status-%s">%s</td>' % (
                str(show['progress'][p[1]]).lower(),
                show['staff'][p[0]]['name']
            )
        row += '<td class="staff-status-%s encoded-status">%s</td>' % \
            (str(show['progress']['encoded']).lower(), encoded)
        tbl_airing.append(row)
    table = '<div id="食べ物" class="pure-u">' \
        '<table class="pure-table pure-table-horizontal"><thead><tr>' \
        '<th>Series</th><th>Episode</th><th>airs on</th><th>Archive</th>' \
        '<th>Translator</th><th>Editor</th><th>Timer</th>' \
        '<th>Typesetter</th><th>Encoded?</th></tr></thead>' \
        '<tbody>%s</tbody></table></div>' % ''.join(tbl_airing)
    body = '<!DOCTYPE html><html lang="en"><head>' \
        '<title>Wagnaria!</title>' \
        '<link href="css/pure-min.css" rel="stylesheet" media="screen">' \
        '<link href="css/glyphs.min.css" rel="stylesheet" media="screen">' \
        '<link href="css/main.css" rel="stylesheet" media="screen">' \
        '</head><body>%s</body></html>' % table
    response.content_type = "text/html"
    return body
def patch(self):
    newRoomData = self.get_argument('rooms')
    newItemData = self.get_argument('items')
    newNPCData = self.get_argument('npcs')
    from bson.json_util import loads
    self.set_rooms(loads(newRoomData))
    self.set_items(loads(newItemData))
    self.set_npcs(loads(newNPCData))
def test_loads_document_class(self):
    # document_class dict should always work
    self.assertEqual({"foo": "bar"}, json_util.loads(
        '{"foo": "bar"}',
        json_options=json_util.JSONOptions(document_class=dict)))
    self.assertEqual(SON([("foo", "bar"), ("b", 1)]), json_util.loads(
        '{"foo": "bar", "b": 1}',
        json_options=json_util.JSONOptions(document_class=SON)))
def occupy():
    if not is_login():
        return jsonify({
            'status': 401,
            'data': 'user not log in'
        })
    story_id = json.loads(request.data).get('story_id')
    # story_id is empty: return a random story that is not currently occupied
    if story_id == "":
        result = Story.get_story_id_by_state(0)
        if result:
            # pick one story at random from the list
            return_story_id = random.choice(loads(result))
            result_story = Story.get_story_by_id(return_story_id['_id'])
            if result_story == 'null':
                return jsonify({
                    'status': 403,
                    'data': 'not existing target story'
                })
            else:
                result_story = loads(result_story)
                del result_story['paragraph_ids']
                del result_story['current_owner']
                return jsonify({
                    'status': 200,
                    'data': convert_id(result_story)
                })
        else:
            return jsonify({
                'status': 200,
                'data': ""
            })
    # story_id given: lock the story so the user can continue writing it
    else:
        story = Story.get_story_by_id(ObjectId(story_id))
        if story != 'null':
            story = loads(story)
            story['lock_time'] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            story['state'] = 1
            story['current_owner'] = loads(get_current_user())['_id']
            result = Story.update_story(story)
            if result == "":
                del story['current_owner']
                del story['paragraph_ids']
                return jsonify({
                    'status': 200,
                    'data': convert_id(story)
                })
            else:
                return jsonify({
                    'status': 403,
                    'data': 'update fail'
                })
        else:
            return jsonify({
                'status': 403,
                'data': 'invalid story id'
            })
def remove(self, id):
    i = get_object_or_404(self.objectmanager, id=id)
    data = loads(request.data)
    i.remove(iterify(data['links']), iterify(data['nodes']))
    return render(i.info())
def all_results():
    """fetch all results from database"""
    if db.mongoatlas.client is not None:
        returned_obj = loads(dumps(db.scraped_col.find()))
        return db.json_encoder.encode(returned_obj)
channel_names = [re.sub(r"'+", '', name) for name in channel_names]
data_types = [
    'daily_subs', 'total_subs', 'daily_views', 'total_views',
    'average_views', 'monthly_views'
]

if mongoDb.social_blade_asmr_response.find_one():
    asmr_channels_data_list = []
    social_blade_asmr_response_dict = {}
    asmr_social_blade_data_dict = {}
    for index, url in enumerate(sb_urls):
        channel = channel_names[index]
        collection = mongoDb.social_blade_asmr_response
        html = loads(dumps(collection.find({channel: {
            '$exists': True
        }})))[0][channel]
        blade_soup = bs(html, 'html.parser')
        channel_name = blade_soup.find('h1').text
        id_anchor = blade_soup.find(
            'a',
            class_='core-button -margin core-small-wide ui-black',
            rel='nofollow')['href']
        channel_id = re.search(r'(?<=channel/).*', id_anchor).group(0)
        uploads = blade_soup.find('span', id="youtube-stats-header-uploads").text
        subs = blade_soup.find('span', id="youtube-stats-header-subs").text
        views = blade_soup.find('span', id="youtube-stats-header-views").text
        country = blade_soup.find('span', id="youtube-stats-header-country").text
def from_json(cls, json_data, created=False):
    """Converts json data to an unsaved document instance"""
    return cls._from_son(json_util.loads(json_data), created=created)
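A hedged usage sketch, assuming this classmethod lives on a MongoEngine-style Document class (it mirrors MongoEngine's `Document.from_json`); the `Article` model below is hypothetical.

# Hypothetical model used only to illustrate the call.
class Article(Document):
    title = StringField()

# Extended JSON (e.g. produced by to_json() or mongoexport) becomes an
# unsaved Article instance.
article = Article.from_json(
    '{"_id": {"$oid": "56b8da51f9fcee1b00000006"}, "title": "BSON 101"}')
assert article.title == "BSON 101"
article.save()  # nothing is persisted until save() is called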
def setUp(self):
    super().setUp()
    self.kubernetes_service = MagicMock()
    self.dummy_credentials = b64encode(json.dumps({"user": "******"}).encode())
    self.kubernetes_service.getSecret.return_value = V1Secret(
        metadata=V1ObjectMeta(name="mongo-cluster-admin-credentials",
                              namespace="default"),
        data={
            "password": b64encode(b"random-password"),
            "username": b64encode(b"root"),
            "json": self.dummy_credentials
        },
    )
    self.service = MongoService(self.kubernetes_service)
    self.cluster_dict = getExampleClusterDefinition()
    self.cluster_object = V1MongoClusterConfiguration(**self.cluster_dict)

    self.not_initialized_response = {
        "info": "run rs.initiate(...) if not yet done for the set",
        "ok": 0,
        "errmsg": "no replset config has been received",
        "code": 94,
        "codeName": "NotYetInitialized"
    }

    self.initiate_ok_response = loads("""
        {"ok": 1.0,
         "operationTime": {"$timestamp": {"t": 1549963040, "i": 1}},
         "$clusterTime": {
             "clusterTime": {"$timestamp": {"t": 1549963040, "i": 1}},
             "signature": {
                 "hash": {"$binary": "AAAAAAAAAAAAAAAAAAAAAAAAAAA=", "$type": "00"},
                 "keyId": 0}}}
    """)

    self.initiate_not_found_response = loads("""
        {"ok": 2,
         "operationTime": {"$timestamp": {"t": 1549963040, "i": 1}},
         "$clusterTime": {
             "clusterTime": {"$timestamp": {"t": 1549963040, "i": 1}},
             "signature": {
                 "hash": {"$binary": "AAAAAAAAAAAAAAAAAAAAAAAAAAA=", "$type": "00"},
                 "keyId": 0}}}
    """)

    self.expected_cluster_config = {
        "_id": "mongo-cluster",
        "version": 1,
        "members": [{
            "_id": 0,
            "host": "mongo-cluster-0.mongo-cluster.mongo-operator-cluster.svc.cluster.local"
        }, {
            "_id": 1,
            "host": "mongo-cluster-1.mongo-cluster.mongo-operator-cluster.svc.cluster.local"
        }, {
            "_id": 2,
            "host": "mongo-cluster-2.mongo-cluster.mongo-operator-cluster.svc.cluster.local"
        }]
    }

    self.expected_user_create = {
        "pwd": "random-password",
        "roles": [{
            "role": "root",
            "db": "admin"
        }]
    }
def test_undefined(self):
    jsn = '{"name": {"$undefined": true}}'
    self.assertIsNone(json_util.loads(jsn)['name'])
def writeToCollection(collection, df):
    jsonStrings = df.to_json(orient='records')
    bsonStrings = json_util.loads(jsonStrings)
    collection.insert_many(bsonStrings, ordered=False)
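One plausible way to call this helper, shown as a sketch: it assumes a locally running mongod, and the database/collection names and DataFrame contents below are invented for illustration.

import pandas as pd
from pymongo import MongoClient
from bson import json_util

client = MongoClient('localhost', 27017)
# DataFrame rows become one document each via to_json(orient='records').
frame = pd.DataFrame({'symbol': ['AAA', 'BBB'], 'price': [10.5, 20.25]})
writeToCollection(client.demo_db.prices, frame)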
def to_json(self):
    return loads(dumps(self, default=lambda o: o.__dict__, sort_keys=True))
async def patch(self, request, user_id, todo_id, format=None):
    data = loads(request.body)
    todo = await self.todo_manager.update(todo_id, data)
    response = {'status': True, 'data': todo}
    return HttpResponse(dumps(response))
def return_data(data):
    return json_util.loads(json_util.dumps(data))
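Roughly what this dumps/loads round trip does, as an illustrative sketch (values invented): BSON-specific types such as ObjectId survive, while datetimes come back timezone-aware and truncated to millisecond precision.

import datetime
from bson import ObjectId
from bson import json_util

doc = {"_id": ObjectId(),
       "when": datetime.datetime(2020, 1, 1, 12, 0, 0, 123456)}
copy = json_util.loads(json_util.dumps(doc))
assert copy["_id"] == doc["_id"]           # ObjectId round-trips intact
assert copy["when"].microsecond == 123000  # micros truncated to millis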
def get(self):
    if self.current_user:
        user_id = self.current_user["id"]
        contact = Contact()
        response_obj = contact.ListByUserId(user_id)
        contactos = []
        cities = []
        if "success" in response_obj:
            contactos = json_util.loads(response_obj["success"])
        # else:
        #     self.render("beauty_error.html",
        #                 message="Error al obtener la lista de contactos:{}".format(response_obj["error"]))
        #     return
        # use globals default to avoid exception
        web_cellar_id = cellar_id
        c = Cellar()
        res_cellar_id = c.GetWebCellar()
        if "success" in res_cellar_id:
            web_cellar_id = res_cellar_id["success"]
        cart = Cart()
        cart.user_id = user_id
        lista = cart.GetCartByUserId()
        suma = 0
        for l in lista:
            suma += l["subtotal"]
        res_web_cellar = c.InitById(web_cellar_id)
        if "success" in res_web_cellar:
            cellar_city_id = c.city_id
        city = City()
        city.from_city_id = cellar_city_id
        res_city = city.ListByFromCityId()
        # print res_city
        post_office_list = []
        po = PostOffice()
        res_po = po.ListOnlyWithShippingCost()
        if "success" in res_po:
            post_office_list = res_po["success"]
        if suma > 0:
            if "success" in res_city:
                cities = res_city["success"]
            self.render("store/checkout-1.html",
                        contactos=contactos,
                        data=lista,
                        suma=suma,
                        cities=cities,
                        post_office_list=post_office_list)
        else:
            self.render("beauty_error.html", message="Carro está vacío")
    else:
        self.redirect("/auth/login")
def get(self):
    if self.current_user:
        user_id = self.current_user["id"]
        nombre = self.get_argument("name", self.current_user["name"])
        apellido = self.get_argument("lastname", self.current_user["lastname"])
        email = self.get_argument("email", self.current_user["email"])
        direccion = self.get_argument("address", "")
        ciudad = self.get_argument("city_id", "")
        codigo_postal = self.get_argument("zip_code", "")
        informacion_adicional = self.get_argument("additional_info", "")
        telefono = self.get_argument("telephone", "")
        id_contacto = self.get_argument("contact_id", "")
        comuna = self.get_argument("town", "")
        rut = self.get_argument("rut", "")
        shipping_type = self.get_argument("shipping_type", "")
        post_office_id = self.get_argument("post_office_id", "")
        shipping_type_id = 1

        cart = Cart()
        cart.user_id = user_id
        lista = cart.GetCartByUserId()
        if len(lista) <= 0:
            self.render("beauty_error.html", message="Carro está vacío")

        contact = Contact()
        contact.name = nombre
        contact.lastname = apellido
        contact.telephone = telefono
        contact.email = email
        contact.address = direccion
        if shipping_type == "chilexpress":
            po = PostOffice()
            po.InitById(post_office_id)
            post_office_name = po.name
            contact.address = "Oficina {}".format(post_office_name)
        contact.city = ciudad
        contact.zip_code = codigo_postal
        contact.user_id = user_id
        contact.additional_info = informacion_adicional
        contact.town = comuna
        contact.rut = rut

        operacion = ""
        if id_contacto != "":
            contact.id = id_contacto
            response_obj = contact.Edit()
            operacion = "editar"
        else:
            response_obj = contact.Save()
            operacion = "guardar"
        if "error" in response_obj:
            self.render("beauty_error.html",
                        message="Error al {} contacto {}".format(
                            operacion, response_obj["error"]))
        else:
            items = 0
            suma = 0
            for l in lista:
                c = Cart()
                response_obj = c.InitById(l["id"])
                if "success" in response_obj:
                    c.shipping_id = contact.id
                    c.shipping_info = contact.additional_info
                    c.Edit()
                else:
                    print response_obj["error"]
                suma += l["subtotal"]
                items += l["quantity"]

            contactos = []
            cities = []
            response_obj = contact.ListByUserId(user_id)
            city = City()
            res_city = city.List()
            if "success" in response_obj:
                contactos = json_util.loads(response_obj["success"])
            if "success" in res_city:
                cities = res_city["success"]

            c = Cellar()
            res_cellar_id = c.GetWebCellar()
            web_cellar_id = cellar_id
            if "success" in res_cellar_id:
                web_cellar_id = res_cellar_id["success"]
            res_web_cellar = c.InitById(web_cellar_id)
            if "success" in res_web_cellar:
                if shipping_type != "chilexpress":
                    cellar_city_id = c.city_id
                    shipping = Shipping()
                    shipping.from_city_id = int(cellar_city_id)
                    shipping.to_city_id = int(ciudad)
                    res = shipping.GetGianiPrice()
                    if "error" in res:
                        self.render(
                            "beauty_error.html",
                            message="Error al calcular costo de despacho, {}"
                            .format(res["error"]))
                else:
                    shipping_type_id = 2
                    shipping = Shipping()
                    shipping.post_office_id = post_office_id
                    res = shipping.GetPriceByPostOfficeId()
                    if "error" in res:
                        self.render(
                            "beauty_error.html",
                            message="Error al calcular costo de despacho de Chilexpress, {}"
                            .format(res["error"]))
                    else:
                        if shipping.charge_type == 1:
                            costo_despacho = shipping.price * items
                        else:
                            costo_despacho = shipping.price
                self.render("store/checkout-2.html",
                            contactos=contactos,
                            data=lista,
                            suma=suma,
                            selected_address=direccion,
                            cities=cities,
                            costo_despacho=costo_despacho,
                            shipping_type=shipping_type_id,
                            post_office_id=post_office_id)
    else:
        self.redirect("/auth/login")
from pymongo import MongoClient
from bson import json_util
from bson.code import Code

# creating the link
client = MongoClient('localhost', 27017)
db = client.project5
db.tree.drop()
collection = db.tree

print("------------------------")
print("Question 1")
print("------------------------")
print("Load the Q1 example data")
with open("Q1_pyMongo_data.json") as f:
    data = f.read()
data = json_util.loads(data)
collection.insert(data)
print(db.tree.count())
print("========\t")

# 1) Assume we model the records and relationships in Figure 1 using the
#    Parent-Referencing model (Slide 49 in MongoDB-2). Write a query to report
#    the ancestors of "MongoDB". The output should be an array containing values
#    [{Name: "Databases", Level: 1},
#     {Name: "Programming", Level: 2},
#     {Name: "Books", Level: 3}]
print("Question 1.1")
node = collection.find_one({"_id": "MongoDB"})
queue = [node["parent"]]
lvl = 0
while queue:
    temp = queue.pop(0)
cache_size_res = table.find_one()
print ("Cache key\n")
print f_path
get_mpd = mpdinfo.find_one({"urn": str(f_path)})
if get_mpd is None:
    print "File not found\n"
new_seg_size = get_mpd['seg_size']
evict = False
evict_seg_size = 0
res = table.find_one({'$query': {}, '$orderby': {"date": -1}})
if res is not None:
    print "Get cache_size\n"
    pipe = [{'$group': {'_id': None, 'cache_size': {'$sum': '$seg_size'}}}]
    res2 = table.aggregate(pipe)
    estimated_size = loads(dumps(res2))
    estimated_cache_size = int(estimated_size[0]['cache_size']) + new_seg_size
    print estimated_cache_size
else:
    estimated_cache_size += new_seg_size

# Perform Cache Eviction
while evict is False:
    for res in table.find().sort([("date", pymongo.ASCENDING)]).limit(1):
        print "Non empty cache\n"
        if res['date'] is not None:
            if estimated_cache_size > MAX_CACHE_SIZE:
                table.remove({"date": res["date"]})
                estimated_cache_size -= res['seg_size']
            else:
                evict = True
def test_datetime(self):
    # only millis, not micros
    self.round_trip(
        {"date": datetime.datetime(2009, 12, 9, 15, 49, 45, 191000, utc)})

    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000+0000"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000+0000"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000+00:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000+00:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000+00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000Z"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000Z"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    # No explicit offset
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T00:00:00.000000"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    # Localtime behind UTC
    jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000-0800"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000000-0800"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000-08:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000000-08:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1969-12-31T16:00:00.000000-08"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    # Localtime ahead of UTC
    jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000+0100"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000000+0100"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000+01:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000000+01:00"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])
    jsn = '{"dt": { "$date" : "1970-01-01T01:00:00.000000+01"}}'
    self.assertEqual(EPOCH_AWARE, json_util.loads(jsn)["dt"])

    dtm = datetime.datetime(1, 1, 1, 1, 1, 1, 0, utc)
    jsn = '{"dt": {"$date": -62135593139000}}'
    self.assertEqual(dtm, json_util.loads(jsn)["dt"])
    jsn = '{"dt": {"$date": {"$numberLong": "-62135593139000"}}}'
    self.assertEqual(dtm, json_util.loads(jsn)["dt"])

    # Test dumps format
    pre_epoch = {"dt": datetime.datetime(1, 1, 1, 1, 1, 1, 10000, utc)}
    post_epoch = {"dt": datetime.datetime(1972, 1, 1, 1, 1, 1, 10000, utc)}
    self.assertEqual('{"dt": {"$date": -62135593138990}}',
                     json_util.dumps(pre_epoch))
    self.assertEqual('{"dt": {"$date": 63075661010}}',
                     json_util.dumps(post_epoch))
    self.assertEqual(
        '{"dt": {"$date": {"$numberLong": "-62135593138990"}}}',
        json_util.dumps(pre_epoch, json_options=STRICT_JSON_OPTIONS))
    self.assertEqual(
        '{"dt": {"$date": "1972-01-01T01:01:01.010Z"}}',
        json_util.dumps(post_epoch, json_options=STRICT_JSON_OPTIONS))

    number_long_options = json_util.JSONOptions(
        datetime_representation=DatetimeRepresentation.NUMBERLONG)
    self.assertEqual(
        '{"dt": {"$date": {"$numberLong": "63075661010"}}}',
        json_util.dumps(post_epoch, json_options=number_long_options))
    self.assertEqual(
        '{"dt": {"$date": {"$numberLong": "-62135593138990"}}}',
        json_util.dumps(pre_epoch, json_options=number_long_options))

    # ISO8601 mode assumes naive datetimes are UTC
    pre_epoch_naive = {"dt": datetime.datetime(1, 1, 1, 1, 1, 1, 10000)}
    post_epoch_naive = {
        "dt": datetime.datetime(1972, 1, 1, 1, 1, 1, 10000)
    }
    self.assertEqual(
        '{"dt": {"$date": {"$numberLong": "-62135593138990"}}}',
        json_util.dumps(pre_epoch_naive, json_options=STRICT_JSON_OPTIONS))
    self.assertEqual(
        '{"dt": {"$date": "1972-01-01T01:01:01.010Z"}}',
        json_util.dumps(post_epoch_naive, json_options=STRICT_JSON_OPTIONS))

    # Test tz_aware and tzinfo options
    self.assertEqual(
        datetime.datetime(1972, 1, 1, 1, 1, 1, 10000, utc),
        json_util.loads(
            '{"dt": {"$date": "1972-01-01T01:01:01.010+0000"}}')["dt"])
    self.assertEqual(
        datetime.datetime(1972, 1, 1, 1, 1, 1, 10000, utc),
        json_util.loads(
            '{"dt": {"$date": "1972-01-01T01:01:01.010+0000"}}',
            json_options=json_util.JSONOptions(tz_aware=True,
                                               tzinfo=utc))["dt"])
    self.assertEqual(
        datetime.datetime(1972, 1, 1, 1, 1, 1, 10000),
        json_util.loads(
            '{"dt": {"$date": "1972-01-01T01:01:01.010+0000"}}',
            json_options=json_util.JSONOptions(tz_aware=False))["dt"])
    self.round_trip(pre_epoch_naive,
                    json_options=json_util.JSONOptions(tz_aware=False))

    # Test a non-utc timezone
    pacific = FixedOffset(-8 * 60, 'US/Pacific')
    aware_datetime = {
        "dt": datetime.datetime(2002, 10, 27, 6, 0, 0, 10000, pacific)
    }
    self.assertEqual(
        '{"dt": {"$date": "2002-10-27T06:00:00.010-0800"}}',
        json_util.dumps(aware_datetime, json_options=STRICT_JSON_OPTIONS))
    self.round_trip(aware_datetime,
                    json_options=json_util.JSONOptions(tz_aware=True,
                                                       tzinfo=pacific))
    self.round_trip(
        aware_datetime,
        json_options=json_util.JSONOptions(
            datetime_representation=DatetimeRepresentation.ISO8601,
            tz_aware=True,
            tzinfo=pacific))
def json_data(*paths):
    return json_util.loads(read(*paths), json_options=JSON_OPTS)
def round_tripped(self, doc, **kwargs):
    return json_util.loads(json_util.dumps(doc, **kwargs), **kwargs)
def getClipDataByRow(self):
    current_row = self.currentRow()
    current_item = self.currentItem()
    current_item = json.loads(current_item.data(QtCore.Qt.UserRole))
    # http://stackoverflow.com/questions/25452125/is-it-possible-to-add-a-hidden-value-to-every-item-of-qlistwidget
    return current_row, current_item
def _getFixture(name):
    with open("tests/fixtures/mongo_responses/{}.json".format(name)) as f:
        return loads(f.read())
def astable_filter(value):
    return json2html.convert(json=loads(dumps(value)),
                             table_attributes='class="table table-sm"')
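One plausible way this filter gets wired up, shown as a sketch: it assumes a Flask application; the app object and the template usage below are illustrative, not taken from the original project.

from flask import Flask

app = Flask(__name__)
# Register the function as a Jinja filter so templates can render
# MongoDB documents as Bootstrap-styled HTML tables.
app.jinja_env.filters['astable'] = astable_filter
# In a template: {{ document | astable }}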
def create_ride():
    do_count(1)
    req_data = request.get_json()
    name = req_data['created_by']
    #data = db.users.find({'username': name})
    #query = {'collection': 'users', 'data': {'username': name}}
    # LOOK: here we use requests.get as it uses the GET method (list_users_api)
    rec = requests.get(
        url='http://lb-591453474.us-east-1.elb.amazonaws.com/api/v1/users',
        headers={'Origin': '54.89.99.52'}
    )
    rdata_old = loads(rec.text)
    rdata = []
    for val in rdata_old:
        if (name == val):
            rdata.append(val)
    rideId = 1
    max = 0
    if (len(rdata) > 0):
        timestamp = req_data['timestamp']
        # query = {'timestamp': timestamp}
        # rec = requests.post(url='http://assg3-lb-1719997549.us-east-1.elb.amazonaws.com/api/time', json=query)
        tm = get_timestamp(timestamp)
        if (tm != "valid"):
            return jsonify({}), 400
        #rides = list(db.rides.find())
        query = {'collection': 'rides', 'data': {}}
        rec = requests.post(url='http://34.198.254.117/api/v1/db/read',
                            json=query)
        rides = loads(rec.text)
        if (len(rides) <= 0):
            rideId = 1
        else:
            for i in range(0, len(rides)):
                if (rides[i]['rideId'] > max):
                    max = rides[i]['rideId']
            rideId = max + 1
        source = req_data['source']
        if (int(source) < 1 or int(source) > 198):
            return jsonify({}), 400
        dest = req_data['destination']
        if (int(dest) < 1 or int(dest) > 198):
            return jsonify({}), 400
        # db.rides.insert({
        #     'rideId': rideId,
        #     'created_by': name,
        #     'joinee': [],
        #     'timestamp': timestamp,
        #     'source': source,
        #     'destination': dest
        # })
        query = {
            'collection': 'rides',
            'work': 'insert',
            'data': {
                'rideId': rideId,
                'created_by': name,
                'users': [],
                'timestamp': timestamp,
                'source': source,
                'destination': dest
            }
        }
        a = requests.post(url='http://34.198.254.117/api/v1/db/write',
                          json=query)
        return jsonify({}), 201
    else:
        return jsonify({}), 400
def import_json_file(self, filename='movies.json'):
    # type: (str) -> None
    print("import movies from", filename)
    with open(filename) as f:
        self.import_bson(json_util.loads(f.read()))
def test_get(self):
    # simple get with no errors expected
    resp = self.app.get('/accounts/234234324324424')
    customer = loads(resp.data)
    assert customer['number'] == '234234324324424'
def post(self):
    name = self.get_argument("name", "")
    email = self.get_argument("email", "")
    password = self.get_argument("password", "")
    re_password = self.get_argument("re-password", "")
    tos = self.get_argument("tos", "")
    ajax = self.get_argument("ajax", "false")
    user_id = int(self.get_argument("user_id", 0))
    # print tos
    if name == "":
        self.write(json_util.dumps({"error": "debe ingresar su nombre"}))
        return
    elif email == "":
        self.write(json_util.dumps({"error": "debe ingresar el email"}))
        return
    elif password == "":
        self.write(json_util.dumps({"error": "debe ingresar la contraseña"}))
        return
    elif password != re_password:
        self.write(json_util.dumps({"error": "las contraseñas no coinciden"}))
        return
    elif tos != "on":
        self.write(json_util.dumps({"error": "debe aceptar las condiciones de uso"}))
        return
    else:
        response = User().Exist(email)
        if "success" in response:
            if response["success"]:
                self.write(json_util.dumps({
                    "error": "ya existe un usuario registrado con este email"
                }))
                return
        else:
            self.write(json_util.dumps({
                "error": "se ha producido un error {}".format(response['error'])
            }))
            return

    # perform login
    user = User()
    user.name = name
    user.email = email
    user.password = password
    user.user_type = UserType.CLIENTE
    user.status = User.ACEPTADO
    if user_id != 0:
        existe = User().Exist('', user_id)
        if "success" in existe:
            if existe["success"]:
                user.id = user_id
    user.Save()
    RegistrationEmail(user.name, user.email)
    response_obj = user.Login(user.email, user.password)
    if "success" in response_obj:
        self.set_secure_cookie("user_giani", response_obj["success"],
                               expires_days=None)
        current_user_id = json_util.loads(response_obj["success"])["id"]
        if user_id != current_user_id:
            cart = Cart()
            response = cart.MoveTempToLoggedUser(user_id, current_user_id)
        self.write(json_util.dumps({"success": self.next}))
        return
    else:
        self.write(json_util.dumps({"error": str(response_obj)}))
        return

    # redirect if the request isn't ajax
    if ajax == "false":
        self.set_secure_cookie("user_giani", response_obj["success"],
                               expires_days=None)
    self.write(json_util.dumps({"success": self.next}))
def spider():
    """Query Spider or Create one"""
    if db.mongoatlas.client is not None:
        _spiders_col = loads(dumps(db.spider_col.find()))
        return db.json_encoder.encode(_spiders_col)
    return jsonify({"Status": 404, "msg": "Not Connected"})
def _save_user_profile(self, user):
    if not user:
        raise tornado.web.HTTPError(500, "Facebook authentication failed.")
    user_id = self.get_argument("user_id", "")
    usr = User()
    usr.name = user["name"]
    usr.email = user["email"]
    usr.user_type = UserType.VISITA
    if user_id != "":
        usr.id = user_id
    response = usr.Exist(user["email"])
    if "success" in response:
        if not response["success"]:
            res = usr.Save()
            RegistrationEmail(usr.name, usr.email)
            if "error" in res:
                print res["error"]
    else:
        self.render("auth/fail.html", message=response["error"])

    response_obj = usr.InitByEmail(user["email"])
    # print response_obj
    if "success" in response_obj:
        current_user_id = json_util.loads(response_obj["success"])["id"]
        # print "user_id: {} current_user_id: {}".format(str(user_id), str(current_user_id))
        if user_id != "":
            if str(user_id) != str(current_user_id):
                cart = Cart()
                response = cart.MoveTempToLoggedUser(user_id, current_user_id)
                # if "error" in response:
                #     print "Error moving cart detail: {}".format(response["error"])
        self.set_secure_cookie("user_giani", response_obj["success"],
                               expires_days=0.02)
        _u = User()
        _u.updateLastView(
            current_user_id,
            datetime.now(pytz.timezone('Chile/Continental')).isoformat())
        self.redirect(self.next)
    else:
        self.render("auth/fail.html", message=response_obj["error"])
        # else:
        #     self.write(response_obj["error"])

    # Legacy psycopg2-based implementation, kept commented out in the original:
    # conn = psycopg2.connect(conn_string)
    # cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # # self.write(user)
    # # return
    # cursor.execute("select * from \"user\" where email = %(email)s", {"email": user["email"]})
    # data = cursor.fetchone()
    # _user = {}
    # if data:
    #     _user["id"] = data["id"]
    #     _user["name"] = data["name"]
    #     _user["email"] = data["email"]
    #     _user["type"] = data["type"]
    #     _user["profile"] = data["profile"]
    #     print "ya existe"
    #     self.write("el usuario con el email ya existe")
    # else:
    #     parameters = {"email": user["email"], "name": user["name"], "type": "facebook"}
    #     try:
    #         cursor.execute("insert into \"user\" (email, name, type) values (%(email)s,%(name)s,%(type)s)", parameters)
    #         conn.commit()
    #         try:
    #             cursor.execute("select * from \"user\" where email = %(email)s", {"email": user["email"]})
    #             data = cursor.fetchone()
    #             if data:
    #                 _user["id"] = data["id"]
    #                 _user["name"] = data["name"]
    #                 _user["email"] = data["email"]
    #                 _user["type"] = data["type"]
    #                 self.write("usuario creado correctamente")
    #         except Exception, e:
    #             self.write(str(e))
    #     except Exception, e:
    #         self.write(str(e))
    # self.set_secure_cookie("user_giani", json_util.dumps(_user, sort_keys=True, indent=4, default=json_util.default))
    # self.redirect("/")
    pass
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Furushchev <*****@*****.**>

from collections import defaultdict
from bson.json_util import loads
import pandas as pd
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt

with open("data_size.json") as f:
    data = loads(f.read())

df = defaultdict(list)
total = 0
for i, d in enumerate(data):
    if i < 7:
        continue
    df["index"].append(i)
    df["from"].append(d["from"].strftime("%Y-%m"))
    df["to"].append(d["to"])
    df["len"].append(d["data"]["len"])
    size = d["data"]["size"] * 1e-3 * 1e-3 * 1e-3  # GB
    df["size"].append(size)
    total += size
    df["total"].append(total)

df = pd.DataFrame(df)
print df.head()
ax = df.plot(x="from", y="total", grid=True)
# Define a chunk grammar, or chunk rules, then chunk
grammar = """
NP: {<N.*>*<Suffix>?}   # Noun phrase
VP: {<V.*>*}            # Verb phrase
AP: {<A.*>*}            # Adjective phrase
"""

while True:
    try:
        print("start this point")
        print(counter)
        cursor = db.twitter_korean_data.find().skip(counter)
        # extract text with korean
        for document in cursor:
            document = dumps(document)
            document_loaded = loads(document)
            created_at = document_loaded["created_at"]
            id_str = document_loaded["id_str"]
            text = document_loaded["text"]
            words = konlpy.tag.Twitter().pos(text)
            parser = nltk.RegexpParser(grammar)
            chunks = parser.parse(words)
            split_text = ''
            for subtree in chunks.subtrees():
                if subtree.label() == "NP" or subtree.label() == "VP" \
                        or subtree.label() == "AP":
                    split_text += ' '.join((e[0] for e in list(subtree))) + ' '
            obj = {"created_at": created_at, "id_str": id_str,
                   "split_text": split_text}
            # print(obj)
            split_korean_coll.insert_one(obj)
            print("saved split korean data")
            counter += 1
def deserialize(value):
    if isinstance(value, Serialized):
        return loads(value.json)
    else:
        return value
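A usage sketch under the assumption that Serialized is a thin wrapper exposing a .json attribute holding an extended-JSON string; the namedtuple below is hypothetical and stands in for whatever the real project defines.

from collections import namedtuple
from bson.json_util import dumps

Serialized = namedtuple('Serialized', ['json'])  # hypothetical stand-in

wrapped = Serialized(json=dumps({"n": 1}))
assert deserialize(wrapped) == {"n": 1}   # wrapped values are decoded
assert deserialize({"n": 1}) == {"n": 1}  # everything else passes through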
import numpy as np
import xarray as xr
import pymongo
import datetime
import pandas as pd
import time
import json
from bson.json_util import loads

# 1. Connect to the database
client = pymongo.MongoClient('localhost', 27317)
temp = client.temp  # db
t = temp.temp       # collection

# 2. Read the data file and insert the documents
st = time.time()
filename = "/home/alley/work/Dong/mongo/seasonal_analysis/data/data/temp.json"
with open(filename, 'r', encoding='utf-8') as f:
    data = f.read()  # read the whole file; loads() expects a string, not a list of lines
print(data)
json = loads(data)
print(json)
t.insert_many(json)
et = time.time()
print(et - st)