def test_follow(db_conn, session, cards_table, follows_table):
    """Verify that a logged-in user can follow an entity (200)."""
    # Seed a card for the user to follow.
    card = {
        'entity_id': 'ABCD',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
        'kind': 'video',
    }
    cards_table.insert(card).run(db_conn)
    code, response = routes.follow.follow_route({
        'cookies': {'session_id': session},
        'params': {'entity': {'kind': 'card', 'id': 'ABCD'}},
        'db_conn': db_conn,
    })
    assert code == 200
def test_update_vote(db_conn, users_table, topics_table, posts_table,
                     units_table, session):
    """Updating a vote on a proposal should succeed and flip the stored
    response value."""
    create_user_in_db(users_table, db_conn)
    create_topic_in_db(topics_table, db_conn)
    create_proposal_in_db(posts_table, units_table, db_conn)
    # Seed an existing down-vote on the proposal.
    vote = {
        'id': 'vbnm1234',
        'created': r.now(),
        'modified': r.now(),
        'user_id': 'abcd1234',
        'topic_id': 'wxyz7890',
        'proposal_id': 'jklm',
        'body': 'Boo!',
        'response': False,
        'kind': 'vote',
        'replies_to_id': 'val2345t',
    }
    posts_table.insert(vote).run(db_conn)
    code, response = routes.topic.update_post_route({
        'cookies': {'session_id': session},
        'params': {'body': 'Yay!', 'response': True},
        'db_conn': db_conn,
    }, 'wxyz7890', 'vbnm1234')
    assert code == 200
    assert response['post']['response'] is True
def heartbeatUserSession(self, nickname):
    """
    Refresh ``last_heartbeat`` on this user's session, creating the
    session document first if it does not exist yet.

    :param nickname: the nickname of the user whose session will be
        updated.

    Returns: Dict result of the insert/update query.
    """
    table = r.table(self.USER_SESSIONS_TABLE)
    existing = table.get(nickname).run(self.conn)
    if existing:
        query = table.get(nickname).update({"last_heartbeat": r.now()})
    else:
        # First heartbeat for this user: create the full session row.
        query = table.insert({
            "id": nickname,
            "last_heartbeat": r.now(),
            "last_message": r.now(),
            "session_start": r.now()
        })
    return query.run(self.conn)
def test_get_posts(db_conn, users_table, topics_table, posts_table):
    """Fetching a topic's posts should return every post on it."""
    create_user_in_db(users_table, db_conn)
    create_topic_in_db(topics_table, db_conn)
    # Two posts on the same topic by the same author.
    posts_table.insert([{
        'id': 'jklm',
        'created': r.now(),
        'modified': r.now(),
        'user_id': 'abcd1234',
        'topic_id': 'wxyz7890',
        'body': '''A Modest Proposal for Preventing the Children of
Poor People From Being a Burthen to Their Parents or
Country, and for Making Them Beneficial to the Publick.''',
        'kind': 'post',
    }, {
        'id': 'tyui',
        'created': r.now(),
        'modified': r.now(),
        'user_id': 'abcd1234',
        'topic_id': 'wxyz7890',
        'body': 'A follow up.',
        'kind': 'post',
    }]).run(db_conn)
    code, response = routes.topic.get_posts_route(
        {'params': {}, 'db_conn': db_conn}, 'wxyz7890')
    assert code == 200
    bodies = [post['body'] for post in response['posts'][:2]]
    assert any('Beneficial to the Publick' in body for body in bodies)
def publish(self, topic_key, payload):
    '''Publish a message to this exchange on the given topic.'''
    self.assert_table()
    # Dict topics must be matched literally, not as a nested pattern.
    match_topic = (r.literal(topic_key) if isinstance(topic_key, dict)
                   else topic_key)
    # Optimistically update a document that already carries this topic.
    result = self.table.filter({'topic': match_topic}).update({
        'payload': payload,
        'updated_on': r.now(),
    }).run(self.conn)
    # Nothing replaced -> topic is new, insert a fresh document.
    # NOTE: a concurrent publisher could insert the same topic in this
    # window and create a duplicate; that risk is accepted, and may
    # cause consumers to see a message more than once.
    if not result['replaced']:
        result = self.table.insert({
            'topic': topic_key,
            'payload': payload,
            'updated_on': r.now(),
        }).run(self.conn)
def create_proposal_in_db(posts_table, units_table, db_conn):
    """Seed a proposal post (topic wxyz7890) plus the unit version it
    proposes, for use by topic/post tests."""
    proposal = {
        'id': 'jklm',
        'created': r.now(),
        'modified': r.now(),
        'user_id': 'abcd1234',
        'topic_id': 'wxyz7890',
        'body': '''A Modest Proposal for Preventing the Children of
Poor People From Being a Burthen to Their Parents or
Country, and for Making Them Beneficial to the Publick.''',
        'kind': 'proposal',
        'name': 'New Unit',
        'replies_to_id': None,
        'entity_version': {'id': 'slash-1', 'kind': 'unit'},
    }
    unit_version = {
        'id': 'slash-1',
        'created': r.time(2014, 1, 1, 'Z'),
        'modified': r.time(2014, 1, 1, 'Z'),
        'entity_id': 'slash',
        'previous_id': None,
        'language': 'en',
        'name': 'Dividing two numbers.',
        'status': 'accepted',
        'available': True,
        'tags': ['math'],
        'body': 'The joy and pleasure of dividing numbers.',
        'require_ids': ['plus', 'minus', 'times'],
    }
    posts_table.insert(proposal).run(db_conn)
    units_table.insert(unit_version).run(db_conn)
def test_get_user_follows(db_conn, session, users_table, follows_table):
    """ Expect to get user's follows, if requested and allowed. """
    # Make the user's follows publicly visible.
    users_table.get('abcd1234').update({
        'settings': {'view_follows': 'public'}
    }).run(db_conn)
    follows_table.insert([{
        'id': 'JIkfo034n',
        'user_id': 'abcd1234',
        'entity': {
            'kind': 'card',
            'id': 'JFlsjFm',
        },
        'created': r.now(),
        'modified': r.now(),
    }]).run(db_conn)
    request = {
        'params': {'follows': True},
        'cookies': {'session_id': session},
        'db_conn': db_conn
    }
    code, response = routes.user.get_user_route(request, 'abcd1234')
    # Fix: also assert the response code, consistent with the other
    # route tests in this file.
    assert code == 200
    assert 'follows' in response
    assert len(response['follows']) == 1
def test_get_posts_votes(db_conn, users_table, units_table, topics_table,
                         posts_table):
    """Votes should be rendered alongside proposals in a topic's posts."""
    create_user_in_db(users_table, db_conn)
    create_topic_in_db(topics_table, db_conn)
    create_proposal_in_db(posts_table, units_table, db_conn)
    # Add a vote on the seeded proposal.
    vote = {
        'id': 'asdf4567',
        'created': r.now(),
        'modified': r.now(),
        'kind': 'vote',
        'body': 'Hooray!',
        'proposal_id': 'jklm',
        'topic_id': 'wxyz7890',
        'response': True,
    }
    posts_table.insert(vote).run(db_conn)
    code, response = routes.topic.get_posts_route(
        {'params': {}, 'db_conn': db_conn}, 'wxyz7890')
    assert code == 200
    assert response['posts'][0]['kind'] in ('proposal', 'vote')
    assert response['posts'][1]['kind'] in ('proposal', 'vote')
def test_follow_409(db_conn, session, cards_table, follows_table):
    """ Expect to fail to follow entity if already followed. """
    # The user already follows this card.
    follows_table.insert({
        'id': 'JIkfo034n',
        'user_id': 'abcd1234',
        'entity': {
            'kind': 'card',
            'id': 'JFlsjFm',
        },
    }).run(db_conn)
    cards_table.insert({
        'entity_id': 'JFlsjFm',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
    }).run(db_conn)
    request = {
        'cookies': {'session_id': session},
        'params': {
            'entity': {
                'kind': 'card',
                'id': 'JFlsjFm',
            }
        },
        # Fix: the route needs the database connection; every other
        # follow-route test in this file supplies it.
        'db_conn': db_conn,
    }
    code, response = routes.follow.follow_route(request)
    assert code == 409
def heartbeatUserInGroup(self, nickname, group):
    """
    Refresh a user's presence heartbeat within a group, creating the
    group-state document first if it does not exist.

    :param nickname: the nickname of the user to subscribe.
    :param group: the name of the group to subscribe to.
    """
    states = r.table(self.GROUP_STATES_TABLE)
    presence = states.get(group).run(self.conn)
    if presence:
        # Merge so other users' heartbeats on this group are preserved.
        query = states.get(group).update({
            "users": r.row["users"].merge(
                {nickname: {"heartbeat": r.now()}})
        })
    else:
        query = states.insert({
            "id": group,
            "users": {nickname: {"heartbeat": r.now()}}
        })
    return query.run(self.conn)
def upload_view():
    """API endpoint: accept an uploaded file, record its hashes and
    metadata, run the configured scanners, and return the stored record
    as JSON (or a 4xx error payload).
    """
    upload_file = request.files['file']
    file_stream = upload_file.stream.read()
    if file_stream:
        #: Collect upload file data
        sample = {'filename': secure_filename(upload_file.filename),
                  'sha1': hashlib.sha1(file_stream).hexdigest().upper(),
                  'sha256': hashlib.sha256(file_stream).hexdigest().upper(),
                  'md5': hashlib.md5(file_stream).hexdigest().upper(),
                  'ssdeep': pydeep.hash_buf(file_stream),
                  'filesize': len(file_stream),
                  'filetype': magic.from_buffer(file_stream),
                  'filemime': upload_file.mimetype,
                  'upload_date': r.now(),
                  'uploaded_by': "api",  # g.user
                  'detection_ratio': dict(infected=0, count=0),
                  'filestatus': "Processing"}
        insert_in_samples_db(sample)
        update_upload_file_metadata(sample)
        #: Run all configured scanners
        sample['detection_ratio'] = scan_upload(file_stream, sample)
        #: Done Processing File
        sample['filestatus'] = 'Complete'
        sample['scancomplete'] = r.now()
        update_sample_in_db(sample)
        # Re-read the stored record so the response reflects the DB state.
        found = is_hash_in_db(sample['md5'])
        if found:
            return jsonify(found)
        else:
            # NOTE(review): the sample was just inserted above, so this
            # branch looks unreachable, and the 404 text appears
            # copy-pasted from another endpoint — confirm intent.
            return jsonify(dict(error='Not a valid API end point.',
                                response=404)), 404
    else:
        return jsonify(dict(error='Missing Parameters', response=400)), 400
def parse_message(self, message):
    """Log a chat message to RethinkDB, fan out any URLs it contains to
    ``parse_url`` workers, and republish the message on the redis
    'irc_chat' channel.

    :param message: redis pubsub payload whose 'data' field is a
        JSON-encoded chat message.
    """
    message = json.loads(message['data'])
    # Fix: the original opened a new connection per message via
    # ``.repl()`` and never closed it, leaking one connection per
    # message. Use an explicit connection and close it when done.
    conn = rethinkdb.connect('localhost', 28015)
    try:
        data = {
            'channel': message['channel'],
            'timestamp': rethinkdb.now(),
            'user': message['user'],
            'content': message['content'],
            'server': message['server'],
            'bot': message['bot']
        }
        rethinkdb.db('siri').table('logs').insert(data).run(conn)
    finally:
        conn.close()
    # Spawn a parser for each URL found in the message body.
    for url in re.findall(url_re, message['content']):
        urldata = {
            'url': url,
            'user': message['user'],
            'channel': message['channel'],
            'server': message['server'],
            'bot': message['bot'],
            'timestamp': rethinkdb.now()
        }
        gevent.spawn(self.parse_url, urldata)
    # Republish with a JSON-serializable timestamp.
    data['timestamp'] = datetime.datetime.utcnow()
    self.red.publish('irc_chat', json.dumps(data, default=json_datetime))
def test_remove_set(db_conn, session, sets_table, users_sets_table):
    """Removing a set from the user's list should succeed (200)."""
    sets_table.insert({
        'entity_id': 'A1',
        'name': 'A',
        'body': 'Apple',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
    }).run(db_conn)
    # The user starts out with the set in their list.
    users_sets_table.insert({'user_id': 'abcd1234',
                             'set_ids': ['A1']}).run(db_conn)
    code, response = routes.user_sets.remove_set_route({
        'cookies': {'session_id': session},
        'db_conn': db_conn,
    }, 'abcd1234', 'A1')
    assert code == 200
def batch_query_bit9(new_hash_list):
    """Look up MD5 hashes against Bit9 in batches of 1000 and store the
    results.

    :param new_hash_list: iterable of MD5 hex strings to query.
    """
    # : Break list into 1000 unit chunks for Bit9
    for thousand_hashes in split_seq(new_hash_list, 1000):
        result = bit9.lookup_hashinfo(thousand_hashes)
        if result['response_code'] == 200 and result['results']['hashinfos']:
            for hash_info in result['results']['hashinfos']:
                if hash_info['isfound']:
                    md5 = hash_info['fileinfo']['md5'].upper()
                else:
                    md5 = hash_info['requestmd5'].upper()
                hash_info['timestamp'] = r.now()  # datetime.utcnow()
                db_insert({'md5': md5, 'Bit9': hash_info})
        elif result['response_code'] == 404:
            # Fix: record not-found markers for THIS chunk only. The
            # original iterated the whole input list on any chunk's 404,
            # inserting duplicate records for every other chunk.
            for new_hash in thousand_hashes:
                db_insert({
                    'md5': new_hash.upper(),
                    'Bit9': {'timestamp': r.now(),  # datetime.utcnow()
                             'isfound': False,
                             'requestmd5': new_hash.upper()}
                })
def finish_db(rethink_uuid, db_device, read_bytes):
    """Finishes a document that has been updating with progress_db.

    :param rethink_uuid: The rethink UUID to finish
    :param db_device: The bare name of the device ('sda', 'sdb')
    :param read_bytes: Total number of bytes that were read.
    """
    read_megs = read_bytes / (1024 * 1024)
    # Mark the wipe result complete and successful.
    final_state = {
        'in_progress': False,
        'finished': True,
        'completed': True,
        'progress': "100%",
        'progress_bar': "==============================",
        'time_remaining': "0:00:00",
        'read_bytes': read_bytes,
        'read_megs': read_megs,
        'failed': False,
        'success': True,
        'updated_at': r.now(),
        'finished_at': r.now()
    }
    # noinspection PyUnusedLocal
    updated = r.db('wanwipe').table('wipe_results').get(
        rethink_uuid).update(final_state).run(conn)
    # Release the device on this machine's state record and refresh the
    # record timestamp.
    disk_state = {'available': True, 'busy': False, 'wipe_completed': True,
                  'aborted': False, 'updated_at': r.now()}
    # noinspection PyUnusedLocal
    machine_updated = r.db('wanwipe').table('machine_state').get(
        machine_state_uuid).update({
            'disks': {db_device: disk_state},
            'updated_at': r.now()}).run(conn)
    print("\ndiskaction: LocalDB: Finished writing to key: {}".format(rethink_uuid))
def post(self):
    """Create a trip document from the submitted form and redirect home."""
    user = self.get_current_user()
    # trip field name -> form argument name
    field_sources = {
        'start_date': 'start_date',
        'end_date': 'end_date',
        'description': 'description',
        'place_id': 'place_id',
        'address': 'formatted_address',
        'locality': 'locality',
        'region': 'administrative_area_level_1',
        'county': 'administrative_area_level_2',
        'longitude': 'lng',
        'latitude': 'lat'
    }
    trip = {field: self.get_argument(arg, None)
            for field, arg in field_sources.items()}
    trip_uuid = simpleflake()
    trip['trip_id'] = base62().hash(trip_uuid, 12)
    trip['created_at'] = r.now()
    trip['updated_at'] = r.now()
    trip['creator_user_id'] = user['id']
    # r.point takes (longitude, latitude).
    trip['geo'] = r.point(float(trip['longitude']), float(trip['latitude']))
    r.table("trip").insert(trip).run()
    self.redirect("/")
def test_get_posts_paginate(db_conn, users_table, topics_table, posts_table):
    """Post listings should come back in pages of 10 and honor 'skip'."""
    create_user_in_db(users_table, db_conn)
    create_topic_in_db(topics_table, db_conn)
    # 25 posts -> pages of 10, 10, and 5.
    for i in range(25):
        posts_table.insert({
            'id': 'jklm%s' % i,
            'created': r.now(),
            'modified': r.now(),
            'user_id': 'abcd1234',
            'topic_id': 'wxyz7890',
            'body': 'test %s' % i,
            'kind': 'post',
        }).run(db_conn)
    request = {'params': {}, 'db_conn': db_conn}
    code, response = routes.topic.get_posts_route(request, 'wxyz7890')
    assert code == 200
    assert len(response['posts']) == 10
    # Skipping 20 leaves only the final 5.
    request['params'] = {'skip': 20}
    code, response = routes.topic.get_posts_route(request, 'wxyz7890')
    assert len(response['posts']) == 5
def __init__(self, who, text, title, project_id):
    """Create a note owned by *who*, attached to *project_id*.

    Creation and modification timestamps are ReQL ``r.now()`` terms,
    evaluated server-side when the document is stored.
    """
    self._type = "note"
    self.owner = who
    self.title = title
    self.note = text
    self.project_id = project_id
    self.birthtime = r.now()
    self.mtime = r.now()
def claim_sites(self, n=1):
    """Atomically claim up to *n* ACTIVE sites for this worker.

    A single ReQL query selects candidate sites (unclaimed, or claimed
    more than an hour ago), caps per-job claims at each site's
    ``max_claimed_sites``, and marks the winners claimed — all
    server-side, to minimize the race window between workers.

    :param n: maximum number of sites to claim.
    Returns: list of claimed brozzler.Site objects.
    Raises: brozzler.NothingToClaim if no site could be claimed.
    """
    self.logger.trace('claiming up to %s sites to brozzle', n)
    result = (
        self.rr.table('sites').get_all(r.args(
            r.db(self.rr.dbname).table('sites', read_mode='majority')
            .between(
                ['ACTIVE', r.minval], ['ACTIVE', r.maxval],
                index='sites_last_disclaimed')
            .order_by(r.desc('claimed'), 'last_disclaimed')
            # fold() tallies, per job id, how many sites have been
            # emitted so far (acc maps job id -> running count)...
            .fold(
                {}, lambda acc, site: acc.merge(
                    r.branch(
                        site.has_fields('job_id'),
                        r.object(
                            site['job_id'].coerce_to('string'),
                            acc[site['job_id'].coerce_to('string')].default(0).add(1)),
                        {})),
                # ...and emits a site id only if the site is claimable
                # (unclaimed or stale-claimed) and the job's tally is
                # still within max_claimed_sites.
                emit=lambda acc, site, new_acc: r.branch(
                    r.and_(
                        r.or_(
                            site['claimed'].not_(),
                            site['last_claimed'].lt(r.now().sub(60*60))),
                        r.or_(
                            site.has_fields('max_claimed_sites').not_(),
                            new_acc[site['job_id'].coerce_to('string')].le(site['max_claimed_sites']))),
                    [site['id']], []))
            .limit(n)))
        .update(
            # try to avoid a race condition resulting in multiple
            # brozzler-workers claiming the same site
            # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
            r.branch(
                r.or_(
                    r.row['claimed'].not_(),
                    r.row['last_claimed'].lt(r.now().sub(60*60))),
                {'claimed': True, 'last_claimed': r.now()}, {}),
            return_changes=True)).run()
    self._vet_result(
        result, replaced=list(range(n+1)),
        unchanged=list(range(n+1)))
    sites = []
    for i in range(result["replaced"]):
        if result["changes"][i]["old_val"]["claimed"]:
            self.logger.warn(
                "re-claimed site that was still marked 'claimed' "
                "because it was last claimed a long time ago "
                "at %s, and presumably some error stopped it from "
                "being disclaimed",
                result["changes"][i]["old_val"]["last_claimed"])
        site = brozzler.Site(self.rr, result["changes"][i]["new_val"])
        sites.append(site)
    self.logger.debug('claimed %s sites', len(sites))
    if sites:
        return sites
    else:
        raise brozzler.NothingToClaim
def create_user_in_db(users_table, db_conn):
    """Insert the canonical test user (id 'abcd1234') and return the
    driver's insert result."""
    test_user = {
        'id': 'abcd1234',
        'name': 'test',
        'email': '*****@*****.**',
        'password': bcrypt.encrypt('abcd1234'),
        'created': r.now(),
        'modified': r.now()
    }
    return users_table.insert(test_user).run(db_conn)
def test_respond_card(db_conn, units_table, cards_table,
                      cards_parameters_table, responses_table, session):
    """ Expect to respond to a card. (200) """
    # A multiple-choice card with one correct option.
    choice_card = {
        'entity_id': 'tyui4567',
        'unit_id': 'vbnm7890',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
        'kind': 'choice',
        'name': 'Meaning of Life',
        'body': 'What is the meaning of life?',
        'options': [
            {'value': '42', 'correct': True, 'feedback': 'Yay!'},
            {'value': 'love', 'correct': False, 'feedback': 'Boo!'},
        ],
        'order': 'set',
        'max_options_to_show': 4,
    }
    cards_table.insert(choice_card).run(db_conn)
    cards_parameters_table.insert({'entity_id': 'tyui4567'}).run(db_conn)
    units_table.insert({'entity_id': 'vbnm7890',
                        'created': r.now()}).run(db_conn)
    # The learner must already be "at" this card in their context.
    redis.set('learning_context_abcd1234', json.dumps({
        'set': {'entity_id': 'jkl;1234'},
        'card': {'entity_id': 'tyui4567'},
    }))
    code, response = routes.card.respond_to_card_route({
        'params': {'response': '42'},
        'cookies': {'session_id': session},
        'db_conn': db_conn,
    }, 'tyui4567')
    assert code == 200
    assert 'response' in response
    assert 'feedback' in response
    redis.delete('learning_context_abcd1234')
def db_remove_disk(conn, device): """Removes a disk from the the presence database. :param device: The device to remove """ # Insert Data r.table("posts").get("1").replace(r.row.without('author')).run() #replaced = r.db('wanwipe').table('machine_state').get(machine_state_uuid).replace(r.row.without(device)).run(conn) #updated = r.db('wanwipe').table('machine_state').get(machine_state_uuid).update({ # 'updated_at': r.now()}).run(conn) # Update the record timestamp. # noinspection PyUnusedLocal updated = r.db('wanwipe').table('machine_state').get(machine_state_uuid).update({'disks': { device: {'target': device, 'available': False, 'busy': False, 'updated_at': r.now(), 'removed_at': r.now()}}, 'updated_at': r.now()}).run(conn) # Update the record timestamp.
def create_topic_in_db(topics_table, db_conn, user_id='abcd1234'):
    """Insert the canonical test topic (id 'wxyz7890') owned by
    *user_id*, attached to unit 'efgh5678'."""
    topic = {
        'id': 'wxyz7890',
        'created': r.now(),
        'modified': r.now(),
        'user_id': user_id,
        'name': 'A Modest Proposal',
        'entity': {'id': 'efgh5678', 'kind': 'unit'},
    }
    topics_table.insert(topic).run(db_conn)
def create_post_in_db(posts_table, db_conn, user_id='abcd1234'):
    """Insert the canonical test post (id 'jklm') on topic 'wxyz7890'
    authored by *user_id*."""
    post = {
        'id': 'jklm',
        'created': r.now(),
        'modified': r.now(),
        'user_id': user_id,
        'topic_id': 'wxyz7890',
        'body': '''A Modest Proposal for Preventing the Children of
Poor People From Being a Burthen to Their Parents or
Country, and for Making Them Beneficial to the Publick.''',
        'kind': 'post',
    }
    posts_table.insert(post).run(db_conn)
def add_test_set(db_conn, users_table=None, units_table=None,
                 responses_table=None, sets_table=None):
    """
    Add doesn't require anything. Multiply requires add. Subtract
    requires add. Divide requires multiply. Add is done, Subtract needs
    review, Multiply needs to be learned, Divide needs to be diagnosed.
    """
    if users_table:
        users_table.insert({"id": "user"}).run(db_conn)
    if units_table:
        units = [
            {"entity_id": "add", "status": "accepted",
             "created": r.now()},
            {"entity_id": "subtract", "require_ids": ["add"],
             "status": "accepted", "created": r.now()},
            {"entity_id": "multiply", "require_ids": ["add"],
             "status": "accepted", "created": r.now()},
            {"entity_id": "divide", "require_ids": ["multiply", "subtract"],
             "status": "accepted", "created": r.now()},
        ]
        units_table.insert(units).run(db_conn)
    if responses_table:
        # Subtract was learned long ago, so it needs review.
        responses = [
            {"user_id": "user", "unit_id": "add", "learned": 0.99,
             "created": r.now()},
            {"user_id": "user", "unit_id": "multiply", "learned": 0.0,
             "created": r.now()},
            {"user_id": "user", "unit_id": "subtract", "learned": 0.99,
             "created": r.time(2004, 11, 3, "Z")},
        ]
        responses_table.insert(responses).run(db_conn)
    if sets_table:
        members = [{"id": unit_id, "kind": "unit"}
                   for unit_id in ("add", "subtract", "multiply", "divide")]
        sets_table.insert({
            "entity_id": "set",
            "status": "accepted",
            "members": members,
        }).run(db_conn)
def find_message_history(past=600):
    """Return IRC messages whose meta.updated_at falls within the last
    *past* seconds (with an hour of slack for clock skew), newest first.

    :param past: lookback window in seconds.
    Returns: list of message documents (possibly empty).
    """
    query = (
        r.db(rdb['chatdb']).table('ko_ircs')
        .order_by(index=r.desc('updated_at'))
        .filter(r.row['meta']['updated_at'].during(
            r.now() - int(past), r.now() + 3600)))
    # Fix: list(...) is never None, so the old
    # `if selection is not None ... else return []` branch was dead;
    # return the list directly (an empty result is already []).
    return list(query.run(g.rdb_conn))
def db_found_disk(conn, device):
    """Adds a newly discovered disk to the presence database.

    :param device: The device to add
    """
    disk_id = get_disk_sdinfo("/dev/{}".format(device))
    # Refresh the per-disk record first.
    db_update_disk(conn, device)
    disk_entry = {'target': device, 'available': True, 'busy': False,
                  'disk_id': disk_id, 'updated_at': r.now(),
                  'discovered_at': r.now()}
    # Attach the disk to this machine's state and bump the record
    # timestamp.
    # noinspection PyUnusedLocal
    updated = r.db('wanwipe').table('machine_state').get(
        machine_state_uuid).update({
            'disks': {device: disk_entry},
            'updated_at': r.now()}).run(conn)
def store(languages):
    """
    Stores in database the result. If the result is equal to the latest
    row in the db it only updates the timestamp.
    """
    table = r.db('indielangs').table("languages")
    latest, latest_id = latest_result()
    if latest != languages:
        # New result: insert a fresh row.
        table.insert({'languages': languages,
                      'timestamp': r.now()}).run(DB)
    else:
        # Same result as last time: just bump the timestamp.
        table.get(latest_id).update({'timestamp': r.now()}).run(DB)
def timer_fired():
    """Do periodic housekeeping tasks. I'm a transient thread!"""
    conn = connect_db(None)  # Assure this thread is connected to rethinkdb.
    try:
        # Ping the database first to verify the connection is alive.
        r.now().run(conn, time_format="raw")
    except RqlDriverError:
        print("{}: Database connection problem. Reconnecting.".format(dt.isoformat(dt.now())), file=sys.stderr)
        conn = connect_db(None)  # Make very sure we're connected to rethinkdb.
    db_refresh(conn)  # Refresh the timestamp on the machine_state
    conn.close()
    print("{}: Waiting for device changes (press ctrl+c to exit)".format(dt.isoformat(dt.now())))
    return True  # To fire the timer again.
def find_tasks(past=600):
    """Return tasks (documents with both 'title' and 'updated_at') that
    were touched within the last *past* seconds (plus an hour of slack
    for clock skew), newest first.

    :param past: lookback window in seconds.
    Returns: list of task documents (possibly empty).
    """
    query = (
        r.db(rdb['chatdb']).table('ko_tasks')
        .filter(lambda this_message: this_message.has_fields({'title': True}))
        .filter(lambda this_message: this_message.has_fields({'updated_at': True}))
        .filter(r.row['updated_at'].during(r.now() - int(past),
                                           r.now() + 3600))
        .order_by(r.desc(r.row['updated_at'])))
    # Fix: list(...) is never None, so the old `if selection is not
    # None ... else return []` branch was dead; return the list directly.
    return list(query.run(g.rdb_conn))
def postJobs():
    """Handle the job-post form: GET redirects to the add page; POST
    stores the submission for the logged-in user, else redirects to
    login."""
    if request.method == 'GET':
        return redirect(url_for('add'))
    if request.method == 'POST':
        title = request.form['title']
        link = request.form['link']
        details = request.form['text']
        if session.get('id', None):
            userid = session['id']
            connection = r.connect('localhost', 28015)
            try:
                r.db('hackjobs').table('posts').insert({
                    'userid': userid,
                    'title': title,
                    'time': r.now(),
                    'link': link,
                    'details': details
                }).run(connection)
            finally:
                # Close even if the insert raises.
                connection.close()
            # Fix: the view previously fell off the end after a
            # successful insert, returning None (a 500 in Flask).
            # Redirect home like the other submission views do.
            return redirect(url_for('home'))
        else:
            return redirect(url_for('login', ))
async def web_hook_notification(request):
    """aiohttp handler for registry web-hook events: on a 'push' event,
    queue a deploy task for the pushed image.

    :param request: aiohttp request; URL must carry the expected token.
    Returns: JSON web.Response (202 queued, 404/422 on errors).
    """
    if not request.match_info['token'] == url_token:
        return web.Response(status=404, text='{"error": "Not found"}',
                            content_type='application/json')
    req_json = await request.json()  # type: dict
    for event in req_json['events']:
        print(event['action'])
        if 'push' == event['action']:
            try:
                image = f"{event['target']['repository']}:" \
                        f"{event['target']['tag']}"
            except KeyError:
                # Fix: the error body was malformed JSON
                # ('{"error: invalid request body') — missing quote and
                # closing brace.
                return web.Response(status=422,
                                    text='{"error": "invalid request body"}',
                                    content_type='application/json')
            logger.info(f'Received push from registry with image {image}')
            # Queue the deploy task for the worker to pick up.
            data = {
                'image-name': image,
                'event': 'new-image',
                'status': 'ready',
                'time': r.now().to_epoch_time()
            }
            await conn.run(
                conn.db().table('tasks').insert(data)
            )
            return web.Response(status=202,
                                text='{"status": "deploy added to queue"}',
                                content_type='application/json')
        else:
            return web.Response(status=422,
                                text='{"status": "Something went wrong"}',
                                content_type='application/json')
def test_list_by_entity_ids(db_conn, sets_table):
    """ Expect to list sets by given entity IDs. """
    sets_table.insert([{
        'entity_id': 'A1',
        'name': 'A',
        'body': 'Apple',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
    }, {
        'entity_id': 'B2',
        'name': 'B',
        'body': 'Banana',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
    }, {
        'entity_id': 'C3',
        'name': 'C',
        'body': 'Coconut',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
    }, {
        'entity_id': 'D4',
        'name': 'D',
        'body': 'Date',
        'created': r.now(),
        'modified': r.now(),
        'status': 'accepted',
    }]).run(db_conn)
    sets = Set.list_by_entity_ids(db_conn, ['A1', 'C3'])
    assert sets[0]['body'] in ('Apple', 'Coconut')
    # Fix: the second assertion previously re-checked sets[0], leaving
    # the second returned set unverified.
    assert sets[1]['body'] in ('Apple', 'Coconut')
def get_unit_location(unit_id):
    """Fetch the live location of a transit unit from the API.

    :param unit_id: id of the unit to query.
    Returns: (location_dict, unit_id) on success, else (None, unit_id).
    """
    url = UNIT_LOCATION.format(unit_id)
    resp = requests.get(url)
    if resp.status_code == 200:
        try:
            data = resp.json()
            if 'BusLocationData' in data and data[
                    'BusLocationData'] is not None:
                buslocation = data['BusLocationData']
                return {
                    'date': r.now(),
                    # Fix: r.point() takes (longitude, latitude); the
                    # arguments were previously swapped.
                    'location': r.point(buslocation['Longitude'],
                                        buslocation['Latitude']),
                    'unit_id': buslocation['UnitID'],
                    'variant_id': buslocation['VariantId'],
                }, unit_id
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt /
            # SystemExit still propagate.
            print('Error fetching unit location')
    return None, unit_id
def healthy_service(self, role):
    '''
    Find least loaded healthy service in the registry.

    A service is considered healthy if its 'last_heartbeat' was less
    than 'ttl' seconds ago

    Args:
        role (str): role name

    Returns:
        the healthy service with the supplied `role` with the smallest
        value of 'load'
    '''
    def is_healthy(svc):
        # Heartbeat age must be under the service's own ttl.
        return r.now().sub(svc["last_heartbeat"]) < svc["ttl"]

    query = (self.rr.table('services')
             .filter({"role": role})
             .filter(is_healthy)
             .order_by("load")[0])
    try:
        return query.run()
    except r.ReqlNonExistenceError:
        # No healthy service with this role.
        return None
def add_user():
    """Admin view: render the add-user form and, on a valid submission
    with a fresh username, create the user and return to the user list.
    """
    form = UserForm(request.form)
    if form.validate_on_submit():
        if not db.username_exist(form.username.data):
            email = form.email.data
            username = form.username.data
            # Store only the salted hash of the password.
            password = sha256_crypt.encrypt(form.password.data)
            role = form.role.data
            user = {
                'username': username,
                'email': email,
                'password': password,
                'date_created': now(),
                'type': role,
                'is_active': True,
            }
            response = db.create_user(user)
            if response['inserted'] == 1:
                return redirect(url_for('admin.users'))
            # NOTE(review): if the insert reports anything other than 1,
            # control falls through to re-render the form with no error
            # message — confirm that is intended.
        else:
            flash('This username already exists.', 'error-message')
    return render_template('addUser.html', form=form)
def _get_employee_record(self, company_name, _id, keyword=None, profile_id=None): start_time = time.time() #conn = r.connect(host="localhost", port=28015, db="triggeriq") conn = rethink_conn.conn() res = self._employees(company_name, keyword) res["company_id"] = _id res["profile_id"] = profile_id print "EMPLOYEES FOUND", company_name, res.shape r.table('company_employees').insert(res.to_dict("r")).run(conn) epsc = "employee_search_completed" r.table("triggers").get(_id).update({epsc: r.now()}).run(conn) bitmapist.mark_event("function:time:company_employee_search", int((time.time() - start_time) * 10**6)) rd.zadd("function:time:company_employee_search", str((time.time() - start_time) * 10**6), arrow.now().timestamp)
def setGroupTopic(self, name, topic, author):
    """
    Set the IRC channel's topic.

    :param name: the name of the group whose topic to set.
    :param topic: the topic string.
    :param author: the nickname of the author.

    Returns: The new group meta.
    """
    new_meta = {
        "topic": topic,
        "topic_time": r.now(),
        "topic_author": author
    }
    query = r.table(self.GROUPS_TABLE).get(name).update({"meta": new_meta})
    return query.run(self.conn)
def createGroup(self, name, channelType):
    """
    Creates a new group metadata and group state.

    :param name: the name of the new group.
    :param channelType: the type of the group.

    Returns: The metadata of the new group, The state of the new group.
    """
    assert name
    assert channelType
    if r.table(self.GROUPS_TABLE).get(name).run(self.conn):
        # Leave existing groups untouched; returns None implicitly.
        log.err("Group already exists: %s" % name)
        return
    group_doc = {
        "id": name,
        "name": name,
        "type": channelType,
        "meta": {
            "topic": "",
            "topic_author": "",
            "topic_time": r.now()
        },
    }
    group = r.table(self.GROUPS_TABLE).insert(group_doc).run(self.conn)
    state = r.table(self.GROUP_STATES_TABLE).insert(
        {"id": name, "users": {}}).run(self.conn)
    return group, state
def test_get_user_posts(db_conn, session, posts_table):
    """ Expect to get user's 10 latest posts when requested in addition. """
    shared_body = '''A Modest Proposal for Preventing the Children of
Poor People From Being a Burthen to Their Parents or
Country, and for Making Them Beneficial to the Publick.'''

    def make_post(user_id):
        # All three posts share the topic and body; only the author varies.
        return {
            'created': r.now(),
            'modified': r.now(),
            'user_id': user_id,
            'topic_id': 'fj2Ojfdskl2',
            'body': shared_body,
            'kind': 'post',
        }

    # Two posts by our user, one by somebody else.
    posts_table.insert([make_post('abcd1234'),
                        make_post('gjrklj15431'),
                        make_post('abcd1234')]).run(db_conn)
    code, response = routes.user.get_user_route({
        'params': {'posts': True},
        'cookies': {'session_id': session},
        'db_conn': db_conn
    }, 'abcd1234')
    assert 'posts' in response
    assert len(response['posts']) == 2
def streaming_accuracy(now, rdd, topic, re_table, conn, attributes, rule):
    """Compare 'target' and 'source' records in one micro-batch, joined
    by *rule*, and store a match/miss summary row in RethinkDB.

    :param now: batch timestamp (printed only).
    :param rdd: RDD of (key, 'group|attr1|attr2|...') pairs.
    :param topic: topic name recorded with the result.
    :param re_table: rethinkdb table object to insert into.
    :param conn: rethinkdb connection.
    :param attributes: column names following the leading 'group' column.
    :param rule: SQL join condition between target and source rows.
    """
    try:
        tic = time.clock()
        rdd.cache()
        print "=========%s========" % str(now)
        spark = getSparkSessionInstance(rdd.context.getConf())
        # rowRdd = rdd.map(lambda w: json.loads(w))
        result = dict()
        Attr = Row('group', *attributes)
        rowRdd = rdd.map(lambda w: w[1].split("|"))
        # The first field tags each record as 'target' or 'source'.
        target = rowRdd.filter(lambda row: row[0] == 'target').map(
            lambda p: Attr(*p))
        source = rowRdd.filter(lambda row: row[0] == 'source').map(
            lambda p: Attr(*p))
        total = target.count()
        df = spark.createDataFrame(target)
        df.createOrReplaceTempView('target')
        df2 = spark.createDataFrame(source)
        df2.createOrReplaceTempView('source')
        # Count target rows that have a matching source row under `rule`.
        match = spark.sql(
            "select count(*) as match from target, source where " + rule)
        match.show()
        result['topic'] = topic
        result['summary'] = match.toPandas().to_dict('records')
        result['summary'][0]['miss'] = total - result['summary'][0]['match']
        result['summary'][0]['total'] = total
        result['time'] = r.now().to_iso8601()
        toc = time.clock()
        result['run_time'] = toc - tic
        print result
        re_table.insert(result).run(conn)
        rdd.unpersist()
    except:
        # NOTE(review): bare except silently swallows everything,
        # including SystemExit/KeyboardInterrupt — consider narrowing
        # to Exception.
        print("Unexpected error:", sys.exc_info())
        pass
def streaming(now, rdd, topic, re_table, conn, attributes):
    """Summarize one micro-batch of comma-separated integer triples
    (a,b,c) and store the summary row in RethinkDB.

    :param now: batch timestamp (unused except via the commented print).
    :param rdd: RDD of (key, 'a,b,c') string pairs.
    :param topic: topic name recorded with the result.
    :param re_table: rethinkdb table object to insert into.
    :param conn: rethinkdb connection.
    :param attributes: attribute spec forwarded to get_attributes().
    """
    try:
        tic = time.clock()
        rdd.cache()
        # print "=========%s========" % str(now)
        spark = getSparkSessionInstance(rdd.context.getConf())
        # rowRdd = rdd.map(lambda w: json.loads(w))
        result = dict()
        rowRdd = rdd.map(lambda w: w[1].split(",")).map(
            lambda p: Row(a=int(p[0]), b=int(p[1]), c=int(p[2])))
        df = spark.createDataFrame(rowRdd)
        summary = get_attributes(df, attributes)
        result['topic'] = topic
        result['summary'] = summary
        result['time'] = r.now().to_iso8601()
        toc = time.clock()
        result['run_time'] = toc - tic
        re_table.insert(result).run(conn)
        rdd.unpersist()
    except Exception:
        # Fix: narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt propagate; batch failures are still logged
        # and the stream continues.
        print("Unexpected error:", sys.exc_info())
def create_ride(sender_id):
    """Flask view: create a ride request for *sender_id*.

    GET returns a placeholder string; POST inserts a new ride document
    and returns a confirmation string.
    """
    if request.method == 'GET':
        return "create rider"
    if request.method == 'POST':
        from_place = request.form['from']
        to = request.form['to']
        conn = create_connection()
        data = list(
            r.db('udio').table('rides').insert([{
                'sender_id': sender_id,
                'from_place': from_place,
                'to_place': to,
                'date': r.now(),
                'completed': 0,
                'rider_id': None
            }]).run(conn))
        print(data[0]['id'])
        # NOTE(review): .prepend() is a ReQL *array* operation; calling
        # it on a filtered selection of 'users' looks wrong — this was
        # probably meant to be an update() that prepends the new ride id
        # to a field on the user document. Also, wrapping the insert
        # result in list() yields the result dict's keys, so
        # data[0]['id'] is suspect. Verify both against the driver docs.
        user = r.db('udio').table('users').filter({
            'id': session['user'][0]['id']
        }).prepend(data[0]['id']).run(conn)
        close_connection(conn)
        return "rider created"
def insert_to_db(entry, data_type, conn):
    """Insert user or group individually to RethinkDB from dict of data
    and begins delta sync timer."""
    # Dispatch to the matching inbound filter for this data type.
    filters = {"user": inbound_user_filter, "group": inbound_group_filter}
    if data_type not in filters:
        LOGGER.warning("unsupported data type: %s", data_type)
        return
    standard_entry = filters[data_type](entry, "ldap")
    inbound_entry = {
        "data": standard_entry,
        "data_type": data_type,
        "sync_type": "initial",
        "timestamp": r.now(),
        "provider_id": LDAP_DC,
    }
    LOGGER.debug(
        "Inserting LDAP %s into inbound queue: %s",
        data_type,
        standard_entry["remote_id"],
    )
    r.table("inbound_queue").insert(inbound_entry).run(conn)
def request_key():
    """Render the API-key application form (GET) or validate and store a
    submitted application (POST)."""
    user = session['user']
    if request.method == 'GET':
        return render_template('request.html')
    elif request.method == 'POST':
        name = request.form.get('name', None)
        servers = request.form.get('servers', None)
        reason = request.form.get('reason', None)
        app_type = request.form.get('type', None)
        link = request.form.get('link', None)
        description = request.form.get('description', None)
        tos = request.form.get('tos', False)
        consent = request.form.get('consent', False)
        # All of these must be present/accepted (servers is optional).
        if not all((reason, name, link, app_type, description, tos)):
            result = 'Please make sure you have entered a name, description, type, server count, link, description and have accepted our TOS before submitting your application'
            return render_template('result.html', result=result, success=False)
        if not link.startswith('http'):
            return render_template('result.html', result='URL must use HTTP(S) scheme!', success=False)
        application = {
            "owner": user['id'],
            "email": user['email'],
            "name": name,
            "servers": servers,
            "description": description,
            "link": link,
            "type": app_type,
            "email_consent": consent,
            "owner_name": f'{user["username"]}#{user["discriminator"]}',
            "reason": reason,
            "time": r.now()
        }
        r.table('applications').insert(application).run(get_db())
        result = 'Application Submitted 👌'
        return render_template('result.html', result=result, success=True)
def createGroup(self, name, channelType):
    """Create an IRC channel (if it doesn't exist yet) in the channels table.

    Fields for the channels table are:
        name (string): the name of the channel
        owner (string): the owner (by nickname) of the channel
        type (string): public or private
        topic (dict): dict of topic message, topic author, topic time
        messages (array of dicts): each element (message) contains message
            time, message author, and message contents

    Returns the (group, state) insert results, or None (after logging an
    error) when the channel already exists.
    """
    assert name
    assert channelType
    # Guard clause: never clobber an existing channel.
    if r.table(self.GROUPS_TABLE).get(name).run(self.conn):
        log.err("Group already exists: %s" % name)
        return None
    group = r.table(self.GROUPS_TABLE).insert({
        "id": name,
        "name": name,
        "type": channelType,
        "meta": {
            "topic": "",
            "topic_author": "",
            "topic_time": r.now()
        },
        "messages": []
    }).run(self.conn)
    # Per-channel presence/state record, keyed by the same name.
    state = r.table(self.GROUP_STATES_TABLE).insert({
        "id": name,
        "users": {}
    }).run(self.conn)
    return group, state
def add():
    """Show the add-post form (GET) or moderate and store a new post (POST).

    Requires a logged-in session; redirects to login otherwise. Posts that
    fail moderation are rejected with a flash message.
    """
    if request.method == 'GET':
        if session.get('id', None):
            return render_template('add.html', logout='logout')
        else:
            return redirect(url_for('login'))
    elif request.method == 'POST':
        title = request.form['title']
        link = request.form['link']
        text = request.form['text']
        bot = modBot(title, link, text)
        if session.get('id', None):
            userid = session.get('id', '')
        else:
            return redirect(url_for('login'))
        # FIX: test truthiness directly instead of comparing `== True`.
        if bot.check():
            connection = r.connect('localhost', 28015)
            # FIX: the connection was previously never closed (leak);
            # ensure it is released even if the insert raises.
            try:
                r.db('hackjobs').table('post').insert({
                    'title': title,
                    'link': link,
                    'text': text,
                    'userid': userid,
                    'time': r.now()
                }).run(connection)
            finally:
                connection.close()
            app.logger.error('New post added:' + title + ' Link:' + link + ' ' + ' Text:' + text)
            return redirect(url_for('home'))
        else:
            flash(
                "Submission has been removed by ''autoModeratorBot'' for vulgar or invalid content"
            )
            return render_template('add.html')
def on_message(self, message):
    """Handle a message received from the bus.

    Looks up the bus filter matching the message's exchange/routing key,
    collects recipient emails from the filter's subscriptions, renders the
    email from each subscription's template, then sends and archives it.

    :param message: dict with a 'metadata' sub-dict carrying 'exchange'
        and 'routing_key', plus payload fields the templates may reference.
    """
    user_emails = []
    # Feature switch: skip all work when outbound email is disabled.
    if st.SEND_EMAILS:
        exchange = message.get('metadata').get('exchange')
        routing_key = message.get('metadata').get('routing_key', '')
        bus_filter = self.filters_handler.get_by_exchange_key(
            exchange, routing_key)
        if bus_filter:
            for sub in self.subscriptions_handler.get_by_filter(
                    bus_filter):
                user = self.users_handler.get(sub['user_id'])
                template = self.templates_handler.get(sub['template_id'])
                if template:
                    subject, text = self.create_email(template, message)
                    # A template may name a message field whose value is a
                    # username; if so, that user is notified instead of
                    # the subscriber.
                    user_filter = template.get('user_filter')
                    if user_filter:
                        user_name = message.get(user_filter)
                        user_searched = self.users_handler.get(user_name)
                        if user_searched:
                            st.logger.info('Notification to: %r',
                                           user_searched['email'])
                            user_emails.append(user_searched['email'])
                    else:
                        st.logger.info('Notification to: %r',
                                       user['email'])
                        user_emails.append(user['email'])
                else:
                    # NOTE(review): reached only when `template` is falsy,
                    # yet it is still passed to get_default_template —
                    # confirm the handler accepts a missing template.
                    subject, text = self.get_default_template(
                        template, message)
            # NOTE(review): if the filter has zero subscriptions, `subject`
            # and `text` are never bound and send() raises NameError —
            # presumably filters always have >= 1 subscription; confirm.
            now = r.now().to_iso8601()
            self.smtp.send(user_emails, subject, text)
            self.archive_message(bus_filter.get('exchange'), now,
                                 user_emails, subject)
def test_list_required_by(db_conn, cards_table):
    """Expect to list all the entity that require the given one."""
    # Fields shared by every fixture card; per-card overrides follow.
    base = {
        'unit_id': 'zytx',
        'status': 'accepted',
        'kind': 'video',
    }
    cards_table.insert([
        dict(base, entity_id='abcd', created=r.now(), modified=r.now(),
             requires=['zxyz']),
        dict(base, entity_id='abcd',
             created=r.time(1986, 11, 3, 'Z'),
             modified=r.time(1986, 11, 3, 'Z')),
        dict(base, entity_id='zxyz', created=r.now(), modified=r.now()),
        dict(base, entity_id='qwer', created=r.now(), modified=r.now(),
             kind='choice', requires=['abcd']),
    ]).run(db_conn)
    required_by = Card.list_required_by(db_conn, 'abcd')
    # Only the latest 'qwer' card lists 'abcd' in its requirements.
    assert len(required_by) == 1
    assert required_by[0]['entity_id'] == 'qwer'
def test_get_follows(db_conn, session, follows_table):
    """Expect to get a list of follows for user."""
    def _follow(user_id, kind, entity_id):
        # Build one follow document with server-side timestamps.
        return {
            'user_id': user_id,
            'created': r.now(),
            'modified': r.now(),
            'entity': {'kind': kind, 'id': entity_id},
        }

    follows_table.insert([
        _follow('JFldl93k', 'card', 'JFlsjFm'),
        _follow('abcd1234', 'card', 'JFlsjFm'),
        _follow('abcd1234', 'unit', 'u39Fdjf0'),
    ]).run(db_conn)
    request = {
        'cookies': {'session_id': session},
        'params': {},
        'db_conn': db_conn,
    }
    code, response = routes.follow.get_follows_route(request)
    assert code == 200
    # Only the two follows belonging to the session's user are returned.
    assert len(response['follows']) == 2
}]).run(database.db_conn)) (database.db.table('users_sets').insert([{ 'id': 'doris-sets', 'created': r.time(2014, 1, 1, 'Z'), 'modified': r.time(2014, 1, 1, 'Z'), 'user_id': 'doris', 'set_ids': ['basic-math'], }]).run(database.db_conn)) # id, created, modified # user_id, card_id, unit_id, response, score, learned if len(argv) > 1 and argv[1] == 'learn_mode': (database.db.table('responses').insert([{ 'id': 'response1', 'created': r.now(), 'modified': r.now(), 'user_id': 'doris', 'card_id': 'plus-choice-a', 'unit_id': 'plus', 'response': 1, 'score': 1, 'learned': 1, }, { 'id': 'response2', 'created': r.now(), 'modified': r.now(), 'user_id': 'doris', 'card_id': 'minus-choice-a', 'unit_id': 'minus', 'response': 1,
logging.info(datetime.now().strftime('%Y-%m-%d %H:%M:%S')) rdb_config = get_conf() api = init() tweet_ids = [] n = 100 #Update all Tweets that were posted minuteCount from now minuteCount = 60 * 24 * 2 try: with r.connect(**rdb_config) as conn: tweet_ids = r.table('tweets').between( (r.now() - minuteCount * 60).to_iso8601(), r.now().to_iso8601(), index='created_at').filter(lambda tweet: (~tweet.has_fields( 'retweeted_status'))).get_field('id_str').coerce_to( 'array').run(conn) except Exception as e: logging.error("Cannot get current Tweets in db. Error message: %s", e) raise chunked_tweet_ids = [tweet_ids[i:i + n] for i in range(0, len(tweet_ids), n)] try: for i in range(len(chunked_tweet_ids)): with r.connect(**rdb_config) as conn: for tweet in api.statuses_lookup(chunked_tweet_ids[i]): r.table('tweets').get(tweet._json['id_str']).update({
def fetch_expired_roles(user_id):
    """Fetch expired role memberships of given user"""
    def membership_expired(doc):
        # Membership lists this user AND its expiration date has passed.
        return (doc["identifiers"].contains(user_id)
                & (doc["expiration_date"] <= r.now()))

    expired = r.table("role_members").filter(membership_expired)
    return expired.get_field("role_id").coerce_to("array")
def process(rec, conn):
    """ Process inbound queue records.

    Translates provider identifiers, submits the record's batch to the
    validator, and routes the record to 'changelog' or 'sync_errors'
    depending on the commit status, always removing it from
    'inbound_queue'.

    :param rec: inbound queue document (dict) containing at least 'id' and
        'data', and optionally 'batch' / 'metadata'.
    :param conn: open RethinkDB connection used for all table operations.
    """
    try:
        # Changes members from distinguished name to next_id for roles
        if "members" in rec["data"]:
            rec = translate_field_to_next(rec, "members")
        if "owners" in rec["data"]:
            rec = translate_field_to_next(rec, "owners")
        add_transaction(rec)
        # A record without a batch cannot be submitted: move it straight
        # to sync_errors and drop it from the queue.
        if "batch" not in rec or not rec["batch"]:
            r.table("inbound_queue").get(rec["id"]).delete().run(conn)
            rec["sync_direction"] = "inbound"
            r.table("sync_errors").insert(rec).run(conn)
            return
        batch = batch_pb2.Batch()
        batch.ParseFromString(rec["batch"])
        batch_list = batch_to_list(batch=batch)
        # Submit the batch to the validator and wait for its status.
        status = ClientSync().send_batches_get_status(batch_list=batch_list)
        if status[0]["status"] == "COMMITTED":
            if rec["data_type"] == "user":
                insert_to_user_mapping(rec)
            if "metadata" in rec and rec["metadata"]:
                data = {
                    "address": rec["address"],
                    "object_type": rec["object_type"],
                    "object_id": rec["object_id"],
                    "provider_id": rec["provider_id"],
                    "created_at": r.now(),
                    "updated_at": r.now(),
                    **rec["metadata"],
                }
                # Upsert: insert the full document when absent, otherwise
                # merge in the new metadata and bump updated_at only.
                query = (
                    r.table("metadata")
                    .get(rec["address"])
                    .replace(
                        lambda doc: r.branch(
                            # pylint: disable=singleton-comparison
                            (doc == None),  # noqa
                            r.expr(data),
                            doc.merge(
                                {"metadata": rec["metadata"], "updated_at": r.now()}
                            ),
                        )
                    )
                )
                result = query.run(conn)
                if (not result["inserted"] and not result["replaced"]) or result[
                    "errors"
                ] > 0:
                    LOGGER.warning(
                        "error updating metadata record:\n%s\n%s", result, query
                    )
            rec["sync_direction"] = "inbound"
            r.table("changelog").insert(rec).run(conn)
            r.table("inbound_queue").get(rec["id"]).delete().run(conn)
        else:
            # Batch rejected or invalid: record the validator error.
            rec["error"] = get_status_error(status)
            rec["sync_direction"] = "inbound"
            r.table("sync_errors").insert(rec).run(conn)
            r.table("inbound_queue").get(rec["id"]).delete().run(conn)
    except Exception as err:  # pylint: disable=broad-except
        LOGGER.exception(
            "%s exception processing inbound record:\n%s", type(err).__name__, rec
        )
        LOGGER.exception(err)
def __init__(self, name):
    """Initialize a settings record with the given name.

    Stamps the record's birth time with the server-side clock and starts
    with an empty settings mapping.
    """
    self._type = "settings"
    self.settings = {}
    self.name = name
    self.birthtime = r.now()
def __init__(self, attribute_id, measurement_id):
    """Record which measurement is currently best for an attribute.

    Stamps 'when' with the server-side clock at creation time.
    """
    self._type = "best_measure_history"
    self.when = r.now()
    self.attribute_id = attribute_id
    self.measurement_id = measurement_id
def update_modified(field):
    """Return a server-side timestamp to stamp the given field with.

    The 'field' argument is accepted for the caller's hook signature but
    is not consulted.
    """
    timestamp = r.now()
    return timestamp
def filter_func(doc):
    """ReQL predicate: match docs for `url` last scraped at least 5 hours ago.

    :param doc: ReQL row with 'url' and 'last_scraped' fields.
    """
    # Difference is in seconds; divide to compare in hours.
    hours_since_scrape = (r.now() - doc['last_scraped']).div(60 * 60)
    # BUG FIX: Python's `and` cannot combine ReQL terms — the left operand
    # is a (truthy) query object, so `a and b` evaluated to just `b`,
    # silently dropping the URL check. ReQL requires the `&` operator
    # with parenthesized comparisons.
    return (doc['url'] == url) & (hours_since_scrape >= 5)
def __init__(self, title, body, when=""):
    """Create an entry with a title, body and timestamp.

    When 'when' is falsy (the default), the date falls back to the
    server-side current time.
    """
    self.title = title
    self.body = body
    if when:
        self.date = when
    else:
        self.date = r.now()
def UpdateURL(rdb, url):
    """Stamp the 'last_scraped' field of the given URL's 'urldata' record
    with the server's current time.

    :param rdb: open RethinkDB connection to execute the query on.
    :param url: URL whose record should be touched.
    """
    # BUG FIX: ReQL queries are lazy — the update was built but never
    # executed because .run() was missing, and `rdb` went unused.
    r.table('urldata').filter({'url': url}).update({
        'last_scraped': r.now()
    }).run(rdb)