def delete_group(l):
    """Delete an existing group."""
    # Only teacher accounts own groups.
    user = acquire_user({"usertype": "teacher"})
    if not user:
        l.interrupt()
    login(l, username=user["username"], password=user["password"])
    simulate_loading_classroom_page(l)
    res = l.client.get(GROUPS_ENDPOINT)
    groups = res.json()
    if len(groups) < 1:
        l.interrupt()
    group = random.choice(groups)
    with l.client.delete(
            GROUPS_ENDPOINT + "/" + group["gid"],
            # Group all gid-specific requests under one Locust stats entry.
            name=GROUPS_ENDPOINT + "/[gid]",
            headers={"X-CSRF-Token": l.client.cookies["token"]},
            catch_response=True,
    ) as res:
        if res.status_code == 200:
            res.success()
            # Mirror the deletion in the local tracking DB.
            get_db().groups.delete_one({"group_name": group["name"]})
        else:
            res.failure("Failed to delete team: " + str(res.json()))
    logout(l)
    release_user(user["username"])
    l.interrupt()
def change_password(l):
    """Change a user's password.

    Acquires any free test user, changes its password via the API, records
    the new password in the tracking DB on success, then logs in again with
    the new credentials to verify them.
    """
    user = acquire_user()
    if not user:
        l.interrupt()
    login(l, username=user['username'], password=user['password'])
    simulate_loading_profile_page(l)
    new_password = get_password()
    with l.client.post(
            USER_PASSWORD_CHANGE_ENDPOINT,
            json={
                'current_password': user['password'],
                'new_password': new_password,
                'new_password_confirmation': new_password,
            },
            headers={'X-CSRF-Token': l.client.cookies['token']},
            catch_response=True) as res:
        if res.status_code == 200:
            # Keep the tracking DB in sync so later tasks use the new password.
            get_db().users.find_one_and_update(
                {'username': user['username']},
                {'$set': {
                    'password': new_password
                }})
            res.success()
        else:
            # NOTE(review): this statement was corrupted in the source
            # ('******' scrubbing artifact); reconstructed to match the
            # double-quoted variant of this task.
            res.failure('Failed to change password: ' + str(res.json()))
    # Verify the new credentials actually work.
    login(l, username=user['username'], password=new_password)
    logout(l)
    release_user(user['username'])
    l.interrupt()
def change_password(l):
    """Change a user's password.

    Acquires any free test user, changes its password via the API, records
    the new password in the tracking DB on success, then logs in again with
    the new credentials to verify them.
    """
    user = acquire_user()
    if not user:
        l.interrupt()
    login(l, username=user["username"], password=user["password"])
    simulate_loading_profile_page(l)
    new_password = get_password()
    with l.client.post(
            USER_PASSWORD_CHANGE_ENDPOINT,
            json={
                "current_password": user["password"],
                "new_password": new_password,
                "new_password_confirmation": new_password,
            },
            headers={"X-CSRF-Token": l.client.cookies["token"]},
            catch_response=True,
    ) as res:
        if res.status_code == 200:
            # Keep the tracking DB in sync so later tasks use the new password.
            get_db().users.find_one_and_update(
                {"username": user["username"]},
                {"$set": {
                    "password": new_password
                }},
            )
            res.success()
        else:
            # NOTE(review): this statement was corrupted in the source
            # ('******' scrubbing artifact); reconstructed from the
            # surrounding control flow.
            res.failure("Failed to change password: " + str(res.json()))
    # Verify the new credentials actually work.
    login(l, username=user["username"], password=new_password)
    logout(l)
    release_user(user["username"])
    l.interrupt()
def delete_account(l):
    """Delete a user's account."""
    user = acquire_user()
    if not user:
        l.interrupt()
    login(l, username=user["username"], password=user["password"])
    simulate_loading_profile_page(l)
    with l.client.post(
            USER_DELETE_ACCOUNT_ENDPOINT,
            json={"password": user["password"]},
            headers={"X-CSRF-Token": l.client.cookies["token"]},
            catch_response=True,
    ) as res:
        if res.status_code == 200:
            # Soft-delete in the tracking DB so acquire_user() skips it.
            get_db().users.find_one_and_update(
                {"username": user["username"]}, {"$set": {
                    "deleted": True
                }})
            res.success()
        else:
            res.failure("Failed to delete account: " + str(res.json()))
    release_user(user["username"])
    l.interrupt()
def delete_group(l):
    """Delete an existing group."""
    # Only teacher accounts own groups.
    user = acquire_user({'usertype': 'teacher'})
    if not user:
        l.interrupt()
    login(l, username=user['username'], password=user['password'])
    simulate_loading_classroom_page(l)
    res = l.client.get(GROUPS_ENDPOINT)
    groups = res.json()
    if len(groups) < 1:
        l.interrupt()
    group = random.choice(groups)
    with l.client.delete(
            GROUPS_ENDPOINT + '/' + group['gid'],
            # Group all gid-specific requests under one Locust stats entry.
            name=GROUPS_ENDPOINT + '/[gid]',
            headers={'X-CSRF-Token': l.client.cookies['token']},
            catch_response=True) as res:
        if res.status_code == 200:
            res.success()
            # Mirror the deletion in the local tracking DB.
            get_db().groups.delete_one({'group_name': group['name']})
        else:
            res.failure('Failed to delete team: ' + str(res.json()))
    logout(l)
    release_user(user['username'])
    l.interrupt()
def join_team(l):
    """Join an existing team with an open space."""
    # Competitor accounts that aren't already on a team.
    user = acquire_user({
        "usertype": {
            "$in": ["student", "college", "other"]
        },
        "on_team": {
            "$in": [False, None]
        },
    })
    if not user:
        l.interrupt()
    login(l, username=user["username"], password=user["password"])
    simulate_loading_profile_page(l)
    # Sometimes fails due to race condition - another thread can
    # push a team over the max size while trying to join it
    team = get_db().teams.find_one({
        "number_of_members": {
            "$lt": MAX_TEAM_SIZE
        },
        "rand_id": {
            "$near": [random.random(), 0]
        },
    })
    if not team:
        l.interrupt()
    with l.client.post(
            JOIN_TEAM_ENDPOINT,
            json={
                "team_name": team["team_name"],
                "team_password": team["team_password"],
            },
            catch_response=True,
    ) as res:
        if res.status_code == 200:
            # Mirror the new membership in the tracking DB.
            get_db().users.find_one_and_update(
                {"username": user["username"]},
                {
                    "$set": {
                        "on_team": True,
                        "team_name": team["team_name"]
                    }
                },
            )
            get_db().teams.find_one_and_update(
                {"team_name": team["team_name"]},
                {"$inc": {
                    "number_of_members": 1
                }},
            )
            res.success()
        else:
            res.failure("Failed to join team: " + str(res.json()))
    logout(l)
    release_user(user["username"])
    l.interrupt()
def join_team(l):
    """Join an existing team with an open space."""
    # Competitor accounts that aren't already on a team.
    user = acquire_user({
        'usertype': {
            '$in': ['student', 'college', 'other']
        },
        'on_team': {
            '$in': [False, None]
        }
    })
    if not user:
        l.interrupt()
    login(l, username=user['username'], password=user['password'])
    simulate_loading_profile_page(l)
    # Sometimes fails due to race condition - another thread can
    # push a team over the max size while trying to join it
    team = get_db().teams.find_one({
        'number_of_members': {
            '$lt': MAX_TEAM_SIZE
        },
        'rand_id': {
            '$near': [random.random(), 0]
        }
    })
    if not team:
        l.interrupt()
    with l.client.post(JOIN_TEAM_ENDPOINT,
                       json={
                           'team_name': team['team_name'],
                           'team_password': team['team_password']
                       },
                       catch_response=True) as res:
        if res.status_code == 200:
            # Mirror the new membership in the tracking DB.
            get_db().users.find_one_and_update(
                {'username': user['username']}, {
                    '$set': {
                        'on_team': True,
                        'team_name': team['team_name']
                    }
                })
            get_db().teams.find_one_and_update(
                {'team_name': team['team_name']},
                {'$inc': {
                    'number_of_members': 1
                }})
            res.success()
        else:
            res.failure('Failed to join team: ' + str(res.json()))
    logout(l)
    release_user(user['username'])
    l.interrupt()
def successfully_register(l):
    """Register a valid test user and store credentials in DB."""
    user_demographics = generate_user()
    with l.client.post(REGISTRATION_ENDPOINT,
                       json=user_demographics,
                       catch_response=True) as res:
        if res.status_code == 201:
            user_document = user_demographics.copy()
            # rand_id lets later tasks pick a random user via $near.
            user_document['rand_id'] = [random.random(), 0]
            get_db().users.insert_one(user_document)
            res.success()
        else:
            # Include the response body, matching the failure message of
            # the sibling registration task.
            res.failure('Failed to register user: ' + str(res.json()))
    raise StopLocust  # Terminate after successful registration
def successfully_register(l):
    """Register a valid test user and store credentials in DB."""
    simulate_loading_login_page(l)
    user_demographics = generate_user()
    with l.client.post(REGISTRATION_ENDPOINT,
                       json=user_demographics,
                       catch_response=True) as res:
        if res.status_code == 201:
            user_document = user_demographics.copy()
            # rand_id lets later tasks pick a random user via $near.
            user_document['rand_id'] = [random.random(), 0]
            get_db().users.insert_one(user_document)
            res.success()
        else:
            res.failure('Failed to register user: ' + str(res.json()))
    l.interrupt()
def join_group(l):
    """Join an existing group."""
    user = acquire_user({"usertype": "student"})
    if not user:
        l.interrupt()
    login(l, username=user["username"], password=user["password"])
    simulate_loading_profile_page(l)
    # Pick a random existing group via the rand_id/$near trick.
    group = get_db().groups.find_one(
        {"rand_id": {
            "$near": [random.random(), 0]
        }})
    if not group:
        l.interrupt()
    with l.client.post(
            JOIN_GROUP_ENDPOINT,
            json={
                "group_name": group["group_name"],
                "group_owner": group["group_owner"],
            },
            headers={"X-CSRF-Token": l.client.cookies["token"]},
            catch_response=True,
    ) as res:
        # Not the best way to deal with joining duplicate groups
        if res.status_code in [200, 409]:
            res.success()
        else:
            res.failure("Failed to join team: " + str(res.json()))
    logout(l)
    release_user(user["username"])
    l.interrupt()
def setup(l):
    """Retrieve all problem flags as an admin user (runs once)."""
    # Start from a clean flag cache.
    get_db().problems.delete_many({})
    login(l, username=ADMIN_USERNAME, password=ADMIN_PASSWORD)
    all_problems = l.client.get(PROBLEMS_ENDPOINT +
                                "?unlocked_only=false").json()
    flag_maps = []
    for problem in all_problems:
        flag_maps.append({
            "pid": problem["pid"],
            "flags": [i["flag"] for i in problem["instances"]],
            "rand_id": [random.random(), 0],
        })
    get_db().problems.insert_many(flag_maps)
    logout(l)
def get_marks_for(user_id):
    """Return all marks belonging to *user_id* as a list of dicts.

    Each row of the ``mark`` table is zipped against the known column
    names. Propagates any exception raised by the DB driver.
    """
    keys = [
        'mark_id', 'mark_author', 'mark_name', 'notes', 'location', 'user_id'
    ]
    db = get_db()
    try:
        cursor = db.execute(
            '''
        SELECT *
        FROM mark
        WHERE user_id=?
        ''', (user_id, ))
        rows = cursor.fetchall()
    finally:
        # Close even when the query raises (the original leaked the
        # connection on error). Debug print/pdb leftovers removed.
        db.close()
    return [dict(zip(keys, row)) for row in rows]
def setup(l):
    """Retrieve all problem flags as an admin user (runs once)."""
    # Start from a clean flag cache.
    get_db().problems.delete_many({})
    login(l, username=ADMIN_USERNAME, password=ADMIN_PASSWORD)
    all_problems = l.client.get(PROBLEMS_ENDPOINT +
                                '?unlocked_only=false').json()
    flag_maps = []
    for problem in all_problems:
        flag_maps.append({
            'pid': problem['pid'],
            'flags': [i['flag'] for i in problem['instances']],
            'rand_id': [random.random(), 0]
        })
    get_db().problems.insert_many(flag_maps)
    logout(l)
def join_group(l):
    """Join an existing group."""
    user = acquire_user({'usertype': 'student'})
    if not user:
        l.interrupt()
    login(l, username=user['username'], password=user['password'])
    simulate_loading_profile_page(l)
    # Pick a random existing group via the rand_id/$near trick.
    group = get_db().groups.find_one(
        {'rand_id': {
            '$near': [random.random(), 0]
        }})
    if not group:
        l.interrupt()
    with l.client.post(
            JOIN_GROUP_ENDPOINT,
            json={
                'group_name': group['group_name'],
                'group_owner': group['group_owner']
            },
            headers={'X-CSRF-Token': l.client.cookies['token']},
            catch_response=True) as res:
        # Not the best way to deal with joining duplicate groups
        if res.status_code in [200, 409]:
            res.success()
        else:
            res.failure('Failed to join team: ' + str(res.json()))
    logout(l)
    release_user(user['username'])
    l.interrupt()
def get_podcast_info(url):
    """Return {'title', 'author', 'image_url'} for a podcast page.

    Results are cached in the ``podcasts`` table; on a cache hit the stored
    row is returned with no network access. Otherwise the page is scraped.
    """
    con = config.get_db()
    cur = con.cursor()
    row = cur.execute(
        """select title, author, image_url from podcasts where url=?""",
        (url, )).fetchone()
    if row is not None:
        return row
    session = HTMLSession()
    r = session.get(url)
    title = r.html.find('h1.tok-topwrap__h1', first=True).full_text
    image = r.html.find('.tok-topwrap__topwrap .tok-topwrap__img img',
                        first=True)
    image_src = ''
    # BUG FIX: the original tested for 'src' but then read 'data-src',
    # risking a KeyError; test the key that is actually read. Also guard
    # against the <img> element being absent.
    if image is not None and 'data-src' in image.attrs:
        image_src = image.attrs['data-src']
    image_file = make_podcast_image(image_src, title)
    info_fields = r.html.find('.tok-topwrap__topwrap .tok-divTableRow')
    author = ''
    for field in info_fields:
        label = field.find('.tok-topwrap__label', first=True).full_text
        # 'Prowadzący' is Polish for "host/presenter".
        if label.find('Prowadzący') > -1:
            author = field.find('a', first=True).full_text
    return {'title': title, 'author': author, 'image_url': image_file}
def __init__(self, username, password, **kwargs):
    """Connect the bot and initialise per-instance state."""
    JabberBot.__init__(self, username, password, **kwargs)
    self.PING_FREQUENCY = 60  # seconds between keep-alive pings
    # Command-name -> handler mapping, populated by add_content_commands().
    self.content_commands = {}
    self.add_content_commands()
    self.db = get_db()
    # NOTE(review): presumably guards shared state across handler threads —
    # confirm against the callers that take this lock.
    self.rlock = RLock()
    self.last_message = {}
def create_team(l):
    """Create a custom team for a user."""
    # Competitor accounts that aren't already on a team.
    user = acquire_user({
        "usertype": {
            "$in": ["student", "college", "other"]
        },
        "on_team": {
            "$in": [False, None]
        },
    })
    if not user:
        l.interrupt()
    login(l, username=user["username"], password=user["password"])
    simulate_loading_profile_page(l)
    team_name = get_team_name()
    team_password = get_password()
    with l.client.post(
            CREATE_TEAM_ENDPOINT,
            json={
                "team_name": team_name,
                "team_password": team_password
            },
            catch_response=True,
    ) as res:
        if res.status_code == 201:
            # Mirror the new team and membership in the tracking DB.
            get_db().users.find_one_and_update(
                {"username": user["username"]},
                {"$set": {
                    "on_team": True,
                    "team_name": team_name
                }},
            )
            get_db().teams.insert_one({
                "team_name": team_name,
                "team_password": team_password,
                "number_of_members": 1,
                "rand_id": [random.random(), 0],
            })
            res.success()
        else:
            res.failure("Failed to create custom team: " + str(res.json()))
    logout(l)
    release_user(user["username"])
    l.interrupt()
def release_user(username):
    """Mark a test user as free so other threads may acquire it again."""
    clear_flag = {'$set': {'in_use': False}}
    released = get_db().users.find_one_and_update({'username': username},
                                                  clear_flag)
    if not released:
        raise Exception("Could not release user " + str(username))
def set_data_to_db(SQL_QUERY):
    """Execute a write statement and return the last inserted row id.

    SECURITY NOTE(review): the statement is executed verbatim — callers
    must never build SQL_QUERY from untrusted input (SQL injection risk);
    prefer parameterized queries.
    """
    db = get_db()
    cursor = db.cursor()
    cursor.execute(SQL_QUERY)
    db.commit()
    data = cursor.lastrowid
    cursor.close()
    return data
def create_group(l):
    """Create a new group."""
    # Teachers who have not yet hit the per-user group limit.
    user = acquire_user({
        "usertype": "teacher",
        "$or": [
            {
                "groups_created": None
            },
            {
                "groups_created": {
                    "$lt": GROUP_LIMIT
                }
            },
        ],
    })
    if not user:
        l.interrupt()
    login(l, username=user["username"], password=user["password"])
    simulate_loading_classroom_page(l)
    group_name = get_group_name()
    with l.client.post(
            CREATE_GROUP_ENDPOINT,
            json={"name": group_name},
            headers={"X-CSRF-Token": l.client.cookies["token"]},
            catch_response=True,
    ) as res:
        if res.status_code == 201:
            # Mirror the new group and the owner's counter in the DB.
            get_db().users.find_one_and_update(
                {"username": user["username"]}, {"$inc": {
                    "groups_created": 1
                }})
            get_db().groups.insert_one({
                "group_name": group_name,
                "group_owner": user["username"],
                "rand_id": [random.random(), 0],
            })
            res.success()
        else:
            res.failure("Failed to create group: " + str(res.json()))
    logout(l)
    release_user(user["username"])
    l.interrupt()
def inner(*args, **kw):
    """Run the wrapped *func* with a DB handle, committing on success.

    BUG FIX: the original closed the connection only on the success path
    (the except clause just re-raised), leaking it whenever func/commit
    raised; close in ``finally`` instead.
    """
    db = get_db()
    try:
        func(*args, db=db, **kw)
        db.commit()
    finally:
        db.close()
def get_user_id(email):
    """Return the (user_id,) row for *email*, or None if no such user.

    The original wrapped the SELECT in ``except sqlite3.IntegrityError:
    raise inst`` — a no-op (and IntegrityError is a write-time error) —
    so the guard has been dropped; driver exceptions still propagate.
    """
    db = get_db()
    user = db.execute('''SELECT user_id FROM user where email=?''',
                      (email, ))
    return user.fetchone()
def create_team(l):
    """Create a custom team for a user."""
    # Competitor accounts that aren't already on a team.
    user = acquire_user({
        'usertype': {
            '$in': ['student', 'college', 'other']
        },
        'on_team': {
            '$in': [False, None]
        }
    })
    if not user:
        l.interrupt()
    login(l, username=user['username'], password=user['password'])
    simulate_loading_profile_page(l)
    team_name = get_team_name()
    team_password = get_password()
    with l.client.post(CREATE_TEAM_ENDPOINT,
                       json={
                           'team_name': team_name,
                           'team_password': team_password
                       },
                       catch_response=True) as res:
        if res.status_code == 201:
            # Mirror the new team and membership in the tracking DB.
            get_db().users.find_one_and_update(
                {'username': user['username']},
                {'$set': {
                    'on_team': True,
                    'team_name': team_name
                }})
            get_db().teams.insert_one({
                'team_name': team_name,
                'team_password': team_password,
                'number_of_members': 1,
                'rand_id': [random.random(), 0]
            })
            res.success()
        else:
            res.failure('Failed to create custom team: ' + str(res.json()))
    logout(l)
    release_user(user['username'])
    l.interrupt()
def create_group(l):
    """Create a new group."""
    # Teachers who have not yet hit the per-user group limit.
    user = acquire_user({
        'usertype': 'teacher',
        '$or': [{
            'groups_created': None
        }, {
            'groups_created': {
                '$lt': GROUP_LIMIT
            }
        }]
    })
    if not user:
        l.interrupt()
    login(l, username=user['username'], password=user['password'])
    simulate_loading_classroom_page(l)
    group_name = get_group_name()
    with l.client.post(
            CREATE_GROUP_ENDPOINT,
            json={'name': group_name},
            headers={'X-CSRF-Token': l.client.cookies['token']},
            catch_response=True) as res:
        if res.status_code == 201:
            # Mirror the new group and the owner's counter in the DB.
            get_db().users.find_one_and_update(
                {'username': user['username']}, {'$inc': {
                    'groups_created': 1
                }})
            get_db().groups.insert_one({
                'group_name': group_name,
                'group_owner': user['username'],
                'rand_id': [random.random(), 0]
            })
            res.success()
        else:
            res.failure('Failed to create group: ' + str(res.json()))
    logout(l)
    release_user(user['username'])
    l.interrupt()
def create_fake_data_status(data_node: DataNode):
    """Insert 100 synthetic, minute-spaced status records for *data_node*."""
    now = get_second_datetime()
    db = get_db()
    for i in range(100):
        status = DataNodeStatus(
            # Every tenth record is marked dead (translated from Chinese:
            # "goes down once every ten times").
            dead=i % 10 == 0,
            capacity=1000,
            used=100 + (i % 7),
            datetime=now + timedelta(minutes=i))
        db.save(data_node.node_id, status)
def delete_marks_for(user_id, db=None):
    """Delete every mark owned by *user_id*; returns True on success.

    NOTE(review): when *db* is created here it is never committed or
    closed by this function — presumably the caller or get_db() handles
    that; confirm.
    """
    if db is None:
        db = get_db()
    try:
        db.execute('''
        DELETE FROM mark where user_id=?
        ''', (user_id, ))
    except sqlite3.IntegrityError as inst:
        raise inst
    else:
        return True
def delete(student):
    """Delete *student* (a dict holding the full table key) from DynamoDB.

    Returns the delete_item response, or None when validation fails.
    (Python 2 syntax: print statements.)
    """
    if not isinstance(student, dict):
        print 'Student must be of dict type'
        return None
    cfg = config.load_config()
    # Every key attribute declared in the table schema must be present.
    for schema in cfg['create']['KeySchema']:
        if schema['AttributeName'] not in student:
            print 'All keys must be in student'
            return None
    db = config.get_db(cfg)
    table = config.get_table(db, cfg)
    response = table.delete_item(Key=student)
    return response
def get(student):
    """Fetch *student* (a dict holding the full table key) from DynamoDB.

    Returns the get_item response, or None when validation fails.
    """
    if not isinstance(student, dict):
        print("Student must be of dict type")
        return None
    cfg = config.load_config()
    # Every key attribute declared in the table schema must be present.
    for schema in cfg["create"]["KeySchema"]:
        if schema["AttributeName"] not in student:
            # Parenthesised for consistency with the print above; the
            # single-argument form behaves identically under Python 2's
            # print statement and is Python 3 compatible.
            print("All keys must be in student")
            return None
    db = config.get_db(cfg)
    table = config.get_table(db, cfg)
    response = table.get_item(Key=student)
    return response
def acquire_user(properties=None):
    """Retrieve an available test user with the specified properties.

    The matched user is atomically flagged ``in_use`` so no other thread
    can acquire it. Raises if no free, non-deleted user matches.

    BUG FIX: the original used a mutable default argument (``{}``) and
    mutated it, so the in_use/deleted/rand_id filters leaked between calls
    and into callers' dicts; build a fresh query dict instead.
    """
    query = dict(properties or {})
    query['in_use'] = {'$in': [False, None]}
    query['deleted'] = {'$in': [False, None]}
    # NOTE(review): presumably rand_id + $near picks a pseudo-random
    # matching user; confirm the index on rand_id.
    query['rand_id'] = {'$near': [random.random(), 0]}
    user = get_db().users.find_one_and_update(query,
                                              {'$set': {
                                                  'in_use': True
                                              }}, {'_id': 0})
    if not user:
        raise Exception("Could not acquire user with properties " +
                        str(query))
    return user
def login():
    """Render the login form (GET) or authenticate the user (POST)."""
    if request.method == 'GET':
        # Title is Russian for "Login".
        return render_template('login.html', title = u"Вход")
    username = request.form['username']
    password = request.form['password']
    db = get_db()
    cursor = db.cursor()
    # SECURITY NOTE(review): the password is compared verbatim in SQL, so
    # passwords appear to be stored in plaintext — they should be hashed
    # and verified application-side. The query itself is parameterized.
    count = cursor.execute("SELECT username, password FROM user WHERE username = %s AND password = %s", (username, password))
    if not count:
        return registration_failed_redirection()
    registered_user = cursor.fetchone()
    login_user(User(registered_user[0], registered_user[1]))
    # Flash message is Russian: "Login successful, <user>".
    flash(u"Вход выполнен, %s"%current_user)
    return redirect(request.args.get('next') or url_for('index'))
def load_db():
    """Load today's DB snapshot, rebuilding it when missing or stale.

    Returns (notify, db): *notify* is False when a fresh DB had to be
    built (nothing to diff against yet).
    """
    notify = True
    try:
        db = get_db()
    except Exception:
        # Narrowed from a bare ``except:`` (which also swallowed
        # SystemExit/KeyboardInterrupt); a missing stored DB is expected.
        db = None
    today = datetime.now().astimezone().strftime("%Y-%m-%d")
    # Discard a snapshot from a previous day.
    if db and db.date != today:
        db = None
    if db is None:
        notify = False
        db = to_DB(today)
    return (notify, db)
def save(self, collection_name=None):
    """Upsert this document into *collection_name* (default: class name).

    Inserts when the document has no ``_id`` yet, otherwise upserts by
    ``_id``. Returns the pymongo result object, or None on failure.
    """
    if collection_name is None:
        collection_name = self.__class__.__name__
    try:
        collection = config.get_db()[collection_name]
        if "_id" not in self:  # idiomatic membership test (was `not ... in`)
            result = collection.insert_one(self)
        else:
            result = collection.update_one({"_id": ObjectId(self["_id"])},
                                           {"$set": self},
                                           upsert=True)
        return result
    except OperationFailure as e:
        print(f"Database operation failed: {e}")
        return None
def load_from_json_dict(json_dict):
    """Insert each student dict into DynamoDB, skipping duplicate SSNs.

    (Python 2 syntax: print statements.)
    """
    cfg = config.load_config()
    db = config.get_db(cfg)
    table = config.get_table(db, cfg)
    for student in json_dict:
        print 'Adding student:', student
        try:
            # Conditional put: only insert when no item with this SSN exists.
            table.put_item(
                Item=student,
                ConditionExpression=Attr('SSN').ne(student['SSN'])
            )
        except ClientError as e:
            # ConditionalCheckFailed == duplicate SSN: report and continue.
            if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
                print(e.response['Error']['Message'])
            else:
                raise
def get_imdh_data(lat, long, n, variable):
    """Aggregate historical IMD gridded data around (lat, long).

    Builds a 0.25-degree lat/long window extending *n* steps either side of
    the given point, then groups the ``imdhist`` collection by timestamp
    for the requested *variable*, returning the aggregation result sorted
    by time. (Dead commented-out code from the original removed.)
    """
    # Build the lat/long grid at 0.25-degree resolution.
    start_lat = lat - 0.25 * n
    end_lat = lat + 0.25 * n
    start_long = long - 0.25 * n
    end_long = long + 0.25 * n
    a1_lat = np.arange(start_lat, lat, 0.25)
    a2_lat = np.arange(lat, (end_lat + 0.25), 0.25)
    a1_long = np.arange(start_long, long, 0.25)
    a2_long = np.arange(long, (end_long + 0.25), 0.25)
    lats = list(a1_lat) + list(a2_lat)
    longs = list(a1_long) + list(a2_long)
    # extract data from database online
    db = config.get_db()
    imdhist = db.imdhist
    imdhist.create_index("lt")
    pipeline = [
        {"$match": {"id": variable,
                    "lt": {"$in": lats},
                    "ln": {"$in": longs}}},
        {"$group": {"_id": "$ts",
                    "val": {"$push": "$val"},
                    "lat": {"$push": "$lt"},
                    "long": {"$push": "$ln"}}},
        # SON keeps the sort key order deterministic.
        {"$sort": SON([("_id", 1)])}
    ]
    imdh = list(imdhist.aggregate(pipeline, allowDiskUse=True))
    return imdh
def add(student):
    """Insert *student* into DynamoDB unless its SSN already exists.

    Returns the put_item response, False on duplicate SSN, or None when
    validation fails. (Python 2 syntax: print statements.)
    """
    if not isinstance(student, dict):
        print 'Student must be of dict type'
        return None
    cfg = config.load_config()
    # Every key attribute declared in the table schema must be present.
    for schema in cfg['create']['KeySchema']:
        if schema['AttributeName'] not in student:
            print 'All key must be in student'
            return None
    db = config.get_db(cfg)
    table = config.get_table(db, cfg)
    try:
        # Conditional put: only insert when no item with this SSN exists.
        response = table.put_item(
            Item=student,
            ConditionExpression=Attr('SSN').ne(student['SSN'])
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
            print(e.response['Error']['Message'])
            return False
        else:
            raise
    return response
def update(student, update):
    """Apply *update* (attr -> new value) to *student*'s DynamoDB item.

    Builds a ``SET a = :a, ...`` UpdateExpression from the update dict.
    Returns the update_item response (UPDATED_NEW values), or None when
    validation fails. (Python 2 syntax: print statements.)
    """
    if not isinstance(student, dict) or not isinstance(update, dict):
        print 'Student or update must be of dict type'
        return None
    cfg = config.load_config()
    # Every key attribute declared in the table schema must be present.
    for schema in cfg['create']['KeySchema']:
        if schema['AttributeName'] not in student:
            print 'All key must be in student'
            return None
    db = config.get_db(cfg)
    table = config.get_table(db, cfg)
    exp = []
    exp_attr_vals = {}
    for attr in update:
        # Placeholder name: strip dots so nested attribute paths stay
        # valid expression-attribute-value names.
        var = ':' + ''.join(attr.split('.'))
        exp.append(attr + ' = ' + var)
        exp_attr_vals[var] = update[attr]
    update_exp = 'SET ' + ','.join(exp)
    response = table.update_item(
        Key=student,
        UpdateExpression=update_exp,
        ExpressionAttributeValues=exp_attr_vals,
        ReturnValues='UPDATED_NEW')
    return response
#!/usr/bin/python # -*- coding: utf-8 -*- import pymongo import config from datetime import datetime from datetime import timedelta import time from bson.son import SON import dailysums import json import urllib import urllib2 import re db = config.get_db() sensors = db.sensors ids = db.ids sm = db.smnew farmers = db.farmers forecasts = db.forecasts testing = True locs = ["gaya"] ids = [205] # Pull yesterday and today's weather, to allow dailysums function to operate properly, as # it needs two days to sum rainfall values for one day yesterday = datetime.combine(datetime.now().date() - timedelta(days=1), datetime.min.time())
# Bulk-load students from a JSON file into DynamoDB, skipping rows whose
# SSN already exists (conditional put).
import boto3
import json
import config
from boto3.dynamodb.conditions import Key, Attr
from botocore.exceptions import ClientError

json_file = 'studentsdata.json'
with open(json_file) as fp:
    json_dict = json.load(fp)
cfg = config.load_config()
db = config.get_db(cfg)
table = config.get_table(db, cfg)
for student in json_dict:
    print('Adding student:', student)
    try:
        # Conditional put: only insert when no item with this SSN exists.
        table.put_item(
            Item=student,
            ConditionExpression=Attr('SSN').ne(student['SSN'])
        )
    except ClientError as e:
        # ConditionalCheckFailed == duplicate SSN: report and continue.
        if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
            print(e.response['Error']['Message'])
        else:
            raise
def get_all():
    """Return a full scan of the configured DynamoDB table."""
    cfg = config.load_config()
    table = config.get_table(config.get_db(cfg), cfg)
    return table.scan()
def init():
    """Server entry point (Python 2): wire up ZMQ sockets, Mongo and a
    Mongrel2 connection, then serve requests in an endless poll loop.

    NOTE(review): indentation below is reconstructed from a flattened
    source; confirm branch nesting against the upstream file.
    """
    # make zmq connections
    ctx = zmq.Context()
    # sub to SUICIDE address
    command = ctx.socket(zmq.SUB)
    command.linger = LINGER
    command.setsockopt(zmq.SUBSCRIBE, '')
    command.connect(CONFIG['command'])
    # connect to CHECKUP rep address
    checkup = ctx.socket(zmq.REP)
    checkup.linger = LINGER
    checkup.connect(CONFIG['checkup'])
    # connect to OUT pub address
    output = ctx.socket(zmq.PUB)
    output.linger = LINGER
    output.hwm = 20
    output.connect(CONFIG['out'])
    out = Out(output, **CONFIG)
    # connect to auth
    auth = ctx.socket(zmq.REQ)
    auth.linger = LINGER
    auth.hwm = 1
    auth.connect(AUTH)
    # connect to m2
    sender_id = uuid.uuid4().hex
    m2 = handler.Connection(sender_id, M2IN, M2OUT)
    # make mongo connection; a failure here is logged, not fatal.
    db = None
    try:
        db = get_db(pymongo)
    except Exception as e:
        out.send('DB', json.dumps({
            'status': 'DOWN_CONN',
            'msg': "Couldn't connect to Mongo at startup."
        }))
    # define poller
    poller = zmq.Poller()
    poller.register(command, zmq.POLLIN)
    poller.register(checkup, zmq.POLLIN)
    poller.register(m2.reqs, zmq.POLLIN)
    out.send('HELLO')
    id = uuid.uuid4()
    while True:
        try:
            # wait for IO
            socks = dict(poller.poll())
            # if command PUB comes through
            if command in socks and socks[command] == zmq.POLLIN:
                msg = command.recv_json()
                # log and ignore messages that don't validate
                if msg.get('key') != KEY:
                    out.send('SECURITY', json.dumps({
                        'status': 'WRONG_KEY',
                        'msg': msg,
                        'id': str(id)
                    }))
                    continue
                if msg.get('command') == 'die':
                    out.send('GOODBYE')
                    # clean up sockets
                    command.close()
                    checkup.close()
                    output.close()
                    m2.shutdown()
                    ctx.term()
                    gevent.shutdown()
                    # die
                    return
            # if a checkup REQ comes through
            if checkup in socks and socks[checkup] == zmq.POLLIN:
                # reply
                msg = checkup.recv()
                checkup.send("yep.")
            # if mongrel2 PUSHes a request
            elif m2.reqs in socks and socks[m2.reqs] == zmq.POLLIN:
                # handle request
                req = m2.recv()
                # if a disconnect, bail
                if req.is_disconnect():
                    continue
                # log request
                out.send('REQUEST', parse_request(req))
                # get session from cookie
                session = ''
                cookie = req.headers.get('cookie')
                if cookie:
                    c = Cookie.SimpleCookie(str(cookie))
                    s = c.get('session')
                    if s:
                        session = str(s.value)
                # send auth req
                try:
                    auth.send(session)
                except zmq.ZMQError as e:
                    out.send('ERROR', 'Auth service req/rep in wrong state.')
                    # reset state by closing and reconnecting
                    auth.close()
                    auth = ctx.socket(zmq.REQ)
                    auth.linger = LINGER
                    auth.hwm = 1
                    auth.connect(AUTH)
                    # auth service is down, so 500
                    m2.reply_http(req, 'Auth service not responding',
                                  code=500)
                    continue
                # poll with timeout for response
                auth_poller = zmq.Poller()
                auth_poller.register(auth, zmq.POLLIN)
                evts = auth_poller.poll(100)
                # if auth service has responded
                if evts:
                    resp = auth.recv_json()
                    # if we're authed, serve
                    if resp.get('success'):
                        ###########################
                        ## Now do some app logic ##
                        ###########################
                        # grab a random message from mongo
                        try:
                            c = db.messages.count()
                            r = list(db.messages.find())[random.randrange(0, c)]
                        except (pymongo.errors.ConnectionFailure,
                                pymongo.errors.AutoReconnect) as e:
                            # this request can't happen, so 500
                            out.send('DB', json.dumps({
                                'status': 'LOST_CONN',
                                'error': str(e)
                            }))
                            m2.reply_http(
                                req, 'DB connection lost.', code=500,
                                headers={
                                    'Content-Type': 'text/html',
                                    "Cache-Control": "no-cache, must-revalidate",
                                    "Pragma": "no-cache",
                                    "Expires": "Sat, 26 Jul 1997 05:00:00 GMT"
                                })
                            continue
                        # insert data into markup template
                        if r.get('text'):
                            m = markup.format(msg=r.get('text'))
                        else:
                            m = markup.format(msg='Nobody')
                        # reply with no cache headers
                        m2.reply_http(req, m, headers={
                            'Content-Type': 'text/html',
                            "Cache-Control": "no-cache, must-revalidate",
                            "Pragma": "no-cache",
                            "Expires": "Sat, 26 Jul 1997 05:00:00 GMT"
                        })
                        # log end of request
                        end_time = json.dumps(datetime.datetime.now(),
                                              default=dthandler)
                        out.send('REQUEST', json.dumps({
                            'status': 'DELIVERED',
                            'path': req.path,
                            'time': end_time,
                            'id': req.conn_id
                        }))
                        ###########################
                        ## app logic is complete ##
                        ###########################
                    # otherwise we do the auth redirect
                    else:
                        auth_url = resp.get('redirect')
                        path = URL_TEMPLATE.rstrip('/').format(
                            req.headers.get('host') + req.headers.get('URI'))
                        # TODO: handle auth url that includes qs's and hashes
                        path = urllib.quote(path)
                        redirect = str(auth_url + '?redirect=' + path)
                        m2.reply_http(req, '', code=302, headers={
                            'Location': redirect
                        })
                else:
                    # reset state by closing and reconnecting
                    out.send('ERROR', 'Auth timed out.')
                    auth.close()
                    auth = ctx.socket(zmq.REQ)
                    auth.linger = LINGER
                    auth.hwm = 1
                    auth.connect(AUTH)
                    # auth service is down, so 500
                    m2.reply_http(req, 'Auth service not responding',
                                  code=500)
                    continue
                # an unexpected error if we get here, respond 500
                # NOTE(review): the served/redirect branches above appear to
                # fall through to this reply — the original may have
                # `continue`d before it; confirm against upstream.
                m2.reply_http(req, 'Server Error', code=500)
        # keep server up by catching all exceptions raised from inside server loop
        except Exception as e:
            out.send('\nFAIL!\n-----')
            out.send('{0}----'.format(traceback.format_exc()))
""" import sys if len(sys.argv) > 1 and sys.argv[1] == "webtest": print "Running in test mode" from dammit.nullcache import NullCache get_manager = make_instance_getter('manager', lambda: URIManager(config.get_db_mock())) get_known = make_instance_getter('known', lambda: NullCache()) get_unknown = make_instance_getter('unknown', lambda: NullCache()) del sys.argv[1] else: get_manager = make_instance_getter('manager', lambda: URIManager(config.get_db())) get_known = make_instance_getter('known', lambda: cachemanager.new_instance('known')) get_unknown = make_instance_getter('unknown', lambda: cachemanager.new_instance('unknown')) """ get_manager = make_instance_getter('manager', lambda: URIManager(config.get_db())) get_known = make_instance_getter('known', lambda: cachemanager.new_instance('known')) get_unknown = make_instance_getter('unknown', lambda: cachemanager.new_instance('unknown')) class urldammit(object): """ Main service handler """ def HEAD(self, id): """ Check the status of a URI using a HEAD request ID is the SHA-1 of the URI """ u = self._locate(id) if not u: return
def _fetch_rain_history(sensors, station_id, date_predict):
    """Load the last six days of rainfall readings for *station_id*.

    Groups the per-timestamp `r` readings and sorts ascending by timestamp.
    NOTE(review): `ts` is compared as a string (`str(date)` vs stored value);
    this matches the original code — presumably `ts` is stored as an ISO-like
    string, verify against the ingestion code.
    """
    pipeline = [
        {"$match": {"id": int(station_id),
                    "ts": {"$gt": str(date_predict - timedelta(6)),
                           "$lt": str(date_predict)}}},
        {"$group": {"_id": "$ts", "r": {"$push": "$r"}}},
        {"$sort": SON([("_id", 1)])},
    ]
    return list(sensors.aggregate(pipeline, allowDiskUse=True))


def _five_day_features(clean_vals):
    """Build the fixed-width 5-slot feature windows from a station's daily sums.

    *clean_vals* is the NaN-cleaned, newest-first list of daily rainfall sums.
    Returns ``(x_part, util_part)``:
      - ``x_part``: first up-to-5 values, padded to exactly 5 with the list mean;
      - ``util_part``: a NaN sentinel followed by the first up-to-4 values,
        padded to exactly 5 (sentinel included) with the list mean.

    This replaces five copy-pasted branches in the original; the branches for
    fewer than 4 values were unreachable (the caller requires >= 4 usable
    days) but are handled uniformly here anyway.

    BUG FIX: the original indexed ``yobi__vals[i]`` inside ``for j`` loops,
    reusing a stale index and appending the same element repeatedly; the
    intended per-element indexing (shown by the commented-out lines in the
    original) is restored here.
    """
    filler = np.mean(clean_vals)
    # clean_vals is already NaN-free, but keep the defensive 0.0 substitution
    # the original performed on every append.
    head_x = [0.0 if math.isnan(v) else v for v in clean_vals[:5]]
    x_part = head_x + [filler] * (5 - len(head_x))
    head_u = [0.0 if math.isnan(v) else v for v in clean_vals[:4]]
    util_part = [float('nan')] + head_u + [filler] * (4 - len(head_u))
    return x_part, util_part


def get_features_r(date, val, lat, long, location_num, total_locations,
                   percent_train_data, date_predict=None, lt_=None, lng_=None):
    """Assemble rainfall regression features for one location.

    Parameters
    ----------
    date, val, lat, long : parallel sequences of per-day observations; rows
        where ``val`` does not cover all ``total_locations`` are dropped.
    location_num : index of the target location within each row.
    total_locations : expected number of locations per row.
    percent_train_data : fraction of samples used for training (train mode).
    date_predict : date to predict for; defaults to *today*.
        BUG FIX: the original default ``datetime.now().date()`` was evaluated
        once at import time, freezing the date for the process lifetime; it is
        now evaluated per call.
    lt_, lng_ : when both given, run in *predict* mode and also build the
        single prediction sample from nearby sensor stations.

    Returns
    -------
    Predict mode (``lt_``/``lng_`` given):
        ``(XX, YY, x, util_x, lts, lngs)`` — full design matrix and target,
        the one prediction row, its NaN-marked variant, and the lat/lng of
        each feature column.
    Train mode:
        ``(XX, YY, xx, yy, dd)`` — train/test splits plus test-row dates.

    NOTE(review): relies on module-level ``config``, ``comparison_funcs``,
    ``dailysums``, ``find_months`` and ``SON`` exactly as the original did.
    """
    if date_predict is None:
        date_predict = datetime.now().date()

    db = config.get_db()
    sensors = db.sensors
    sensors.create_index("id")

    # Keep only rows where data is present for every location.
    dates, vals, lats, longs = [], [], [], []
    for row in range(len(val)):
        if len(val[row]) == total_locations:
            dates.append(date[row])
            vals.append(val[row])
            lats.append(lat[row])
            longs.append(long[row])

    # Reduce datetimes to dates.
    dates = [d.date() for d in dates]
    months = find_months(dates)

    predicting = lt_ is not None and lng_ is not None
    if predicting:
        nan = float('nan')
        x = [nan] * 7          # prediction row: 7 own-location slots + 5 per neighbour
        util_x = [nan] * 7
        lts = [lt_] * 7
        lngs = [lng_] * 7
        # Seed value 7 kept from the original — presumably a station id to
        # exclude up front; TODO confirm.
        stations_id = [7]
        # Parallel-list cache of already-fetched station histories.
        totaldata_ids = [-1]
        totaldata = [-1]

        for loc in range(total_locations):
            if loc == location_num:
                continue
            lt = lats[0][loc]
            lng = longs[0][loc]
            neighbours = list(comparison_funcs.closest_loc(lat=lt, lng=lng)["ID"])

            # Walk the nearest-station list until one yields >= 4 usable days.
            while True:
                # Skip stations already rejected (successful stations are NOT
                # recorded here, so they may serve several neighbours — this
                # matches the original behaviour and the cache makes it cheap).
                while neighbours[0] in stations_id:
                    del neighbours[0]
                id_ = neighbours[0]

                if id_ in totaldata_ids:
                    yobidata = totaldata[totaldata_ids.index(id_)]
                else:
                    yobidata = _fetch_rain_history(sensors, id_, date_predict)
                    totaldata_ids.append(id_)
                    totaldata.append(yobidata)

                # Keep only well-formed timestamps that carry readings.
                usable = [q for q in yobidata
                          if len(q["_id"]) >= 18 and len(q["r"]) != 0]
                for q in usable:
                    q["_id"] = str(q["_id"])
                if usable:
                    yobi_dates, yobi_vals, a, b = dailysums.yobi(usable)
                    if len(yobi_vals) >= 4:
                        break
                # Not enough usable data: reject this station and try the next.
                stations_id.append(id_)

            # Replace NaN daily sums with 0.0, then order newest-first.
            # BUG FIX: the original cleaning loop reused the outer loop
            # variable `i`, leaving it stale for the padding code below.
            yobi__vals = [0.0 if math.isnan(v) else v for v in yobi_vals]
            yobi_dates.reverse()
            yobi__vals.reverse()

            x_part, util_part = _five_day_features(yobi__vals)
            x.extend(x_part)
            util_x.extend(util_part)
            lts.extend([lt] * 5)
            lngs.extend([lng] * 5)

        # --- features for the target location itself ---
        target_ids = list(comparison_funcs.closest_loc(lat=lt_, lng=lng_)["ID"])
        # BUG FIX: the original scanned the stale neighbour list `ID` here
        # instead of the freshly computed list for the target coordinates.
        while target_ids[0] in stations_id:
            del target_ids[0]
        id__ = target_ids[0]

        if id__ in totaldata_ids:
            yobi_data = totaldata[totaldata_ids.index(id__)]
        else:
            yobi_data = _fetch_rain_history(sensors, id__, date_predict)

        for rec in yobi_data:
            rec["_id"] = str(rec["_id"])
        yobi_dates, yobi_vals, a, b = dailysums.yobi(yobi_data)
        yobi_dates.reverse()
        yobi_vals.reverse()

        # Slots 0-4: the last five daily sums; slot 5: 3-day mean; slot 6: month.
        for k in range(5):
            x[k] = yobi_vals[k]
        x[5] = np.mean(yobi_vals[0:3])
        x[6] = datetime.now().date().month
        print(yobi_dates)
        print(yobi__vals)
        util_x[0] = float('nan')
        for k in range(4):
            util_x[k + 1] = yobi_vals[k]
        # nanmean over the whole util_x row (slot 5 itself is still NaN here,
        # so it is ignored) — order of assignments preserved from the original.
        util_x[5] = np.nanmean(util_x)
        util_x[6] = datetime.now().date().month

        x = np.matrix(x).reshape(1, -1)  # single prediction sample

    # --- design matrix shared by both modes ---
    vals = np.asarray(vals)
    data = pd.DataFrame({"dates": dates})
    data.insert(len(data.columns), str(location_num), vals[:, location_num])

    # 3-day rolling mean of the target column; first two entries are NaN
    # from the window warm-up and are zeroed as in the original.
    rolmean = data.rolling(window=3).mean()
    rolmean = np.asarray(rolmean.iloc[:, 1])
    rolmean[0] = 0.0
    rolmean[1] = 0.0

    # Lagged target values: vals_k is the series shifted k days back,
    # vals_0 is the prediction target (next day).
    vals_5 = vals[2:len(vals) - 5, location_num]
    vals_4 = vals[3:len(vals) - 4, location_num]
    vals_3 = vals[4:len(vals) - 3, location_num]
    vals_2 = vals[5:len(vals) - 2, location_num]
    vals_1 = vals[6:len(vals) - 1, location_num]
    vals_0 = vals[7:len(vals), location_num]
    train_len = int(math.floor(len(vals_0) * percent_train_data))

    X = np.column_stack((vals_1, vals_2, vals_3, vals_4, vals_5,
                         rolmean[6:len(vals) - 1], months[7:len(vals)]))
    # Five lags of every other location.
    for loc in range(total_locations):
        if loc != location_num:
            X = np.column_stack((X,
                                 vals[6:len(vals) - 1, loc],
                                 vals[5:len(vals) - 2, loc],
                                 vals[4:len(vals) - 3, loc],
                                 vals[3:len(vals) - 4, loc],
                                 vals[2:len(vals) - 5, loc]))
    # Last two columns: target value, then the sample's date.
    X = np.column_stack((X, vals_0))
    X = np.column_stack((X, dates[7:len(vals)]))
    X = np.matrix(X)

    n_feat = X.shape[1] - 2
    if predicting:
        XX = np.matrix(X[:, 0:n_feat])
        YY = np.asarray(np.matrix(X[:, n_feat]).T).reshape(-1, 1)
        return XX, YY, x, util_x, lts, lngs

    XX = np.matrix(X[0:train_len, 0:n_feat])
    YY = np.asarray(np.matrix(X[0:train_len, n_feat]).T)
    xx = np.matrix(X[train_len:, 0:n_feat])
    yy = np.asarray(np.matrix(X[train_len:, n_feat]).T)
    dd = np.asarray(np.matrix(X[train_len:, n_feat + 1]).T)
    return XX, YY, xx, yy, dd