def handle(self, env, start_response):
    """WSGI entry point: dispatch the request to a registered handler.

    Looks up a handler dict by the stripped PATH_INFO, invokes the
    callable registered for the HTTP method, then serializes the result
    as templated HTML, JSON, or raw text before responding.
    """
    url = env['PATH_INFO'].strip('/')
    method = env['REQUEST_METHOD']
    handler = self.handlers.get(url)
    # NOTE(review): if no handler matches the URL, or the method is not
    # registered on it, execution falls through and implicitly returns
    # None without calling start_response -- presumably the server layer
    # copes, but a 404/405 response may be intended here; confirm.
    if handler:
        if method in handler:
            req = Request(env)
            res = Response()
            content = handler[method](req, res)
            template = handler['TEMPLATE']
            template_dir = handler['TEMPLATE_DIR']
            if isinstance(content, dict):  # possibly wrapped by @template
                # The @template wrapper yields {'template': ..., 'content': ...};
                # unwrap it so the branches below see the real payload.
                if 'template' in content:
                    template = content['template']
                    content = content['content']
            if isinstance(content, HTTPError):
                # Let the error object render itself for this HTTP method.
                content = getattr(content, method.lower())(req, res)
            elif template and isinstance(content, dict):
                # Dict result with a template configured: render HTML.
                template = os.path.join(template_dir, template)
                content = str(Template(self.get_template(template)).render(content))
                res.content_type = 'text/html'
            elif isinstance(content, dict):
                # Dict result, no template: emit JSON.
                content = json.dumps(content)
                res.content_type = 'application/json'
            elif not content:
                # Normalize falsy results (None, '') to an empty body.
                content = ''
            res.content_length = str(len(content))
            start_response(res.status, res.headers)
            return [content]
def test_200_response(self):
    """A 200 response must serialize status line, headers and JSON body."""
    body = ['test response body']
    encoded_body = json.dumps(body)
    headers = {
        'Content-type': 'application/json',
        'Content-length': str(len(encoded_body)),
        'Foo': 'bar',
        'Baz': 'Quux',
    }
    header_lines = []
    for name in headers:
        header_lines.append('%s: %s' % (name, headers[name]))
    headers_string = '\r\n'.join(header_lines)
    response = Response(status=200, headers=headers, body=body)
    expected = self.response_template % ('200 OK', headers_string, encoded_body)
    self.assertEqual(response.raw(), expected)
def pe_ratio(ticker, components):
    """Fetch the P/E ratio for *ticker* and wrap it in a reply message.

    ``components`` is accepted for signature parity with the other
    command handlers but is not used here.
    """
    quote = data.get_quote_yahoo(ticker)
    pe_value = quote['PE'][0]
    if pe_value == 'N/A':
        # Yahoo reports a missing ratio as the literal string 'N/A'.
        return {"message": Response.pe_notfound(ticker)}
    formatted = '%.2f' % pe_value
    return {"message": Response.pe_response(ticker, formatted)}
class Manager():
    """High-level facade over the Ivizion device link.

    Owns the connection plus the shared Message/Response helpers and
    exposes command, status and escrow operations. The three status-style
    methods previously duplicated the same stage/send/receive sequence;
    it is now factored into private helpers.
    """

    def __init__(self):
        # Connect immediately so every method can assume an open link.
        self.ivizion = Ivizion()
        self.ivizion.connect()
        self.message = Message()
        self.response = Response()

    def _send_current_message(self):
        # Serialize the currently staged message and push it to the device.
        msg = self.message.get_message()
        self.ivizion.send(msg)

    def _poll(self):
        # Stage a default (status) request, send it, return the raw answer.
        self.message.update_message()
        self._send_current_message()
        return self.ivizion.recipe()

    def send_command(self, lng=LNG_5, cmd=RESET, data1='', data2=''):
        """Stage and transmit an arbitrary command frame (no reply read)."""
        self.message.update_message(lng=lng, cmd=cmd, data1=data1, data2=data2)
        self._send_current_message()

    def get_status(self):
        """Return the two status bytes (answer[4:6]) of a status request."""
        answer = self._poll()
        return answer[4:6]

    def show_status(self):
        """Run a status request and display it via the Response helper."""
        answer = self._poll()
        self.response.show_response(answer, REQ_STATUS)

    def get_escrow(self):
        """Run a status request and decode the escrow value from the reply."""
        answer = self._poll()
        return self.response.get_escrow(answer)
def login_client(self, request):
    """Authenticate a client and return the next page to show.

    A successful login yields the client page; a failed one sends the
    user back to the login page.
    """
    params = request.parameters
    if self.manager.login(params['name'], params['pass']):
        return Response.create_client()
    return Response.create_login()
def last_price(ticker):
    """Build the last-trade message for *ticker*: price, date and time."""
    ticker = ticker.upper()
    price = Share(ticker).get_price()
    if not price:
        return Response.last_price_notfound(ticker)
    # get_trade_datetime() presumably returns "YYYY-MM-DD HH:MM:SS ..." --
    # the slicing below depends on that layout.
    parts = Share(ticker).get_trade_datetime().split(' ')
    date_part = parts[0]
    month = calendar.month_name[int(date_part[5:7].lstrip('0'))]
    day = date_part[8:10].lstrip('0')
    trade_time = parts[1] + " UTC+0"
    return Response.last_price(ticker, price, month, day, trade_time)
def respond_to_edit_request(self, message):
    """
    Responds to an edit request. The bot will parse the body of the
    message, looking for verse quotations. These will replace the
    quotations that were placed in the original response to the user.
    Once the comment has been successfully edited, the bot then sends a
    message to the user letting them know that their verse quotations
    have been updated.
    """
    try:
        # The comment permalink is embedded in the message body as {url}.
        comment_url = message.body[1:message.body.find("}")]
        comment = self.r.get_submission(comment_url)
    except:
        try:
            message.reply("An error occurred while processing your edit request. "
                          "Please make sure that you do not modify the subject line of your message to %s."
                          % REDDIT_USERNAME)
        except requests.exceptions.ConnectionError:
            pass
        return
    # Only the original requester may edit, and the comment must resolve.
    if message.author == comment.author and comment:
        verses = find_verses(message.body)
        if verses is not None:
            for reply in comment.comments[0].replies:
                if str(reply.author) == REDDIT_USERNAME:
                    try:
                        self.log.info("%s has requested a comment edit..." % comment.author)
                        # Extract the subreddit slug out of the permalink.
                        link = reply.permalink[24:comment.permalink.find("/", 24)]
                        response = Response(message, self.parser, comment_url)
                        for verse in verses:
                            book_name = books.get_book(verse[0])
                            if book_name is not None:
                                v = Verse(book_name,       # Book
                                          verse[1],        # Chapter
                                          verse[3],        # Translation
                                          message.author,  # User
                                          link,            # Subreddit
                                          verse[2])        # Verse
                                if not response.is_duplicate_verse(v):
                                    response.add_verse(v)
                        if len(response.verse_list) != 0:
                            message_response = ("*^This ^comment ^has ^been ^edited ^by ^%s.*\n\n"
                                                % message.author)
                            message_response += response.construct_message()
                            if message_response is not None:
                                self.log.info("Editing %s's comment with updated verse quotations..."
                                              % message.author)
                                database.remove_invalid_statistics(reply.body, link)
                                reply.edit(message_response)
                                database.update_db_stats(response.verse_list)
                                try:
                                    message.reply("[Your triggered %s response](%s) has been successfully edited to reflect"
                                                  " your updated quotations."
                                                  % (REDDIT_USERNAME, comment_url))
                                except requests.exceptions.ConnectionError:
                                    pass
                        break
                    except:
                        # BUGFIX: a stray bare `raise` preceded this logging
                        # call, so the failure either re-raised out of the
                        # handler or (read as `raise log.warning(...)`)
                        # raised None -- the retry path was dead code.
                        # Log and bail out; the edit will be retried later.
                        self.log.warning("Comment edit failed. Will try again later...")
                        break
def submit(d):
    """Build an HTTP request from dict *d*, send it, and parse the reply."""
    header = Header(d["method"], d["path"], d["httpversion"])
    for entry in d['headers']:
        header.add_header(entry['header_name'], entry['header_value'])
    header.build_from_text()
    print(header.payload)
    sender = Sender()
    parser = Response()
    raw_reply = sender.send(d["protocol"], d["hostname"], header.payload)
    return parser.input_text(raw_reply)
def request(self, action, xml):
    """Send *xml* for *action*; map ECONNRESET into an error Response.

    Any other socket error propagates unchanged to the caller.
    """
    try:
        return Response(self.client.send(action, xml), action, True)
    except SocketError as e:
        # Only a reset connection is translated; everything else re-raises.
        if e.errno != errno.ECONNRESET:
            raise
        err = Response("ECONNRESET", action)
        err.error = "ECONNRESET"
        err.error_msg = "[Errno 104] Connection reset by peer"
        return err
def name_exchange(ticker, components):
    """Look up the company name and exchange for *ticker*.

    Known Yahoo exchange symbols are mapped to their common names;
    any other symbol is reported verbatim.
    """
    exchange_names = {"NMS": "NASDAQ", "NGM": "NASDAQ", "NYQ": "NYSE"}
    try:
        info = data.get_components_yahoo(ticker)
    except Exception:
        return {"message": Response.data_notfound(ticker)}
    name = info['name'][0]
    symbol = info['exchange'][0]
    exchange = exchange_names.get(symbol, symbol)
    return {"message": Response.name_exchange_response(ticker, name, exchange)}
def test_parse_links(self): for idx in range(len(testdata)): print idx s = Response(testdata[idx]) links_obj = s.parse_links(testdata[idx]['_links']) self.assertFalse(links_obj=={}) # must return a valid object self.assertEqual(links_obj,s._response['_links']) for link in links_obj: self.assertIn(link,expected_links) # fixed set of expected links self.assertTrue(links_obj[link]) # cannot have empty string for link url print '{0} : {1}'.format(link,links_obj[link]) pass
def respond_to_username_mention(self, msg):
    """
    Responds to a username mention. This could either contain one or more
    valid Bible verse quotation requests, or it could simply be a username
    mention without any valid Bible verses. If there are valid Bible
    verses, VerseBot generates a response that contains the text from
    these quotations. Otherwise, the message is forwarded to the VerseBot
    admin for review.

    :param msg: The message that contains the username mention
    """
    verses = find_verses(msg.body)
    if verses is not None:
        response = Response(msg, self.parser)
        for verse in verses:
            book_name = books.get_book(verse[0])
            if book_name is not None:
                v = Verse(book_name,                   # Book
                          verse[1],                    # Chapter
                          verse[3],                    # Translation
                          msg.author,                  # User
                          msg.subreddit.display_name,  # Subreddit
                          verse[2])                    # Verse
                # Skip verses already queued on this response.
                if not response.is_duplicate_verse(v):
                    response.add_verse(v)
        if len(response.verse_list) != 0:
            message_response = response.construct_message()
            if message_response is not None:
                self.log.info("Replying to %s with verse quotations..." % msg.author)
                try:
                    msg.reply(message_response)
                    database.update_db_stats(response.verse_list)
                    database.increment_comment_count()
                except praw.errors.Forbidden:
                    # This message is unreachable.
                    pass
                except praw.errors.APIException as err:
                    # Stale or deleted targets are expected; log only.
                    if err.error_type in ("TOO_OLD", "DELETED_LINK", "DELETED_COMMENT"):
                        self.log.warning("An error occurred while replying"
                                         " with error_type %s." % err.error_type)
    else:
        # No verse requests found: escalate to a human for review.
        self.log.info("No verses found in this message. "
                      "Forwarding to /u/%s..." % VERSEBOT_ADMIN)
        try:
            self.r.send_message(VERSEBOT_ADMIN, "Forwarded VerseBot Message",
                                "%s\n\n[[Link to Original Message](%s)]"
                                % (msg.body, msg.permalink))
        except requests.ConnectionError:
            pass
def execute(message):
    """Handle one incoming chat message.

    '/test <text>' echoes <text> to the group (20% chance of voice);
    anything else is answered by the regular responder.
    """
    chat = message['chat']
    chat_id = chat['id']
    text = message['text']
    if text.startswith('/test'):
        # BUGFIX: lstrip('/test') strips any of the characters '/','t','e','s'
        # from the front (so '/test testing' became 'ing'); slice off the
        # literal command prefix instead.
        text = text[len('/test'):]
        voice_prob = 0.2
        resp = Response(mood=Mood.NEUTRAL, voice_probability=voice_prob)
        resp.add_msg_or_voice(text)
        main.send_to_group(resp)
    else:
        main.send_response(str(chat_id), responder.reply_to(message))
def getToast():
    """Fetch a single toast by its id, serialized for the client."""
    toast_id = int(request.args.get('toastId'))
    item = Toast.query.get(toast_id)
    if item is None:
        return jsonify(Response.fail(msg="找不到这条toast"))
    payload = {
        "toast_id": item.id,
        "body": item.body,
        "time": item.creation_time.strftime("%Y-%m-%d %H:%M:%S"),
        "trumpet_count": item.trumpet_count,
        "shit_count": item.shit_count,
    }
    return jsonify(Response.success(msg="拉取成功", data=payload))
def insertInfo():
    """Register a scanner record (name/phone/position).

    The phone must be a non-blank, all-digit string that is not already
    registered.
    """
    name = request.args.get('name')
    phone = request.args.get('phone')
    position = request.args.get('position')
    # Reject a missing, blank, or non-numeric phone number up front.
    if phone is None or not phone.strip() or not phone.isdigit():
        return jsonify(Response.fail(msg="信息有误"))
    existing = Scanner.query.filter_by(phone=phone).all()
    if existing:
        return jsonify(Response.fail(msg="该手机号已录入,请更换手机号"))
    scanner = Scanner(name, phone, position)
    db.session.add(scanner)
    db.session.commit()
    return jsonify(Response.success(msg="插入成功", data=scanner.id))
def shit():
    """Record a 'shit' vote on a toast and bump its counter."""
    toast_id = int(request.args.get('toastId'))
    uid = int(request.args.get('uid'))
    toast = Toast.query.get(toast_id)
    if toast is None:
        # BUGFIX: previously the ToastOperation row was inserted and
        # committed *before* this existence check, leaving orphan vote
        # rows pointing at a toast that does not exist (and committing
        # twice on the success path).
        return jsonify(Response.fail(msg="找不到这条toast"))
    shit_operation = ToastOperation(uid, toast_id, SHIT_OPERATION, datetime.now())
    db.session.add(shit_operation)
    toast.shit_count += 1
    db.session.commit()
    return jsonify(Response.success(msg="shit成功"))
def trailing_volatility(ticker, components):
    """Annualized close-to-close volatility over the trailing N days.

    N is taken from the first component matching PATTERNS['tvol'];
    without one, a usage message is returned instead.
    """
    days = None
    for each in components:
        if PATTERNS['tvol'].match(each):
            days = int(each)
            break
    # Idiom fix: identity check instead of `days == None`.
    if days is None:
        return {"message": Response.trailing_days(ticker)}
    try:
        quotes = data.DataReader(ticker, 'google')['Close'][-days:]
    except Exception:
        return {"message": Response.data_notfound(ticker)}
    # Annualize the variance of daily log returns (252 trading days/year).
    logreturns = np.log(quotes / quotes.shift(1))
    vol = round(np.sqrt(252 * logreturns.var()), 5)
    return {"message": Response.trailing_vol(days, ticker, vol)}
def process_request(query, channel): """ Each user query is forwarded to the appropriate function based on the command in the query. Once the query has been processed, the resulting response is sent as a message to the channel. If no commands are found, the default is to fetch the last price for the ticker. """ # components - each query split into a list of words and symbols components = query.split(' ') if '' in components: components.remove('') ticker = components[0] if len(components)>1: command = list(set(COMMANDS).intersection(components[1:])) if not command: message = Response.unknown_command(ticker) return slack_client.api_call("chat.postMessage", channel=channel, text=message, as_user=True) components = components[2:] output = OPERATIONS[command[0]](ticker, components) message = output["message"] if output.get("attachments"): attachments = output['attachments'] return slack_client.api_call("chat.postMessage", channel=channel, text=message, attachments=attachments, as_user=True) return slack_client.api_call("chat.postMessage", channel=channel, text=message, as_user=True) message = OPERATIONS["last_price"](ticker) return slack_client.api_call("chat.postMessage", channel=channel, text=message, as_user=True)
def toast():
    """Create a new toast for *uid* with both counters zeroed."""
    uid = request.args.get('uid')
    body = request.args.get('body')
    new_toast = Toast(uid, body, datetime.now(), 0, 0)
    db.session.add(new_toast)
    db.session.commit()
    return jsonify(Response.success(msg="吐槽成功"))
def logout_client(self, current_page, request):
    """Log the named client out.

    On success the login page is shown; otherwise the user stays on the
    current page.
    """
    name = request.parameters['name']
    if self.manager.logout(name):
        return Response.create_login()
    return Response(page=current_page)
def list_toasts():
    """Return one page of toasts, newest first, annotated with whether
    the requesting user has already shit/trumpeted each one."""
    page = int(request.args.get('page'))
    uid = int(request.args.get('uid'))
    paginate = Toast.query.order_by(Toast.creation_time.desc()).paginate(
        page, ITEMS_PER_PAGE, False)
    results = []
    for entry in paginate.items:
        serialized = {
            "toast_id": entry.id,
            "body": entry.body,
            "time": entry.creation_time.strftime("%Y-%m-%d %H:%M:%S"),
            "trumpet_count": entry.trumpet_count,
            "shit_count": entry.shit_count,
            "trumpet": 0,
            "shit": 0,
        }
        # Flag this user's own votes (operation type 0 -> shit, 1 -> trumpet).
        operations = ToastOperation.query.filter_by(uid=uid).filter_by(
            toast_id=entry.id).all()
        for op in operations:
            if op.type == 0:
                serialized['shit'] = 1
            if op.type == 1:
                serialized['trumpet'] = 1
        results.append(serialized)
    return jsonify(Response.success(msg="拉取成功", data=results))
def verify():
    """Validate a scanned payload of the form "<id>-<phone>" and mark it used.

    The id must resolve to a Scanner whose phone matches, and each code
    may be verified only once (item.valid flips to 0 on first use).
    """
    info = request.args.get('info')
    if info is None or len(info.strip()) < 1:
        return jsonify(Response.fail(msg="信息有误,验证失败"))
    infoSegs = info.strip().split('-')
    if len(infoSegs) == 2:
        item = Scanner.query.get(infoSegs[0])
        # FIX: the builtin cmp() was removed in Python 3; for strings,
        # `cmp(a, b) != 0` is exactly `a != b`.
        if item is None or item.phone != infoSegs[1]:
            return jsonify(Response.fail(msg="信息有误,验证失败"))
        if item.valid == 0:
            # Already consumed -- refuse a second verification.
            return jsonify(Response.fail(msg="该二维码已被验证,不可重复验证"))
        item.valid = 0
        db.session.commit()
        res = {'name': item.name, 'phone': item.phone, 'position': item.position}
        return jsonify(Response.success(msg="验证通过", data=res))
    return jsonify(Response.fail(msg="信息有误,验证失败"))
def request(self, request):
    """Execute *request* against the service and wrap the HTTP reply.

    Streaming requests are exposed line-by-line through the request's
    line parser; non-streaming replies carry the full body text.
    """
    target = "{}{}".format(self._base_url, request.path)
    http_response = self._session.request(
        request.method,
        target,
        headers=self._headers,
        params=request.params,
        data=request.body,
        stream=request.stream
    )
    wrapped = Response(
        request.method,
        http_response.url,
        http_response.status_code,
        http_response.reason,
        http_response.headers.get("content-type", None),
    )
    if request.stream:
        wrapped.set_line_parser(request.line_parser)
        wrapped.set_lines(http_response.iter_lines(1))
    else:
        wrapped.set_raw_body(http_response.text)
    return wrapped
def handle_conn(conn, addr, app):
    """Serve one client socket: read raw bytes, parse them as a Request,
    and answer with an error Response when parsing fails.

    NOTE(review): only the error path responds and closes here --
    presumably the successful-parse handling continues past this excerpt;
    confirm. `app` is unused in the visible portion.
    """
    print "Accepted connection from: " + str(addr)
    while True:
        try:
            data = conn.recv(2**20)  # read up to 1 MiB per iteration
        except Exception, _:
            print 'Closing connection at: ', addr
            conn.close()
            return
        response = Response()
        try:
            request = Request(data)
        except ServerError, e:
            # Malformed request: reply with the error's status code and stop.
            print "Request Error: ", e.message, '\n', data
            response.set_status(status_code=e.code)
            conn.sendall(str(response))
            conn.close()
            return
def get_fred(symbol, components):
    """Quote a FRED data series.

    With no dates in *components*: the most recent non-null value.
    With one date: the value as of that date. With two dates: the
    high/low over the period. More than two dates is an error.
    """
    symbol = symbol.upper()
    try:
        df = data.get_data_fred(symbol)
    except Exception:
        return {"message": Response.fred_notfound(symbol)}
    today, dates = current_date(), []
    for each in components:
        if PATTERNS['valid_date'].match(each):
            dates.append(each)
    # No dates: get most recent value
    if not dates:
        # BUGFIX: dropna() returns a new frame and was previously
        # discarded, so the "latest" row could still be NaN.
        df = df.dropna()
        last_value = df.tail(1)[symbol][0]
        last_value = ('%.3f' % last_value)
        return {"message": Response.basic_fred(symbol, last_value)}
    # Clean dates: at most two, none in the future, all well-formed.
    if len(dates) > 2:
        return {"message": Response.too_many_dates(symbol)}
    for each in dates:
        if each > today:
            return {"message": Response.invalid_date(each)}
        try:
            date = datetime.datetime.strptime(each, '%Y-%m-%d')
        except ValueError:
            return {"message": Response.invalid_date(each)}
    # Return price data for one day
    if len(dates) == 1:
        date = dates[0]
        # asof() falls back to the closest prior observation.
        ts = pd.DatetimeIndex.asof(df.index, date)
        if pd.isnull(ts):
            return {"message": Response.fred_date_notfound(symbol, date)}
        value = df.loc[ts][symbol]
        if pd.isnull(value):
            return {"message": Response.fred_date_notfound(symbol, date)}
        return {"message": Response.date_fred(symbol, date, value)}
    # If 2 dates are entered, returned the range during the given period
    else:
        dates = sorted(dates)
        start, end = dates[0], dates[1]
        df = df.loc[start:end]
        high = ('%.3f' % df[symbol].max())
        low = ('%.3f' % df[symbol].min())
        return {"message": Response.fred_data_range(symbol, start, end, high, low)}
async def send_verses(self, body, verses, user, channel, websocket):
    """Quote each requested verse back to *channel*, skipping duplicates.

    Does nothing when *verses* is None or no quotable verse survives
    deduplication.
    """
    if verses is None:
        return
    response = Response(body, self.parser)
    for verse in verses:
        book_name = books.get_book(verse[0])
        if book_name is None:
            continue
        candidate = Verse(book_name, verse[1], verse[3], user, channel, verse[2])
        if not response.is_duplicate_verse(candidate):
            response.add_verse(candidate)
    if len(response.verse_list) == 0:
        return
    message_response = response.construct_message()
    if message_response is not None:
        await self.send_message(message_response, channel, websocket)
def range_volatility(ticker, components):
    """Annualized volatility of *ticker* between two user-supplied dates.

    Exactly two valid, non-future dates must appear in *components*, and
    the sampled range must contain at least 10 closing prices.
    """
    # Parse and check: find components matching dates, ensure start and
    # end are present, ensure dates are valid.
    today = current_date()
    dates = [each for each in components if PATTERNS['valid_date'].match(each)]
    if len(dates) != 2:
        return {"message": Response.vol_required_dates(ticker)}
    for candidate in dates:
        if candidate > today:
            return {"message": Response.invalid_date(candidate)}
        try:
            datetime.datetime.strptime(candidate, '%Y-%m-%d')
        except ValueError:
            return {"message": Response.invalid_date(candidate)}
    # Volatility Calculation
    start, end = sorted(dates)
    try:
        quotes = data.DataReader(ticker, 'google')['Close'].loc[start:end]
        if len(quotes) < 10:
            return {"message": Response.vol_range_size(ticker)}
    except Exception:
        return {"message": Response.data_notfound(ticker)}
    logreturns = np.log(quotes / quotes.shift(1))
    vol = round(np.sqrt(252 * logreturns.var()), 5)
    return {"message": Response.range_vol(ticker, start, end, vol)}
def handle(self):
    """Serve a single HTTP request on this connection: parse the request
    line, resolve the requested file (index.html for directories), and
    fill in self.response, which is always flushed to the socket.
    """
    self.raw_request = self.request.recv(2048).strip()
    self.request_lines = self.raw_request.split('\r\n')
    # Log the request
    print "[{0}] {1}".format(self.client_address[0], self.request_lines[0])
    # The request itself is the first line, everything after it is a header
    # For example: GET / HTTP/1.1\r\n
    self.http_request = Request(self.request_lines[0])
    # The response is an object that gets serialized and written to the
    # socket at the end of the server loop
    self.response = Response(self.http_request)
    try:
        # Check if the request method is allowed, as this is a limited server
        self.http_request.validate()
        # What file are we trying to load?
        path = self.requested_file(self.http_request.details["path"])
        # If it's a directory, try to load index.html
        if isdir(path):
            path = "{0}index.html".format(path)
        # Serve the file
        if isfile(path):
            with open(path) as file:
                headers = {
                    "Content-Type": self.content_type(file.name),
                    "Content-Length": getsize(file.name),
                    "Connection": "close"
                }
                self.response.headers.add_headers(**headers)
                self.response.body = file.read()
        else:
            # Serve a 404
            self.response.set_code(404)
            self.response.body = "File not found\n"
            headers = {
                "Content-Type": "text/plain",
                "Content-Length": len(self.response.body),
                "Connection": "close"
            }
            self.response.headers.add_headers(**headers)
    except Exception as e:
        # Serve a 500 with the exception message
        # NOTE(review): e.value assumes the project's exceptions carry a
        # .value attribute; a builtin exception here would raise
        # AttributeError out of the handler -- confirm.
        self.response.set_code(500)
        self.response.body = e.value
    finally:
        # Make sure we write response to the socket regardless of if
        # there were any errors
        self.request.sendall(self.response.__str__())
def actions(ticker, components):
    """List dividends and splits for *ticker*, most recent last-to-first.

    ``components`` is accepted for signature parity with the other
    command handlers but is not used here.
    """
    # Splits are quoted as decimals - convert to fraction ('7 for 1' vs .142857)
    def split_ratio(dec):
        frac = Fraction.from_float(dec).limit_denominator(10)
        num, denom = frac.numerator, frac.denominator
        return "{} for {}".format(denom, num)
    try:
        actions = data.get_data_yahoo_actions(ticker)
    except Exception:
        return {"message": Response.data_notfound(ticker)}
    if len(actions) == 0:
        return {"message": Response.no_actions(ticker)}
    actions.ix[actions.action == "SPLIT", 'value'] = actions.value.map(lambda x: split_ratio(x))
    actions['action'] = actions.action.map(lambda x: x.lower())
    actions.index = actions.index.map(
        lambda x: datetime.date(x.year, x.month, x.day).strftime('%Y-%m-%d'))
    # BUGFIX: `actions.iloc[::-1]` returns a reversed *copy*; the result
    # was previously discarded, so the listing order never changed.
    actions = actions.iloc[::-1]
    # Build message from DataFrame
    message = Response.list_actions(ticker)
    for date, action, value in zip(actions.index, actions.action, actions.value):
        message += ("\n{} - {} `{}`".format(date, action, value))
    return {"message": message}
def get(self):
    """Nightly group message: maybe a good-night phrase or a 9gag link,
    optionally followed by laughter. No-op while the bot is disabled.

    The prob() calls are kept in their original order since each one
    draws from the random source.
    """
    if not database.get_setting(Setting.ENABLED):
        return
    reply = Response()
    # 20% chance of a good-night phrase, otherwise 5% chance of a link.
    if prob(0.2):
        reply.add_msg_or_voice(random_of(good_night_phrases))
    elif prob(0.05):
        reply.add_msg(get_9gag_post_url())
    # Independent 50% chance of appending laughter.
    if prob(0.5):
        reply.add_msg_or_voice(laughter)
    main.send_to_group(reply)
def get_changelog(self, key):
    """Fetch the change history for monitor *key*."""
    path = 'monitors/%s/changes/' % key
    url = urlparse.urljoin(self.endpoint.base_url, path)
    resp = self.endpoint.get(url)
    return Response(resp, self.endpoint)
def delete(self, **params):
    """Issue a DELETE for this resource; store and return the wrapped
    response, raising via raise_if_error() on an error status."""
    raw = requests.delete(self.path, params=params, headers=self.headers)
    wrapped = Response(self, raw)
    self.response = wrapped
    wrapped.raise_if_error()
    return wrapped
def show(self, **kwargs):
    """Fetch this resource and wrap the payload in a Response."""
    payload = self.fetch(**kwargs)
    return Response(payload)
def post(self):
    """Create a mentoring appointment for the requesting user.

    Validates the JSON payload, resolves the project and user, enforces
    the one-pending-appointment and per-project daily limits, then picks
    an available online mentor and creates the appointment.
    """
    data = request.get_json()
    # Checks if required data to create an appointment was provided in request
    if not data:
        return res.badRequestError("Missing data to process request.")
    if not data.get("topic") and not data.get("project"):
        return res.badRequestError(
            "Missing data to process request. No topic or project provided to search for mentors"
        )
    if not data.get("login"):
        return res.badRequestError(
            "Missing data to process request. No user login provided")
    # Checks if project name exists in database
    queryProject = Project.query.filter_by(
        name=data.get("project")).first()
    if not queryProject:
        return res.resourceMissing("No project {} found.".format(
            data.get("project")))
    # schema.dump returns a (data, errors) pair here.
    project, error = project_schema.dump(queryProject)
    if error:
        return res.internalServiceError(error)
    print(project)
    # Checks if user with provided login exists in database
    user, error = User.queryByLogin(data.get("login"))
    if error:
        return res.resourceMissing(error)
    print(user)
    # A user may only have one pending appointment at a time.
    queryUserAppointment = Appointment.query.filter_by(
        id_user=user['id'], status=Status['Pending']).first()
    if queryUserAppointment:
        return res.badRequestError(
            "You have already an appointment pending")
    # Limits appointments made by user for a specific project
    projectAppointmentsCount = Appointment.queryCountProjectAppointmentsbyUser(
        project["id_project42"], user["id"])
    if projectAppointmentsCount > _maxAppointmentsPerDay:
        return res.badRequestError(
            "User reached limit appointments for project {}".format(
                data.get("project")))
    # Retrieves available mentors for the specified project:
    # first, active mentors with no appointments at all...
    queryMentor = Mentor.query \
        .filter(~Mentor.appointments.any(), Mentor.id_project42==project['id_project42'], Mentor.active==True, Mentor.id_user42!=user['id_user42']) \
        .all()
    # ...then active mentors whose appointments have status == 2.
    # NOTE(review): status 2 presumably means closed/finished -- confirm
    # against the Status enum.
    queryMentor2 = Mentor.query \
        .join(Appointment) \
        .filter(Mentor.id_project42==project['id_project42'], Mentor.active==True, Mentor.id_user42!=user['id_user42']) \
        .filter(Appointment.status==2) \
        .all()
    for q in queryMentor2:
        queryMentor.append(q)
    if not queryMentor:
        print("hereee")
        return res.resourceMissing(
            'No mentors found for project {}'.format(data.get('project')))
    #mentors = [mentor for mentor in queryMentor if mentor.app]
    #mentors = mentors_schema.dump(queryMentor).data
    onlineUsers = Api42.onlineUsers()
    # Checks online students is not empty
    if len(onlineUsers) == 0:
        return res.resourceMissing("No mentors found on campus.")
    # Keep only candidate mentors currently online.
    availablementors = [
        mentor for mentor in queryMentor for x in onlineUsers
        if mentor.id_user42 == x['id']
    ]
    # Checks if there is avaliable online mentors for the project/topic
    if not availablementors:
        return res.resourceMissing(
            "No mentors online found for {}.".format(data.get("project")))
    # Calls 'mentor algorithm' to select a mentor from availablementors.
    chosenMentor = mentorAlgorithm(availablementors)
    print(chosenMentor)
    # Creates and returns appointment if valid
    if not chosenMentor:
        return res.internalServiceError("Error: mentor selection.")
    newappointment, error = Appointment.createAppointment(
        chosenMentor.id, user['id'])
    if error:
        return res.internalServiceError(error)
    print(newappointment)
    return res.postSuccess("Appointment created successfully",
                           newappointment)
class infectoDoppler():
    """Configuring and running simulations to build a version.

    Runs an epidemic (SIR-like) simulation over a population of sound
    objects ("Doppler"s): infected sounds are mixed into a multichannel
    audio stream day by day, and the epidemic/audio data are saved as a
    named "version".

    NOTE(review): all state lives in *class* attributes and the methods
    take no `self` (they are invoked as `infectoDoppler.method(...)`), so
    only one simulation can exist per process -- presumably intentional;
    confirm before instantiating twice.
    """
    configdata = None
    ppath = None            # population (input sounds) path
    vpath = None            # version output path
    filelist = None
    vname = None
    starttime = 0
    population = 0          #Sounds population size
    infectedratio = 0.0     #Infected population ratio
    days = 0                #Duration for the simulation in days
    daysize = 0             #Day duration in audio samples
    caseszero = 0           #Number of infected objects for the first day
    modecontacts = 0        #Default number of contacts
    response = False
    dp = []                 # the Doppler (sound object) population
    ac = AudioControl()
    samplerate = None
    channels = 0
    audiodata = None
    audiolength = 0         #Output audio length in samples
    audiomax = 0.0          #Here we save the maximum level of the audio signal
    vr = Virus()
    rp = Response()         # epidemic response (isolation) model
    vz = Visualization()
    infectedset = set()     # indices of currently infected Dopplers
    recoveredset = set()    # indices cured during the current day
    infectiondata = None    # running counters row (day, totals, per channel, ratio)
    epidemic = None         # per-day DataFrame of the counters above
    infectedsounds = None   # DataFrame logging every finished infection

    def __init__(self, configpath):
        # Loads config, builds the population, injects the first cases
        # and immediately runs the whole simulation.
        infectoDoppler.configdata = js.load(open(configpath))
        infectoDoppler.setConfig(infectoDoppler.configdata)
        infectoDoppler.infectiondata = infectoDoppler.getInfectionDataList()
        infectoDoppler.epidemic = infectoDoppler.getEpidemicDataframe()
        infectoDoppler.infectedsounds = infectoDoppler.getInfectedSoundsDataframe(
        )
        print(self)
        infectoDoppler.buildPopulation(infectoDoppler.filelist)
        infectoDoppler.startInfections(infectoDoppler.caseszero,
                                       infectoDoppler.population)
        infectoDoppler.starttime = tm.time()
        infectoDoppler.saveFirstDay()
        infectoDoppler.run()

    #Building a new infecto Doppler version
    def run():
        print("                    ", end="\r")
        for d in range(infectoDoppler.days):
            print("-- Simulating day: " + str(d), end="\r")
            infectoDoppler.mergeInfections(d)
            infectoDoppler.simulateDay(d)
            infectoDoppler.updateInfectedRatio()
            infectoDoppler.ac.updateAudioGain(infectoDoppler.infectedratio)
            infectoDoppler.saveDay(d)
        print("-- Simulation is finished!          ", end="\n")
        print("-- Time needed for building this version: " + \
              infectoDoppler.getSimulationTime(infectoDoppler.starttime, tm.time()))
        infectoDoppler.saveSimulationData()
        infectoDoppler.saveVersion()
        print("-- Plotting version's data...", end="\r")
        infectoDoppler.vz.simulationVisualization(infectoDoppler.vname, infectoDoppler.vpath + "charts/", \
            infectoDoppler.epidemic, infectoDoppler.audiodata, infectoDoppler.samplerate, \
            infectoDoppler.population, infectoDoppler.days)
        print("-- New version of infecto Doppler saved!", end="\n")

    #Merging audio data from infected doppler objects
    def mergeInfections(d):
        firstsample = d * infectoDoppler.daysize
        for s in range(infectoDoppler.daysize):
            for o in infectoDoppler.infectedset:
                infectoDoppler.sumAmplitude(infectoDoppler.dp[o],
                                            firstsample + s)
                infectoDoppler.evolInfection(infectoDoppler.dp[o], d)
        infectoDoppler.updateInfectedSet()

    #Adding doppler object sample to sound stream
    def sumAmplitude(doppler, s):
        amplitude = infectoDoppler.ac.updateDopplerSampleAmplitude(
            doppler.audiodata[doppler.infectioncourse])
        infectoDoppler.audiodata[doppler.dchannel][s] += amplitude
        infectoDoppler.checkAudioMax(
            infectoDoppler.audiodata[doppler.dchannel][s])

    #Saving de maximum amplitude value to normalize before saving
    def checkAudioMax(a):
        if infectoDoppler.audiomax < abs(a):
            infectoDoppler.audiomax = abs(a)

    #Simulating an epidemic day
    def simulateDay(d):
        # Iterate over a copy: spread() mutates infectedset.
        actualinfected = infectoDoppler.infectedset.copy()
        for o in actualinfected:
            infectoDoppler.spread(infectoDoppler.dp[o], d)
        infectoDoppler.rp.checkResponse(d)

    def updateInfectedSet():
        # Drop the Dopplers cured today from the infected pool.
        infectoDoppler.infectedset = infectoDoppler.infectedset.difference(
            infectoDoppler.recoveredset)
        infectoDoppler.recoveredset = set()

    #Executing the virus spread
    def spread(idoppler, d):
        n = idoppler.getDayContactsCount(
            infectoDoppler.modecontacts, infectoDoppler.rp.getIsolationFactor())
        # n distinct random contacts for this infected object.
        c = rd.sample(range(0, infectoDoppler.population - 1), n)
        for i in range(len(c)):
            infectoDoppler.decideInfection(infectoDoppler.dp[c[i]], d)

    def decideInfection(doppler, d):
        # Infection happens with probability vr.infectionthreshold,
        # unless the contact is immune or already infected.
        t = rd.random()
        if t < infectoDoppler.vr.infectionthreshold:
            infectoDoppler.checkImmunity(doppler,
                                         infectoDoppler.vr.immunityperiod, d)
            if (doppler.hasimmunity == False and doppler.isinfected == False):
                infectoDoppler.setInfection(doppler, d)

    def setInfection(doppler, d):
        doppler.infectionnumber += 1
        doppler.isinfected = True
        doppler.infectiondate = d
        # infectiondata layout: [day, total, active, per-channel..., ratio]
        infectoDoppler.infectiondata[1] += 1
        infectoDoppler.infectiondata[2] += 1
        infectoDoppler.infectiondata[doppler.dchannel + 3] += 1
        infectoDoppler.infectedset.add(doppler.dnumber)

    def evolInfection(doppler, d):
        # One audio sample consumed per stream sample; cured at end of clip.
        doppler.infectioncourse += 1
        if doppler.infectioncourse == doppler.size:
            infectoDoppler.setCure(doppler, d)

    def setCure(doppler, d):
        doppler.isinfected = False
        doppler.wasinfected = True
        doppler.hasimmunity = True
        doppler.enddate = d
        doppler.infectioncourse = 0
        infectoDoppler.infectiondata[2] -= 1
        infectoDoppler.infectiondata[doppler.dchannel + 3] -= 1
        infectoDoppler.recoveredset.add(doppler.dnumber)
        infectoDoppler.saveInfection(doppler)

    def startInfections(c, p):
        # Inject the c "cases zero" at random among the population.
        l = rd.sample(range(0, p - 1), c)
        for i in range(c):
            infectoDoppler.setInfection(infectoDoppler.dp[l[i]], 1)
        infectoDoppler.updateInfectedRatio()
        print("-- Cases zero injected!", end="\n")

    def checkImmunity(doppler, period, d):
        # Immunity wears off `period` days after recovery.
        if doppler.hasimmunity == True:
            if d > doppler.enddate + period:
                doppler.hasimmunity = False

    def updateInfectedRatio():
        infectoDoppler.infectedratio = infectoDoppler.infectiondata[
            2] / infectoDoppler.population

    def setConfig(data):
        # Map JSON config keys onto the class state; also sizes the
        # output audio buffer and loads/filters the sound file list.
        infectoDoppler.ppath = data["pPath"]
        infectoDoppler.vpath = data["vPath"]
        infectoDoppler.vname = data["name"]
        infectoDoppler.days = data["days"]
        infectoDoppler.daysize = data["daySize"]
        infectoDoppler.caseszero = data["casesZero"]
        infectoDoppler.modecontacts = data["modeContacts"]
        infectoDoppler.vr.infectionthreshold = data["infectionThreshold"]
        infectoDoppler.vr.immunityperiod = data["immunityPeriod"]
        infectoDoppler.response = data["response"]
        infectoDoppler.rp.responseisactive = False
        infectoDoppler.rp.responsestart = data["responseStart"]
        infectoDoppler.rp.actionsperiod = data["actionsPeriod"]
        infectoDoppler.rp.isolationfactor = data["isolationFactor"]
        infectoDoppler.samplerate = data["sampleRate"]
        infectoDoppler.channels = data["channels"]
        infectoDoppler.audiolength = infectoDoppler.days * infectoDoppler.daysize
        infectoDoppler.audiodata = np.zeros(
            (infectoDoppler.channels, infectoDoppler.audiolength),
            dtype="float32")
        infectoDoppler.ac.gaincontrol = data["gainControl"]
        infectoDoppler.ac.gainfactorat0 = data["gainFactorAt0"]
        infectoDoppler.ac.gainfactorat1 = data["gainFactorAt1"]
        infectoDoppler.filelist = os.listdir(data["pPath"])
        infectoDoppler.cleanFileList(infectoDoppler.filelist)
        infectoDoppler.filelist.sort()
        # Population is capped by the number of available sound files.
        if data["population"] > 0 and data["population"] <= len(
                infectoDoppler.filelist):
            infectoDoppler.population = data["population"]
        else:
            infectoDoppler.population = len(infectoDoppler.filelist)

    def buildPopulation(filelist):
        for f in range(infectoDoppler.population):
            print("-- Loading sound object " + str(f), end="\r")
            infectoDoppler.dp.append(
                Doppler(f, infectoDoppler.channels, infectoDoppler.ppath,
                        filelist[f]))
        print("-- Sound objects loaded!          ", end="\n")

    def saveFirstDay():
        # Day-zero row: all counters start at zero.
        row = pd.DataFrame([(0, 0, 0, 0, 0, 0, 0, 0.0)],
                           columns=infectoDoppler.epidemic.columns)
        infectoDoppler.epidemic = pd.concat([infectoDoppler.epidemic, row])

    def saveDay(d):
        infectoDoppler.infectiondata[0] = d + 1
        infectoDoppler.infectiondata[len(infectoDoppler.infectiondata) -
                                     1] = infectoDoppler.infectedratio
        row = pd.DataFrame([infectoDoppler.infectiondata[:]],
                           columns=infectoDoppler.epidemic.columns)
        infectoDoppler.epidemic = pd.concat([infectoDoppler.epidemic, row])

    def saveInfection(doppler):
        row = pd.DataFrame([[doppler.dnumber, doppler.name, doppler.infectionnumber, doppler.dchannel, \
            doppler.contactfactor, doppler.infectiondate, doppler.enddate, doppler.size]], \
            columns=infectoDoppler.infectedsounds.columns)
        infectoDoppler.infectedsounds = pd.concat(
            [infectoDoppler.infectedsounds, row])

    def saveVersion():
        # Normalizes against audiomax and writes the final audio file.
        filename = infectoDoppler.vpath + "audio/" + infectoDoppler.vname
        infectoDoppler.ac.processAndSave(filename, infectoDoppler.samplerate,
                                         infectoDoppler.audiodata, \
                                         infectoDoppler.audiomax)

    def cleanFileList(filelist):
        # Drop hidden files (e.g. .DS_Store) in place.
        f = 0
        while (f < len(filelist)):
            if filelist[f].startswith("."):
                del filelist[f]
            else:
                f += 1

    def getInfectionDataList():
        # [day, total infected, currently infected, one counter per
        #  channel, infected ratio]
        l = []
        l.append(0)
        l.append(0)
        l.append(0)
        for c in range(infectoDoppler.channels):
            l.append(0)
        l.append(0.0)
        return l

    def saveSimulationData():
        infectoDoppler.epidemic.set_index("Day", inplace=True)
        infectoDoppler.epidemic.to_csv(infectoDoppler.vpath + "data/" +
                                       infectoDoppler.vname + "_ep.csv")
        infectoDoppler.infectedsounds.set_index("Number", inplace=True)
        infectoDoppler.infectedsounds.to_csv(infectoDoppler.vpath + "data/" +
                                             infectoDoppler.vname + "_inf.csv")

    def getInfectedSoundsDataframe():
        columns = [
            "Number", "Name", "Infections", "Channel", "CF", "I start",
            "I end", "Size"
        ]
        p = pd.DataFrame(columns=columns)
        p.index = p["Number"]
        p.index.name = "Number"
        return p

    def getEpidemicDataframe():
        columns = []
        columns.append("Day")
        columns.append("Total infected")
        columns.append("Infected")
        for c in range(infectoDoppler.channels):
            columns.append("Infected in " + str(c))
        columns.append("Infected ratio")
        p = pd.DataFrame(columns=columns)
        p.index = p["Day"]
        p.index.name = "Day"
        return p

    #Calculating time needed for simulation to finish...
    def getSimulationTime(startTime, endTime):
        time = endTime - startTime
        formatedTime = infectoDoppler.formatTime(time)
        return formatedTime

    def formatTime(time):
        # Render elapsed seconds as "MM:SS.ss".
        ms = ""
        minutes = time // 60
        seconds = time - minutes * 60
        seconds = round(seconds, 2)
        ms = "{:02d}".format(int(minutes))
        ms += ":"
        ms += "{:05.2f}".format(seconds)
        return ms

    def __str__(self):
        return "------------------------------------------\n" + \
               "---------- infectoD o p p l e r ----------\n" + \
               "- rodrigovalla.gitlab.io/infectodoppler --\n" + \
               "- gitlab.com/rodrigovalla/infectodoppler -\n" + \
               "------------ infectoDoppler --------------\n" + \
               "------------- Version: 0.95 --------------\n" + \
               "-- Version name: " + str(infectoDoppler.vname) + "\n" + \
               "-- Channels: " + str(infectoDoppler.channels) + "\n" + \
               "-- Population: " + str(infectoDoppler.population) + "\n" + \
               "-- Days: " + str(infectoDoppler.days) + "\n" + \
               "-- Day size: " + str(infectoDoppler.daysize) + "\n"
def delete_rule(self, key):
    """Delete the monitor rule identified by *key*.

    Issues a DELETE against ``monitors/<key>`` on the endpoint and wraps
    the raw result in a Response.
    """
    rule_url = urlparse.urljoin(self.endpoint.base_url, 'monitors/' + key)
    raw = self.endpoint.delete(rule_url)
    return Response(raw, self.endpoint)
class Server:
    """Minimal TCP/HTTP server that opens or closes a garage door on
    authenticated POST requests.

    Each accepted connection is read once, parsed as an HTTP request, and
    answered with one of the canned responses below.  A POST whose JSON
    body carries ``command`` (0 = open, 1 = close) and a ``key`` matching
    ``settings.MASTER_KEY`` triggers the corresponding garage action.
    """

    # Canned responses shared across all requests.
    BAD_REQUEST = Response(code=HTTPResponseCodes.BAD_REQUEST,
                           content_type=HTTPContentTypes.PLAIN,
                           data='Failure'.encode())
    UNAUTHORIZED = Response(code=HTTPResponseCodes.UNAUTHORIZED,
                            content_type=HTTPContentTypes.PLAIN,
                            data='Failure'.encode())
    # BUG FIX: previously built with HTTPResponseCodes.UNAUTHORIZED, making a
    # 405 indistinguishable from a 401.  Assumes HTTPResponseCodes defines
    # METHOD_NOT_ALLOWED -- confirm against the enum's declaration.
    METHOD_NOT_ALLOWED = Response(code=HTTPResponseCodes.METHOD_NOT_ALLOWED,
                                  content_type=HTTPContentTypes.PLAIN,
                                  data='Failure'.encode())
    SUCCESS = Response(code=HTTPResponseCodes.OK,
                       content_type=HTTPContentTypes.PLAIN,
                       data='Success'.encode())

    def __init__(self, host_ip=None, host_name=None, port=None):
        """Bind a listening socket on (host_ip, port).

        Defaults preserved from the original: host_name falls back to the
        local hostname, host_ip to the hard-coded LAN address, port to 7654.
        """
        if host_ip is None:
            if host_name is None:
                host_name = socket.gethostname()
            host_ip = "192.168.1.5"  #socket.gethostbyname(host_name)
        if port is None:
            port = 7654
        self.socket = socket.socket()
        self.host_ip = host_ip
        self.host_name = host_name
        self.port = port
        self.socket.bind((self.host_ip, self.port))
        self.socket.listen(5)

    def run(self):
        """Accept clients forever, handling one request per connection."""
        while True:
            client, address = self.socket.accept()
            message = client.recv(4096)
            self.communicate(client, address, message)

    def communicate(self, client, address, message):
        """Parse *message* as an HTTP request, act on it, reply, and close."""
        # PARSE REQUEST
        req = Request(msg=message)
        resp = None
        console.info(
            'Received {} request from {} at {} with {} parameters'.format(
                req.method.name, address[0], req.path,
                0 if req.params is None else len(req.params.keys())))
        # CHECK TO MAKE SURE POST REQUEST WAS RECEIVED
        if req.method == HTTPMethods.POST:
            # CHECK TO MAKE SURE DATA WAS RECEIVED
            if req.data is not None:
                # CHECK TO MAKE SURE JSON HAS CORRECT FIELDS
                if 'command' in req.data.keys() and 'key' in req.data.keys():
                    console.debug('Received command: {}'.format(
                        req.data['command']))
                    # AUTHENTICATE KEY
                    if req.data['key'] == settings.MASTER_KEY:
                        # CALL APPROPRIATE FUNCTION (OPEN/CLOSE)
                        if req.data['command'] == 0:
                            garage.open_g()
                            resp = self.SUCCESS
                        elif req.data['command'] == 1:
                            garage.close_g()
                            resp = self.SUCCESS
                        else:
                            resp = self.BAD_REQUEST
                    else:
                        resp = self.UNAUTHORIZED
                else:
                    resp = self.BAD_REQUEST
            else:
                resp = self.BAD_REQUEST
        else:
            resp = self.METHOD_NOT_ALLOWED
        # SEND RESPONSE
        client.send(resp.generate())
        # CLOSE CLIENT CONNECTION
        client.close()
def serve_file(sock: socket.socket, path: str) -> None:
    """Given a socket and the relative path to a file (relative to
    SERVER_ROOT), send that file to the socket if it exists.

    If the file doesn't exist, send a "404 Not Found" response.
    """
    if path == "/":
        path = "/index.html"

    abspath = os.path.normpath(os.path.join(SERVER_ROOT, path.lstrip("/")))
    # Reject anything that escapes SERVER_ROOT.  A bare startswith() check is
    # not enough: "/srv/wwwevil" starts with "/srv/www" yet lies outside it,
    # so require the root itself or a path below a separator-terminated root.
    if abspath != SERVER_ROOT and \
            not abspath.startswith(os.path.join(SERVER_ROOT, "")):
        response = Response(status="404 Not Found", content="Not Found")
        response.send(sock)
        return

    try:
        with open(abspath, "rb") as f:
            content_type, encoding = mimetypes.guess_type(abspath)
            if content_type is None:
                content_type = "application/octet-stream"
            if encoding is not None:
                content_type += f"; charset={encoding}"

            response = Response(status="200 OK", body=f)
            response.headers.add("content-type", content_type)
            response.send(sock)
            return
    except FileNotFoundError:
        response = Response(status="404 Not Found", content="Not Found")
        response.send(sock)
        return
def handle_client(self, client_sock: socket.socket,
                  client_addr: typing.Tuple[str, int]) -> None:
    """Serve a single HTTP request arriving on *client_sock*.

    Parses the request, honours "Expect: 100-continue", drains any request
    body, rejects non-GET methods with 405, and otherwise delegates to
    serve_file().  Any parse/serve failure is answered with 400.
    """
    with client_sock:
        try:
            request = Request.from_socket(client_sock)

            # Tell the client to proceed before reading its body.
            if "100-continue" in request.headers.get("expect", ""):
                interim = Response(status="100 Continue")
                interim.send(client_sock)

            try:
                content_length = int(
                    request.headers.get("content-length", "0"))
            except ValueError:
                content_length = 0

            if content_length:
                body = request.body.read(content_length)
                print("Request body", body)

            # Only GET is supported.
            if request.method != "GET":
                rejection = Response(status="405 Method Not Allowed",
                                     content="Method Not Allowed")
                rejection.send(client_sock)
                return

            serve_file(client_sock, request.path)
        except Exception as e:
            print(f"Failed to parse request: {e}")
            error = Response(status="400 Bad Request", content="Bad Request")
            error.send(client_sock)
# Handle one HTTP request on this connection (legacy Python 2 syntax:
# `except socket.timeout, e`).  Flow: read/validate the request line, let
# parse_request() send its own error responses on bad headers, apply server
# rewrites, route to a handler, and ensure exactly one response is written.
# NOTE(review): an over-long request line is only answered with 414 *after*
# a Response object exists, which is why the check is deferred until here.
def handle_one_request(self): response = None try: self.close_connection = False request_line_is_valid = self.get_request_line() if self.close_connection: return request_is_valid = self.parse_request() if not request_is_valid: #parse_request() actually sends its own error responses return self.server.rewriter.rewrite(self) request = Request(self) response = Response(self, request) if not request_line_is_valid: response.set_error(414) response.write() return logger.debug("%s %s" % (request.method, request.request_path)) handler = self.server.router.get_handler(request) if handler is None: response.set_error(404) else: try: handler(request, response) except HTTPException as e: response.set_error(e.code, e.message) except Exception as e: if e.message: err = e.message else: err = traceback.format_exc() response.set_error(500, err) logger.info( "%i %s %s (%s) %i" % (response.status[0], request.method, request.request_path, request.headers.get('Referer'), request.raw_input.length)) if not response.writer.content_written: response.write() except socket.timeout, e: self.log_error("Request timed out: %r", e) self.close_connection = 1 return
def inner(*args, **kwargs):
    """Call the wrapped function and wrap truthy results in a Response.

    Falsey results (None, '', 0, ...) are passed through unchanged so the
    caller can distinguish "no result" from an actual response object.
    """
    outcome = func(*args, **kwargs)
    if not outcome:
        # don't turn falsey values into a response object
        return outcome
    return Response(outcome)
# PbD Interaction constructor: builds the World/Arms/Session components,
# registers the ROS publishers/subscribers, initialises trajectory-recording
# state, wires a Command -> Response dispatch table, spawns the update-loop
# thread, and registers SIGINT/SIGQUIT and rospy shutdown handlers.
# NOTE(review): the source below is whitespace-mangled; the responses dict
# literal continues onto the next original line.
def __init__(self): # Create main components. self.world = World() self.arms = Arms(World.tf_listener) self.session = Session(object_list=self.world.get_frame_list()) # ROS publishers and subscribers. self._viz_publisher = rospy.Publisher('visualization_marker_array', MarkerArray) self._arm_reset_publisher = rospy.Publisher('arm_control_reset', String) self._action_state_feedback = rospy.Publisher('action_state_feedback', String) rospy.Subscriber('/action/perform_action', Command, self._action_command_cb) # Initialize trajectory recording state. self._is_recording_motion = False self._arm_trajectory = None self._trajectory_start_time = None # This is the main mechanism by which code is executed. A # Response as a combination of a function to call and a # parameter. Responses are created here to be triggered by # commands. Once a Response is respond(...)ed, a robot speech # utterance and a gaze action are created and then executed. # (This happens internally in Response.respond(...)). self.responses = { Command.TEST_MICROPHONE: Response(self._empty_response, [RobotSpeech.TEST_RESPONSE, GazeGoal.NOD]), Command.RELAX_RIGHT_ARM: Response(self._relax_arm, Side.RIGHT), Command.RELAX_LEFT_ARM: Response(self._relax_arm, Side.LEFT), Command.OPEN_RIGHT_HAND: Response(self._open_hand, Side.RIGHT), Command.OPEN_LEFT_HAND: Response(self._open_hand, Side.LEFT), Command.CLOSE_RIGHT_HAND: Response(self._close_hand, Side.RIGHT), Command.CLOSE_LEFT_HAND: Response(self._close_hand, Side.LEFT), Command.STOP_EXECUTION: Response(self._stop_execution, None), Command.DELETE_ALL_STEPS: Response(self._delete_all_steps, None), Command.DELETE_LAST_STEP: Response(self._delete_last_step, None), Command.FREEZE_RIGHT_ARM: Response(self._freeze_arm, Side.RIGHT), Command.FREEZE_LEFT_ARM: Response(self._freeze_arm, Side.LEFT), Command.CREATE_NEW_ACTION: Response(self._create_action, None), Command.EXECUTE_ACTION: Response(self._execute_action, None), Command.NEXT_ACTION: Response(self._next_action, 
None), Command.PREV_ACTION: Response(self._previous_action, None), Command.SAVE_POSE: Response(self._save_step, None), Command.RECORD_OBJECT_POSE: Response(self._record_object_pose, None), Command.START_RECORDING_MOTION: Response(self._start_recording, None), Command.STOP_RECORDING_MOTION: Response(self._stop_recording, None) } # Span off a thread to run the update loops. threading.Thread(group=None, target=self.update, name='interaction_update_thread').start() # Register signal handlers for program termination. # TODO(mbforbes): Test that these are really catching the # signals. I think we might have to pass disable_signals=True to # rospy.init_node(...), though I'm not sure. signal.signal(signal.SIGINT, self._signal_handler) signal.signal(signal.SIGQUIT, self._signal_handler) rospy.on_shutdown(self._on_shutdown) # The PbD backend is ready. rospy.loginfo('Interaction initialized 1.03') self.world.update_object_pose() self._ping_srv = rospy.Service('interaction_ping', Ping, self._interaction_ping)
# NOTE(review): pole/zero tables (complex values, presumably rad/s) for
# instrument responses; the list statement opening this chunk is truncated
# above.  The script builds a Response for instrument 'SFJD_10', derives the
# A0 normalisation factor at norm_freq = 1.0 Hz via check_normalization,
# sanity-checks that applying 1/n yields ~1, and then calls
# paz_to_freq_resp(...) -- that final call is truncated at the chunk end.
-9.424780E+00 + 0.000000E+00j, -6.283190E+02 + 0.000000E+00j, -5.654870E+02 + 9.794520E+02j, -5.654870E+02 + -9.794520E+02j, 0.000000E+00 + 0.000000E+00j, 0.000000E+00 + 0.000000E+00j ] # this is from sensor test suite zer3 = [0.000000E+00 + 0.000000E+00j, 0.000000E+00 + 0.000000E+00j] pol3 = [ -1.263000E-02 + 1.265000E-02j, -1.263000E-02 + -1.265000E-02j, -3.620107E+01 + 6.850121E+01j, -3.620107E+01 + -6.850121E+01j ] instName = 'SFJD_10' print("calculating info for a " + instName) inst1 = Response(desc=instName, units='Radians') inst1.zeros = zer1 inst1.poles = pol1 #norm_freq=0.02 norm_freq = 1.0 n1, f1 = inst1.check_normalization(freq=norm_freq, nfft=2**26, t_sample=0.001) scale_fac = 1.0 / n1 print('The A0 norm factor is: ' + str(scale_fac) + ' for f=' + str(norm_freq)) #check the value inst1.a0 = 1.0 / n1 A01 = inst1.a0 n, f = inst1.check_normalization(freq=norm_freq, nfft=2**26, t_sample=0.001) print('This should be close to 1: ' + str(1.0 / n)) h1, f1 = paz_to_freq_resp(inst1.poles, inst1.zeros, scale_fac,
def replacement(*args, **kwargs):
    """Catch-all stub handler: every invocation answers 501 Not Implemented."""
    not_implemented = 501
    return Response(not_implemented)
class Resource(object):
    """Chainable wrapper around a Usergrid REST resource path.

    Attribute (and item) access builds child resources -- ``api.users.fred``
    maps to ``<base>/users/fred`` -- and the HTTP verb methods issue requests
    against the accumulated path, wrapping each result in a Response.
    """

    API_URL = 'https://api.usergrid.com'
    HEADERS = {
        'content-type': 'application/json'
    }

    def __init__(self, path=API_URL, headers=None, entity=None):
        self.path = path
        # Copy the class default instead of aliasing it: previously every
        # instance shared the one HEADERS dict, so setting one resource's
        # Authorization header leaked into all others.  A caller-supplied
        # dict is still shared by reference (child resources created in
        # `entities` rely on inheriting the parent's headers).
        self.headers = headers or dict(Resource.HEADERS)
        self.entity = entity

    def __getattr__(self, item):
        # Unknown attribute -> child resource one path segment deeper.
        path = os.path.join(self.path, item)
        return Resource(path)

    def __getitem__(self, item):
        # Subscript access mirrors attribute access.  login() relies on
        # self['token'], which previously raised TypeError because no
        # __getitem__ was defined.
        path = os.path.join(self.path, item)
        return Resource(path)

    def __repr__(self):
        return '{}<{}>'.format(__name__, self.path)

    def __str__(self):
        return unicode({self.path: self.entity})

    @property
    def access_token(self):
        # Return the bare token string, or None when no Authorization header
        # is set.  The old version returned the rsplit() *list*, which is
        # always truthy, so its `or None` fallback could never fire.
        return self.headers.get('Authorization', '').rsplit('Bearer ')[-1] \
            or None

    @access_token.setter
    def access_token(self, value):
        self.headers.update({'Authorization': 'Bearer {}'.format(value)})

    @access_token.deleter
    def access_token(self):
        self.headers.pop('Authorization', None)

    def get(self, **params):
        """GET this resource; returns the wrapped Response (raises on error)."""
        response = requests.get(self.path, params=params, headers=self.headers)
        self.response = Response(self, response)
        self.response.raise_if_error()
        return self.response

    def post(self, data=None, **params):
        """POST JSON-encoded *data* to this resource."""
        data = utils.jsonify_data(data)
        response = requests.post(self.path, data=data, params=params,
                                 headers=self.headers)
        self.response = Response(self, response)
        self.response.raise_if_error()
        return self.response

    def put(self, data=None, **params):
        """PUT JSON-encoded *data* to this resource."""
        data = utils.jsonify_data(data)
        response = requests.put(self.path, data=data, params=params,
                                headers=self.headers)
        self.response = Response(self, response)
        self.response.raise_if_error()
        return self.response

    def delete(self, **params):
        """DELETE this resource."""
        response = requests.delete(self.path, params=params,
                                   headers=self.headers)
        self.response = Response(self, response)
        self.response.raise_if_error()
        return self.response

    def query(self, query=None, options=None):
        """GET with an optional 'ql' query-language filter."""
        options = options or {}
        if query:
            # dict.update() returns None, so the old
            # `options = options.update({...})` wiped the dict and crashed
            # on self.get(**None) whenever a query was supplied.
            options['ql'] = query
        return self.get(**options)

    def update_query(self, updates, query=None, options=None):
        """PUT *updates* against the entities matched by the optional query."""
        options = options or {}
        if query:
            # Same dict.update() bug as in query() above.
            options['ql'] = query
        return self.put(data=updates, **options)

    def login(self, username, password):
        """Obtain and store an OAuth token via the password grant."""
        entity = self['token'].get(grant_type='password', username=username,
                                   password=password)
        # NOTE(review): `entity` is the wrapped Response here; assumes it
        # supports item access for 'access_token' -- confirm.
        self.access_token = entity['access_token']

    def save(self):
        """Persist this resource's entity with a PUT."""
        self.put(self.entity)

    @property
    def entities(self):
        """Yield a child Resource for each entity in the last response."""
        for entity in self.response.entities:
            yield Resource(entity['uri'], self.headers, entity=entity)
def info():
    """Report current CPU and memory utilisation as a JSON response."""
    usage = {
        'cpu_use': psutil.cpu_percent(),
        'mem_use': psutil.virtual_memory().percent,
    }
    return Response(data=usage).to_json()
def get_annotations(self, key):
    """Fetch the annotations recorded for monitor *key*.

    Performs a GET on ``monitors/annotations/<key>`` and wraps the raw
    result in a Response.
    """
    annotations_url = urlparse.urljoin(self.endpoint.base_url,
                                       'monitors/annotations/' + key)
    raw = self.endpoint.get(annotations_url)
    return Response(raw, self.endpoint)
# Async static-file request handler: parse the raw request bytes, reject
# malformed requests (400), disallowed methods (405), and paths escaping the
# document root (403); map directories to the index file (403 if the index is
# missing, 404 otherwise); answer HEAD with headers only and GET with one
# buffer's worth of file content.
# NOTE(review): `sock_sendall(client, None)` looks like it would raise
# TypeError on a real socket -- confirm whether it is intended as a
# connection terminator.  Also, `self.document_root not in pathToFile` is a
# substring test, not a prefix test -- verify against the traversal cases it
# must block.
async def request(self, client, addr): request = (await self.loop.sock_recv( client, self.SOCKET_BUFFER_SIZE)).decode('utf-8') if not self.reqParser(request): response = Response(400, setHeaders()) await self.loop.sock_sendall(client, str(response).encode('utf-8')) await self.loop.sock_sendall(client, None) client.close() return if self.reqParser.requestLine.method not in self.ALLOWED_METHODS: response = Response(405, setHeaders()) await self.loop.sock_sendall(client, str(response).encode('utf-8')) await self.loop.sock_sendall(client, None) client.close() return pathToFile = os.path.abspath( os.path.join(self.document_root, self.reqParser.requestLine.path)) pathToFile = unquote(pathToFile) incorrectFile = (not pathToFile.endswith('/') ) and self.reqParser.requestLine.path.endswith('/') if self.document_root not in pathToFile: response = Response(403, setHeaders()) await self.loop.sock_sendall(client, str(response).encode('utf-8')) await self.loop.sock_sendall(client, None) client.close() return isAdded = False if os.path.isdir(pathToFile): isAdded = True pathToFile = os.path.join( pathToFile, self.reqParser.requestLine.INDEX_FILE_NAME) if not os.path.exists(pathToFile): if isAdded: response = Response(403, setHeaders()) else: response = Response(404, setHeaders()) await self.loop.sock_sendall(client, str(response).encode('utf-8')) await self.loop.sock_sendall(client, None) else: if incorrectFile and not isAdded: response = Response(404, setHeaders()) await self.loop.sock_sendall(client, str(response).encode('utf-8')) await self.loop.sock_sendall(client, None) else: headers = setHeaders() headers.append( ('Content-Length', str(os.path.getsize(pathToFile)))) mimeType, _ = mimetypes.guess_type(pathToFile) headers.append(('Content-Type', mimeType)) response = Response(200, headers) if self.reqParser.requestLine.method == 'HEAD': await self.loop.sock_sendall(client, str(response).encode('utf-8')) await self.loop.sock_sendall(client, None) else: with open(pathToFile, 
'rb') as fp: await self.loop.sock_sendall( client, str(response).encode('utf-8')) await self.loop.sock_sendall( client, fp.read(self.SOCKET_BUFFER_SIZE)) client.close()
# Dispatch POST requests by path: register / activate / login / forgot /
# reset / create_note / upload_profile_pic / collaborator.  The /activate
# and /reset branches first extract a `token` query parameter and normalise
# self.path; each handler branch delegates validation to User()/Note() and
# the action to the matching service, then JSON-encodes the result via
# Response(self).jsonResponse(...).
# NOTE(review): `query_components` is only bound on the /activate and /reset
# branches -- confirm the later reads of it cannot be reached otherwise.
# The branches use independent `if`s (not elif), so self.path rewriting
# determines which handler fires.
def do_POST(self): print(self.path.split('/?')[0]) is_matched = self.path.split('/?')[0] if is_matched == '/activate': from urllib.parse import urlparse query = urlparse(self.path).query query_components = dict(qc.split("=") for qc in query.split("&")) # print(query_components, "query_components----->", query) if query_components['token']: self.path = '/activate' elif is_matched == '/reset': from urllib.parse import urlparse query = urlparse(self.path).query query_components = dict(qc.split("=") for qc in query.split("&")) # print(query_components, "query_components----->", query) if query_components['token']: self.path = '/reset' if self.path == '/register': response = User().register(that=self) if response['success']: user_data = response['data'][0] response = UserService().register(user_data=user_data, that=self) Response(self).jsonResponse(status=200, data=response) if self.path == '/activate': response = UserService().activate(token=query_components['token']) return Response(self).jsonResponse(status=200, data=response) if self.path == '/login': response = User().login(that=self) if response['success']: user_data = response['data'][0] response = UserService().login(user_data=user_data) Response(self).jsonResponse(status=200, data=response) if self.path == '/forgot': response = User().forgot(that=self) if response['success']: user_data = response['data'][0] response = UserService().forgot(user_data=user_data, that=self) Response(self).jsonResponse(status=200, data=response) if self.path == '/reset': response = User().reset(that=self) if response['success']: user_data = response['data'][0] response = UserService().reset(token=query_components['token'], user_data=user_data) Response(self).jsonResponse(status=200, data=response) if self.path == '/create_note': response = Note().create_note(that=self) if response['success']: user_data = response['data'][0] response = NoteService().create_note(user_data=user_data) Response(self).jsonResponse(status=200, 
data=response) if self.path == '/upload_profile_pic': response = NoteService().upload_photo(that=self) Response(self).jsonResponse(status=200, data=response) if self.path == '/collaborator': response = Note().collaborator(that=self) if response['success']: user_data = response['data'][0] response = NoteService().collaborator(user_data=user_data) Response(self).jsonResponse(status=200, data=response)
# Demo driver for the ARAX messenger: create a Response log and a default
# Message, add a series of example QNodes and QEdges (dumping the DEBUG-level
# log and returning early on any non-OK status), optionally exercise
# reassign_curies (disabled behind `if 0:`), then print the final log and the
# resulting message as sorted, indented JSON.
def main(): #### Create a response object response = Response() #### Create a default ARAX Message messenger = ARAXMessenger() result = messenger.create_message() response.merge(result) if result.status != 'OK': print(response.show(level=Response.DEBUG)) return response message = messenger.message #### Some qnode examples parameters_sets = [ # { 'curie': 'DOID:9281'}, { 'curie': 'Orphanet:673' }, { 'name': 'acetaminophen' }, { 'curie': 'NCIT:C198' }, { 'curie': 'CUI:C4710278' }, { 'type': 'protein', 'id': 'n10' }, { 'curie': ['UniProtKB:P14136', 'UniProtKB:P35579'] }, { 'curie': ['UniProtKB:P14136', 'UniProtKB:P35579'], 'is_set': 'false' }, ] for parameter in parameters_sets: #### Add a QNode result = messenger.add_qnode(message, parameter) response.merge(result) if result.status != 'OK': print(response.show(level=Response.DEBUG)) return response #### Some qedge examples parameters_sets = [ { 'source_id': 'n00', 'target_id': 'n01' }, { 'source_id': 'n01', 'target_id': 'n10', 'type': 'treats' }, ] for parameter in parameters_sets: #### Add a QEdge result = messenger.add_qedge(message, parameter) response.merge(result) if result.status != 'OK': print(response.show(level=Response.DEBUG)) return response if 0: result = messenger.reassign_curies(message, { 'knowledge_provider': 'KG1', 'mismap_result': 'WARNING' }) response.merge(result) if result.status != 'OK': print(response.show(level=Response.DEBUG)) return response #### Show the final result print(response.show(level=Response.DEBUG)) print(json.dumps(ast.literal_eval(repr(message)), sort_keys=True, indent=2))
# Router: maps versioned API namespaces onto a WSGI-callable dispatcher
# (legacy Python 2: iteritems/has_key/`raise E, msg`/`except E, e`).
# - __init__ validates the keyword configuration, sets up logging and
#   optional profiling, and registers the given namespaces.
# - handle_request is the WSGI entry point: path filtering, a
#   MAX_CONTENT_LENGTH cap, method filtering against TRIGGERED_METHODS,
#   GET/POST merging (first GET value wins), then delegates to __call__.
# - serve runs a wsgiref dev server; profile_* wrap cProfile/pstats.
# - _redefine_default_namespace points 'default' at the highest version.
# - add_namespace introspects a namespace class: published functions with
#   their argspecs, constraints, allowed methods and formatters; plus
#   authentication, IP restrictions, wrappers, and feature classes.
# - __call__ resolves version/formatters/wrapper, builds a Request per
#   wrapper-parsed item, merges the responses, and logs a full traceback
#   (dropping into pdb when debugging) on unexpected errors.
# NOTE(review): this chunk is whitespace-mangled and __call__ is truncated
# at the end of the chunk; several original line breaks fall mid-statement.
class Router(object): def __init__(self, *namespaces, **kwargs): """Takes at least one namespace. """ self.name = kwargs.pop('name', str(id(self))) self.logger = logging.getLogger("simpleapi.%s" % self.name) self.nmap = {} self.debug = kwargs.pop('debug', False) self.ignore_unused_args = kwargs.pop('ignore_unused_args', False) if self.debug and not has_debug: self.debug = False warnings.warn( "Debugging disabled since packages pstats/cProfile not found (maybe you have to install it)." ) self.restful = kwargs.pop('restful', False) self.framework = kwargs.pop('framework', 'django') self.path = re.compile(kwargs.pop('path', r'^/')) assert len(kwargs) == 0, u'Unknown Route configuration(s) (%s)' % \ ", ".join(kwargs.keys()) # make shortcut self._caller = self.__call__ assert self.framework in FRAMEWORKS assert (self.debug ^ SIMPLEAPI_DEBUG) or \ not (self.debug and SIMPLEAPI_DEBUG), \ u'You can either activate Route-debug or simpleapi-debug, not both.' if self.debug or SIMPLEAPI_DEBUG: self.logger.setLevel(logging.DEBUG) handler = logging.StreamHandler() formatter = logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s") handler.setFormatter(formatter) self.logger.addHandler(handler) else: self.logger.setLevel(logging.WARNING) if SIMPLEAPI_DEBUG and SIMPLEAPI_DEBUG_LEVEL == 'all': self.profile_start() for namespace in namespaces: self.add_namespace(namespace) def handle_request(self, environ, start_response): if not self.path.match(environ.get('PATH_INFO')): status = '404 Not found' start_response(status, []) return ["Entry point not found"] else: content_type = environ.get('CONTENT_TYPE') try: content_length = int(environ['CONTENT_LENGTH']) except (KeyError, ValueError): content_length = 0 # make sure we ignore too large requests for security and stability # reasons if content_length > MAX_CONTENT_LENGTH: status = '413 Request entity too large' start_response(status, []) return ["Request entity too large"] request_method = 
environ.get('REQUEST_METHOD', '').lower() # make sure we only support methods we care if not request_method in TRIGGERED_METHODS: status = '501 Not Implemented' start_response(status, []) return ["Not Implemented"] query_get = urlparse.parse_qs(environ.get('QUERY_STRING')) for key, value in query_get.iteritems(): query_get[key] = value[0] # respect the first value only query_post = {} if content_type in [ 'application/x-www-form-urlencoded', 'application/x-url-encoded' ]: post_env = environ.copy() post_env['QUERY_STRING'] = '' fs = cgi.FieldStorage(fp=environ['wsgi.input'], environ=post_env, keep_blank_values=True) query_post = {} for key in fs: query_post[key] = fs.getvalue(key) elif content_type == 'multipart/form-data': # XXX TODO raise NotImplementedError, u'Currently not supported.' # GET + POST query_data = query_get query_data.update(query_post) # Make request request = StandaloneRequest() request.method = request_method request.data = query_data request.remote_addr = environ.get('REMOTE_ADDR', '') # Make call result = self._caller(request) status = '200 OK' headers = [('Content-type', result['mimetype'])] start_response(status, headers) return [ result['result'], ] def serve(self, host='', port=5050): httpd = make_server(host, port, self.handle_request) self.logger.info(u"Started serving on port %d..." 
% port) try: httpd.serve_forever() except KeyboardInterrupt: self.logger.info(u"Server stopped.") def profile_start(self): assert has_debug self.profile = cProfile.Profile() self.profile.enable() def profile_stop(self): assert has_debug self.profile.disable() self.profile.dump_stats(SIMPLEAPI_DEBUG_FILENAME) def profile_stats(self): assert has_debug self.logger.debug(u"Loading stats...") stats = pstats.Stats(SIMPLEAPI_DEBUG_FILENAME) stats.strip_dirs().sort_stats('time', 'calls') \ .print_stats() def __del__(self): if SIMPLEAPI_DEBUG and SIMPLEAPI_DEBUG_LEVEL == 'all': self.profile_stop() self.profile_stats() def is_standalone(self): return self.framework in ['standalone', 'wsgi'] def is_dummy(self): return self.framework == 'dummy' def is_appengine(self): return self.framework == 'appengine' def is_flask(self): return self.framework == 'flask' def is_django(self): return self.framework == 'django' def _redefine_default_namespace(self): # - recalculate default namespace version - # if map has no default version, determine namespace with the # highest version if self.nmap.has_key('default'): del self.nmap['default'] self.nmap['default'] = self.nmap[max(self.nmap.keys())] def remove_namespace(self, version): if self.nmap.has_key(version): del self.nmap[version] self._redefine_default_namespace() return True else: return False def add_namespace(self, namespace): version = getattr(namespace, '__version__', 1) assert isinstance(version, int), \ u'version must be either an integer or not set' # make sure no version is assigned twice assert not self.nmap.has_key(version), u'version is assigned twice' allowed_functions = [] # check for introspection allowed if getattr(namespace, '__introspection__', False): allowed_functions.append('introspect') # determine public and published functions functions = filter( lambda item: '__' not in item[0] and item[0] not in restricted_functions and ((getattr(item[1], 'published', False) == True) or item[0] in allowed_functions), 
inspect.getmembers(namespace)) # determine arguments of each function functions = dict(functions) for function_name, function_method in functions.iteritems(): # check for reserved function names assert function_name not in ['error', '__init__', 'get_name'],\ u'Name %s is reserved.' % function_name # ArgSpec(args=['self', 'a', 'b'], varargs=None, keywords=None, defaults=None) raw_args = inspect.getargspec(function_method) # does the function allows kwargs? kwargs_allowed = raw_args[2] is not None # get all arguments all_args = raw_args[0][1:] # exclude `self´ # build a dict of optional arguments if raw_args[3] is not None: default_args = zip(raw_args[0][-len(raw_args[3]):], raw_args[3]) default_args = dict(default_args) else: default_args = {} # build a list of obligatory arguments obligatory_args = list(set(all_args) - set(default_args.keys())) # determine constraints for function if hasattr(function_method, 'constraints'): constraints = function_method.constraints assert isinstance(constraints, dict) or callable(constraints) if isinstance(constraints, dict): def check_constraint(constraints): def check(namespace, key, value): constraint = constraints.get(key) if not constraint: return value if hasattr(constraint, 'match'): if constraint.match(value): return value else: raise ValueError( u'%s does not match constraint') else: if isinstance(constraint, bool): return bool(int(value)) else: return constraint(value) return check constraint_function = check_constraint(constraints) elif callable(constraints): constraint_function = constraints else: constraints = None constraint_function = lambda namespace, key, value: value # determine allowed methods if hasattr(function_method, 'methods'): allowed_methods = function_method.methods assert isinstance(allowed_methods, (list, tuple)) method_function = lambda method, methods: method in methods else: allowed_methods = None method_function = lambda method, methods: True # determine format format = getattr(function_method, 
'format', lambda val: val) functions[function_name] = { 'method': function_method, 'name': function_name, 'args': { 'raw': raw_args, 'all': all_args, 'obligatory': obligatory_args, 'defaults': default_args, 'kwargs_allowed': kwargs_allowed }, 'constraints': { 'function': constraint_function, 'raw': constraints, }, 'format': format, 'methods': { 'function': method_function, 'allowed_methods': allowed_methods, } } # configure authentication if hasattr(namespace, '__authentication__'): authentication = namespace.__authentication__ if isinstance(authentication, basestring): if hasattr(namespace, authentication): authentication = getattr(namespace, authentication) else: authentication = lambda namespace, access_key: \ namespace.__authentication__ == access_key else: # grant allow everyone access authentication = lambda namespace, access_key: True # configure ip address based access rights if hasattr(namespace, '__ip_restriction__'): ip_restriction = namespace.__ip_restriction__ assert isinstance(ip_restriction, list) or callable(ip_restriction) if isinstance(ip_restriction, list): # make the ip address list wildcard searchable namespace.__ip_restriction__ = \ glob_list(namespace.__ip_restriction__) # restrict access to the given ip address list ip_restriction = lambda namespace, ip: ip in \ namespace.__ip_restriction__ else: # accept every ip address ip_restriction = lambda namespace, ip: True # configure input formatters input_formatters = formatters.copy() allowed_formatters = getattr(namespace, '__input__', formatters.get_defaults()) input_formatters = filter(lambda i: i[0] in allowed_formatters, input_formatters.items()) input_formatters = dict(input_formatters) # configure output formatters output_formatters = formatters.copy() allowed_formatters = getattr(namespace, '__output__', formatters.get_defaults()) output_formatters = filter(lambda i: i[0] in allowed_formatters, output_formatters.items()) output_formatters = dict(output_formatters) # configure wrappers 
useable_wrappers = wrappers.copy() if hasattr(namespace, '__wrapper__'): allowed_wrapper = namespace.__wrapper__ useable_wrappers = filter(lambda i: i[0] in allowed_wrapper, useable_wrappers.items()) useable_wrappers = dict(useable_wrappers) self.nmap[version] = { 'class': namespace, 'functions': functions, 'ip_restriction': ip_restriction, 'authentication': authentication, 'input_formatters': input_formatters, 'output_formatters': output_formatters, 'wrappers': useable_wrappers, } # set up all features features = [] if hasattr(namespace, '__features__'): raw_features = namespace.__features__ for feature in raw_features: assert isinstance(feature, basestring) or \ issubclass(feature, Feature) if isinstance(feature, basestring): assert feature in __features__.keys(), \ u'%s is not a built-in feature' % feature features.append(__features__[feature](self.nmap[version])) elif issubclass(feature, Feature): features.append(feature(self.nmap[version])) self.nmap[version]['features'] = features self._redefine_default_namespace() return version def __call__(self, http_request=None, **urlparameters): sapi_request = SAPIRequest(self, http_request) request_items = dict(sapi_request.REQUEST.items()) request_items.update(urlparameters) if SIMPLEAPI_DEBUG and SIMPLEAPI_DEBUG_LEVEL == 'call': self.logger.info(pprint.pformat(request_items)) self.profile_start() version = request_items.pop('_version', 'default') callback = request_items.pop('_callback', None) output_formatter = request_items.pop('_output', None) # let's activate JSONP automatically if _callback is given if callback and not output_formatter: output_formatter = 'jsonp' elif not output_formatter: output_formatter = 'json' input_formatter = request_items.pop('_input', 'value') wrapper = request_items.pop('_wrapper', 'default') mimetype = request_items.pop('_mimetype', None) input_formatter_instance = None output_formatter_instance = None wrapper_instance = None try: try: version = int(version) except (ValueError, 
TypeError): pass if not self.nmap.has_key(version): # continue with wrong version to get the formatters/wrappers # raise the error later! namespace = self.nmap['default'] else: namespace = self.nmap[version] # check input formatter if input_formatter not in namespace['input_formatters']: raise RequestException(u'Input formatter not allowed or ' \ 'unknown: %s' % input_formatter) # get input formatter input_formatter_instancec = namespace['input_formatters'][ input_formatter](sapi_request, callback) # check output formatter if output_formatter not in namespace['output_formatters']: raise RequestException(u'Output formatter not allowed or ' \ 'unknown: %s' % output_formatter) # get output formatter output_formatter_instance = namespace['output_formatters'][ output_formatter](sapi_request, callback) # check wrapper if wrapper not in namespace['wrappers']: raise RequestException(u'Wrapper unknown or not allowed: %s' % \ wrapper) # get wrapper wrapper_instance = namespace['wrappers'][wrapper] # check whether version exists or not if not self.nmap.has_key(version): raise RouterException(u'Version %s not found (possible: %s)' % \ (version, ", ".join(map(lambda i: str(i), self.nmap.keys())))) request = Request( sapi_request=sapi_request, namespace=namespace, input_formatter=input_formatter_instancec, output_formatter=output_formatter_instance, wrapper=wrapper_instance, callback=callback, mimetype=mimetype, restful=self.restful, debug=self.debug, route=self, ignore_unused_args=self.ignore_unused_args, ) # map request items to the correct names wi = wrapper_instance(sapi_request=sapi_request) request_items = wi._parse(request_items) if not isinstance(request_items, (list, tuple, types.GeneratorType)): request_items = [ request_items, ] responses = [] for request_item in request_items: # clear session (except _internal) sapi_request.session.clear() # process request try: responses.append(request.process_request(request_item)) except (NamespaceException, RequestException, \ 
ResponseException, RouterException, FeatureException),e: response = Response( sapi_request, errors=e.message, output_formatter=output_formatter_instance, wrapper=wrapper_instance, mimetype=mimetype) responses.append(response) rm = ResponseMerger( sapi_request=sapi_request, responses=responses, ) http_response = rm.build() except Exception, e: if isinstance(e, (NamespaceException, RequestException, \ ResponseException, RouterException, \ FeatureException)): err_msg = repr(e) else: err_msg = u'An internal error occurred during your request.' trace = inspect.trace() msgs = [] msgs.append('') msgs.append(u"******* Exception raised *******") msgs.append(u'Exception type: %s' % type(e)) msgs.append(u'Exception msg: %s' % repr(e)) msgs.append('') msgs.append(u'------- Traceback follows -------') for idx, item in enumerate(trace): msgs.append(u"(%s)\t%s:%s (%s)" % (idx + 1, item[3], item[2], item[1])) if item[4]: for line in item[4]: msgs.append(u"\t\t%s" % line.strip()) msgs.append('') # blank line msgs.append(' -- End of traceback -- ') msgs.append('') self.logger.error("\n".join(msgs)) if self.debug: e, m, tb = sys.exc_info() pdb.post_mortem(tb) response = Response(sapi_request, errors=err_msg, output_formatter=output_formatter_instance, wrapper=wrapper_instance, mimetype=mimetype) http_response = response.build(skip_features=True)
def get_usage(self, key):
    """Fetch usage information for the monitor identified by *key*.

    Builds the usage URL relative to the endpoint's base URL, performs a
    GET through the endpoint, and wraps the raw result in a Response.
    """
    usage_url = urlparse.urljoin(self.endpoint.base_url, 'monitors/%s/usage/' % key)
    raw = self.endpoint.get(usage_url)
    return Response(raw, self.endpoint)
def _send(self, response: Response):
    """Serialize *response*, write it to its client socket, then close the socket.

    Note: the socket is always closed after a single send (one-shot reply).
    """
    payload = response.serialize()
    sock = response.client_socket
    sock.send(payload)
    sock.close()
def __call__(self, http_request=None, **urlparameters):
    """Route a single HTTP request through the simpleapi pipeline.

    Resolves the requested namespace version plus the input/output
    formatters and wrapper, builds a Request, processes each request item,
    and merges the per-item responses into one HTTP response.

    NOTE(review): this is Python 2 code (`dict.has_key`, `except E, e`
    syntax). The source chunk is truncated/scrambled: the outer ``try:``
    has no matching ``except`` within this view, and indentation below was
    reconstructed from the mangled one-line original — verify against the
    upstream file.
    """
    sapi_request = SAPIRequest(self, http_request)
    request_items = dict(sapi_request.REQUEST.items())
    request_items.update(urlparameters)

    if SIMPLEAPI_DEBUG and SIMPLEAPI_DEBUG_LEVEL == 'call':
        self.logger.info(pprint.pformat(request_items))
        # presumably profiling only in debug mode — reconstruction, TODO confirm nesting
        self.profile_start()

    # reserved underscore-prefixed parameters are popped out of the payload
    version = request_items.pop('_version', 'default')
    callback = request_items.pop('_callback', None)
    output_formatter = request_items.pop('_output', None)
    # let's activate JSONP automatically if _callback is given
    if callback and not output_formatter:
        output_formatter = 'jsonp'
    elif not output_formatter:
        output_formatter = 'json'
    input_formatter = request_items.pop('_input', 'value')
    wrapper = request_items.pop('_wrapper', 'default')
    mimetype = request_items.pop('_mimetype', None)

    input_formatter_instance = None
    output_formatter_instance = None
    wrapper_instance = None

    try:
        try:
            version = int(version)
        except (ValueError, TypeError):
            pass
        if not self.nmap.has_key(version):
            # continue with wrong version to get the formatters/wrappers
            # raise the error later!
            namespace = self.nmap['default']
        else:
            namespace = self.nmap[version]

        # check input formatter
        if input_formatter not in namespace['input_formatters']:
            raise RequestException(u'Input formatter not allowed or ' \
                'unknown: %s' % input_formatter)
        # get input formatter
        # NOTE(review): 'instancec' looks like a typo for 'instance';
        # kept as-is because the name is used again below.
        input_formatter_instancec = namespace['input_formatters'][
            input_formatter](sapi_request, callback)

        # check output formatter
        if output_formatter not in namespace['output_formatters']:
            raise RequestException(u'Output formatter not allowed or ' \
                'unknown: %s' % output_formatter)
        # get output formatter
        output_formatter_instance = namespace['output_formatters'][
            output_formatter](sapi_request, callback)

        # check wrapper
        if wrapper not in namespace['wrappers']:
            raise RequestException(u'Wrapper unknown or not allowed: %s' % \
                wrapper)
        # get wrapper
        wrapper_instance = namespace['wrappers'][wrapper]

        # check whether version exists or not
        # NOTE(review): duplicates the has_key check above; the first check
        # deliberately deferred this error so formatters could be resolved.
        if not self.nmap.has_key(version):
            raise RouterException(u'Version %s not found (possible: %s)' % \
                (version, ", ".join(map(lambda i: str(i), self.nmap.keys()))))

        request = Request(
            sapi_request=sapi_request,
            namespace=namespace,
            input_formatter=input_formatter_instancec,
            output_formatter=output_formatter_instance,
            wrapper=wrapper_instance,
            callback=callback,
            mimetype=mimetype,
            restful=self.restful,
            debug=self.debug,
            route=self,
            ignore_unused_args=self.ignore_unused_args,
        )

        # map request items to the correct names
        wi = wrapper_instance(sapi_request=sapi_request)
        request_items = wi._parse(request_items)
        if not isinstance(request_items, (list, tuple, types.GeneratorType)):
            request_items = [request_items, ]

        responses = []
        for request_item in request_items:
            # clear session (except _internal)
            sapi_request.session.clear()
            # process request
            try:
                responses.append(request.process_request(request_item))
            except (NamespaceException, RequestException, \
                ResponseException, RouterException, FeatureException), e:
                # per-item failures become error Responses instead of
                # aborting the whole batch
                response = Response(
                    sapi_request,
                    errors=e.message,
                    output_formatter=output_formatter_instance,
                    wrapper=wrapper_instance,
                    mimetype=mimetype)
                responses.append(response)

        rm = ResponseMerger(
            sapi_request=sapi_request,
            responses=responses,
        )
        http_response = rm.build()
        # NOTE(review): chunk ends here mid-try; the outer except/return
        # handling is outside this view.
def main():
    """Manual driver: exercises ARAXFilterKG against a stored message.

    Replays by hand what ARAXQuery() would normally do: parse an actions
    list, load a saved message from the RTX feedback database, apply the
    first filter action, and print the resulting edges/attributes.

    NOTE(review): relies on project modules (actions_parser, RTXFeedback,
    ARAX_messenger, ARAXFilterKG) and local DB state; indentation was
    reconstructed from a whitespace-mangled original.
    """
    ### Note that most of this is just manually doing what ARAXQuery() would normally do for you

    #### Create a response object
    response = Response()

    #### Create an ActionsParser object
    from actions_parser import ActionsParser
    actions_parser = ActionsParser()

    #### Set a simple list of actions
    # actions_list = [
    #     "overlay(compute_confidence_scores=true)",
    #     "return(message=true,store=false)"
    # ]
    actions_list = [
        #"filter_kg(action=remove_edges_by_type, edge_type=physically_interacts_with, remove_connected_nodes=false)",
        #"filter_kg(action=remove_edges_by_type, edge_type=physically_interacts_with, remove_connected_nodes=something)",
        #"filter(action=remove_nodes_by_type, node_type=protein)",
        #"overlay(action=compute_ngd)",
        #"filter(action=remove_edges_by_attribute, edge_attribute=ngd, threshold=.63, direction=below, remove_connected_nodes=t)",
        #"filter(action=remove_edges_by_attribute, edge_attribute=ngd, threshold=.6, remove_connected_nodes=False)",
        "filter(action=remove_orphaned_nodes)",
        "return(message=true,store=false)"
    ]

    #### Parse the action_list and print the result
    result = actions_parser.parse(actions_list)
    response.merge(result)
    if result.status != 'OK':
        print(response.show(level=Response.DEBUG))
        return response
    actions = result.data['actions']

    #### Read message #2 from the database. This should be the acetaminophen proteins query result message
    sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../UI/Feedback")
    from RTXFeedback import RTXFeedback
    araxdb = RTXFeedback()
    #message_dict = araxdb.getMessage(2)  # acetaminophen2proteins graph
    # message_dict = araxdb.getMessage(13)  # ibuprofen -> proteins -> disease # work computer
    # message_dict = araxdb.getMessage(14)  # pleuropneumonia -> phenotypic_feature # work computer
    # message_dict = araxdb.getMessage(16)  # atherosclerosis -> phenotypic_feature # work computer
    # message_dict = araxdb.getMessage(5)  # atherosclerosis -> phenotypic_feature # home computer
    # message_dict = araxdb.getMessage(10)
    message_dict = araxdb.getMessage(40)

    #### The stored message comes back as a dict. Transform it to objects
    from ARAX_messenger import ARAXMessenger
    message = ARAXMessenger().from_dict(message_dict)
    # print(json.dumps(ast.literal_eval(repr(message)),sort_keys=True,indent=2))

    #### Create an overlay object and use it to apply action[0] from the list
    #filterkg = ARAXFilterKG()
    #result = filterkg.apply(message, actions[0]['parameters'])
    #response.merge(result)

    # Apply overlay so you get an edge attribute to work with, then apply the filter
    #from ARAX_overlay import ARAXOverlay
    #overlay = ARAXOverlay()
    #result = overlay.apply(message, actions[0]['parameters'])
    #response.merge(result)

    # then apply the filter
    filterkg = ARAXFilterKG()
    result = filterkg.apply(message, actions[0]['parameters'])
    response.merge(result)
    # if result.status != 'OK':
    #     print(response.show(level=Response.DEBUG))
    #     return response
    # response.data = result.data

    #### If successful, show the result
    # print(response.show(level=Response.DEBUG))
    # response.data['message_stats'] = { 'n_results': message.n_results, 'id': message.id,
    #     'reasoner_id': message.reasoner_id, 'tool_version': message.tool_version }
    # response.data['message_stats']['confidence_scores'] = []
    # for result in message.results:
    #     response.data['message_stats']['confidence_scores'].append(result.confidence)
    # print(json.dumps(ast.literal_eval(repr(response.data['parameters'])),sort_keys=True,indent=2))
    # print(json.dumps(ast.literal_eval(repr(response.data['message_stats'])),sort_keys=True,indent=2))
    # a comment on the end so you can better see the network on github

    # look at the response
    # print(response.show(level=Response.DEBUG))
    # print(response.show())
    # print("Still executed")

    # look at the edges
    # print(json.dumps(ast.literal_eval(repr(message.knowledge_graph.edges)),sort_keys=True,indent=2))
    # print(json.dumps(ast.literal_eval(repr(message.knowledge_graph.nodes)), sort_keys=True, indent=2))
    # print(json.dumps(ast.literal_eval(repr(message)), sort_keys=True, indent=2))
    # print(response.show(level=Response.DEBUG))

    # just print off the values
    # print(json.dumps(ast.literal_eval(repr(message.knowledge_graph.edges)), sort_keys=True, indent=2))
    # for edge in message.knowledge_graph.edges:
    #     if hasattr(edge, 'edge_attributes') and edge.edge_attributes and len(edge.edge_attributes) >= 1:
    #         print(edge.edge_attributes.pop().value)
    print(json.dumps(ast.literal_eval(repr(message.knowledge_graph.edges)), sort_keys=True, indent=2))
    print(response.show(level=Response.DEBUG))
    vals = []
    for node in message.knowledge_graph.nodes:
        print(node.id)
    print(len(message.knowledge_graph.nodes))
    for edge in message.knowledge_graph.edges:
        # edge.edge_attributes.pop() mutates the message — acceptable here
        # because this driver only inspects the result afterwards
        if hasattr(edge, 'edge_attributes') and edge.edge_attributes and len(
                edge.edge_attributes) >= 1:
            vals.append(edge.edge_attributes.pop().value)
    print(sorted(vals))
def validate_and_add(product_data: ProductFormData, db):
    """Validate a product submission and store it if every check passes.

    Args:
        product_data: incoming form data (name, product_type,
            suggested_price, weight, packaging_recyclable).
        db: storage backend exposing ``store_product(product)``.

    Returns:
        A ``Response(product_id, status_code, message)``; status 0 on
        success, negative status codes for the various validation failures.
    """
    # --- presence checks ---
    if product_data.name == "":
        return Response(0, -2, "Missing Name")
    if product_data.product_type == "":
        return Response(0, -2, "Missing Type")

    product = Product(product_data.name)
    product.product_type = "Unknown"

    # --- eye products ---
    if "Eyeshadow" == product_data.product_type or "Mascara" == product_data.product_type:
        product.product_type = product_data.product_type
        product.product_family = ProductFamily.EYES
        # BUG FIX: Python str has no .contains() method; the original
        # `product.name.contains("Queen")` raised AttributeError at runtime.
        if "Eyeshadow" == product_data.product_type and "Queen" in product.name:
            # NOTE(review): this branch sets `product_range` while every
            # other branch sets `range` — confirm whether both attributes
            # are intentional on Product.
            product.product_range = ProductRange.QUEEN

    # default range; recyclable packaging upgrades it
    product.range = ProductRange.BUDGET
    if product_data.packaging_recyclable:
        product.range = ProductRange.PROFESSIONAL

    if "Foundation" == product_data.product_type:
        if product_data.suggested_price > 10:
            product.range = ProductRange.PROFESSIONAL

    # --- lip products ---
    if "Lipstick" == product_data.product_type:
        product.product_type = product_data.product_type
        product.product_family = ProductFamily.LIPS
        if product_data.suggested_price > 10:
            product.range = ProductRange.PROFESSIONAL
        if product_data.suggested_price > 20:
            # Queen-range lipsticks must not be light-weight (quality check)
            if 0 < product_data.weight < 10:
                return Response(0, -1, "Error - failed quality check for Queen Range")
            product.range = ProductRange.QUEEN

    # --- lash products ---
    if "Mascara" == product_data.product_type:
        product.product_family = ProductFamily.LASHES
        if product_data.suggested_price > 15:
            product.range = ProductRange.PROFESSIONAL
        if product_data.suggested_price > 25 and product_data.packaging_recyclable:
            product.range = ProductRange.QUEEN

    # --- weight validation ---
    if product_data.weight < 0:
        return Response(0, -3, "Weight error")
    product.weight = product_data.weight

    # --- skin products ---
    if "Blusher" == product_data.product_type or "Foundation" == product_data.product_type:
        product.product_type = product_data.product_type
        product.product_family = ProductFamily.SKIN
        if "Blusher" == product_data.product_type and product_data.weight > 10:
            return Response(0, -3, "Error - weight too high")

    # Queen-range products must ship in recyclable packaging
    if not product_data.packaging_recyclable and product.range == ProductRange.QUEEN:
        return Response(0, -1, "Error - failed quality check for Queen Range")

    if "Unknown" == product.product_type:
        return Response(0, -1, f"Unknown product type {product_data.product_type}")

    return Response(db.store_product(product), 0, "Product Successfully Added")
def apply(self, input_message, input_parameters, response=None):
    """Validate overlay parameters and dispatch the requested overlay action.

    Args:
        input_message: the message object the overlay operates on.
        input_parameters: dict that must contain an 'action' key naming one
            of self.allowable_actions.
        response: optional Response to accumulate status into; a fresh one
            is created when omitted.

    Returns:
        The Response, carrying errors on validation failure, or the result
        of the dispatched action (plus optional stats) on success.
    """
    #### Fall back to a fresh response when the caller did not supply one
    if response is None:
        response = Response()
    self.response = response
    self.message = input_message

    #### Reject anything that is not a plain dict of parameters
    if not isinstance(input_parameters, dict):
        response.error("Provided parameters is not a dict", error_code="ParametersNotDict")
        return response

    # actions that have so far been implemented for ARAX_overlay
    allowable_actions = self.allowable_actions

    #### An 'action' key is mandatory and must name a known action
    if 'action' not in input_parameters:
        response.error(
            f"Must supply an action. Allowable actions are: action={allowable_actions}",
            error_code="MissingAction")
    elif input_parameters['action'] not in allowable_actions:
        response.error(
            f"Supplied action {input_parameters['action']} is not permitted. Allowable actions are: {allowable_actions}",
            error_code="UnknownAction")

    #### Bail out on any validation error recorded above
    if response.status != 'OK':
        return response

    #### Shallow-copy the parameters and store them for convenience
    parameters = dict(input_parameters)
    response.data['parameters'] = parameters
    self.parameters = parameters

    # Dispatch via the name-mangled private method _<ClassName>__<action>
    # instead of a long if/elif chain
    # (thank you https://stackoverflow.com/questions/11649848/call-methods-by-string)
    action_method = getattr(
        self, '_' + self.__class__.__name__ + '__' + parameters['action'])
    action_method()

    response.debug(
        f"Applying Overlay to Message with parameters {parameters}"
    )  # TODO: re-write this to be more specific about the actual action
    # TODO: add_pubmed_ids
    # TODO: compute_confidence_scores
    # TODO: finish COHD
    # TODO: Jaccard

    #### Optionally append response statistics, then return
    if self.report_stats:  # helper to report information in debug if class
        self.report_stats = True
        response = self.report_response_stats(response)
    return response
def main():
    """Manual driver: exercises ARAXOverlay against a stored message.

    Replays by hand what ARAXQuery() would normally do: parse an actions
    list, load a saved message from the RTX feedback database, apply the
    first overlay action, and print the resulting knowledge-graph edges.

    NOTE(review): relies on project modules (actions_parser, RTXFeedback,
    ARAX_messenger, ARAXOverlay) and local DB state; indentation was
    reconstructed from a whitespace-mangled original.
    """
    print("start ARAX_overlay")
    #### Note that most of this is just manually doing what ARAXQuery() would normally do for you

    #### Create a response object
    response = Response()

    #### Create an ActionsParser object
    from actions_parser import ActionsParser
    actions_parser = ActionsParser()

    #### Set a simple list of actions
    #actions_list = [
    #    "overlay(compute_confidence_scores=true)",
    #    "return(message=true,store=false)"
    #]
    actions_list = [
        #"overlay(action=compute_ngd)",
        #"overlay(action=compute_ngd, virtual_edge_type=NGD1, source_qnode_id=n00, target_qnode_id=n01)",
        #"overlay(action=overlay_clinical_info, paired_concept_frequency=true)",
        # "overlay(action=overlay_clinical_info, paired_concept_frequency=true, virtual_edge_type=P1, source_qnode_id=n00, target_qnode_id=n01)",
        #"overlay(action=compute_jaccard, start_node_id=n00, intermediate_node_id=n01, end_node_id=n02, virtual_edge_type=J1)",
        #"overlay(action=add_node_pmids)",
        #"overlay(action=overlay_clinical_info, observed_expected_ratio=true)",
        #"overlay(action=overlay_clinical_info, paired_concept_frequency=true, virtual_edge_type=P1, source_qnode_id=n00, target_qnode_id=n01)",
        "overlay(action=predict_drug_treats_disease, source_qnode_id=n01, target_qnode_id=n00, virtual_edge_type=P1)",
        "return(message=true,store=false)"
    ]

    #### Parse the action_list and print the result
    result = actions_parser.parse(actions_list)
    response.merge(result)
    if result.status != 'OK':
        print(response.show(level=Response.DEBUG))
        return response
    actions = result.data['actions']

    #### Read message #2 from the database. This should be the acetaminophen proteins query result message
    sys.path.append(os.path.dirname(os.path.abspath(__file__)) + "/../../UI/Feedback")
    from RTXFeedback import RTXFeedback
    araxdb = RTXFeedback()
    #message_dict = araxdb.getMessage(2)  # acetaminophen2proteins graph
    # message_dict = araxdb.getMessage(13)  # ibuprofen -> proteins -> disease # work computer
    #message_dict = araxdb.getMessage(14)  # pleuropneumonia -> phenotypic_feature # work computer
    #message_dict = araxdb.getMessage(16)  # atherosclerosis -> phenotypic_feature # work computer
    #message_dict = araxdb.getMessage(5)  # atherosclerosis -> phenotypic_feature # home computer
    #message_dict = araxdb.getMessage(10)
    #message_dict = araxdb.getMessage(36)  # test COHD obs/exp, via ARAX_query.py 16
    #message_dict = araxdb.getMessage(39)  # ngd virtual edge test
    message_dict = araxdb.getMessage(1)

    #### The stored message comes back as a dict. Transform it to objects
    from ARAX_messenger import ARAXMessenger
    message = ARAXMessenger().from_dict(message_dict)
    #print(json.dumps(ast.literal_eval(repr(message)),sort_keys=True,indent=2))

    #### Create an overlay object and use it to apply action[0] from the list
    print("Applying action")
    overlay = ARAXOverlay()
    result = overlay.apply(message, actions[0]['parameters'])
    response.merge(result)
    print("Finished applying action")
    #if result.status != 'OK':
    #    print(response.show(level=Response.DEBUG))
    #    return response
    #response.data = result.data

    #### If successful, show the result
    #print(response.show(level=Response.DEBUG))
    #response.data['message_stats'] = { 'n_results': message.n_results, 'id': message.id,
    #    'reasoner_id': message.reasoner_id, 'tool_version': message.tool_version }
    #response.data['message_stats']['confidence_scores'] = []
    #for result in message.results:
    #    response.data['message_stats']['confidence_scores'].append(result.confidence)
    #print(json.dumps(ast.literal_eval(repr(response.data['parameters'])),sort_keys=True,indent=2))
    #print(json.dumps(ast.literal_eval(repr(response.data['message_stats'])),sort_keys=True,indent=2))
    # a comment on the end so you can better see the network on github

    # look at the response
    #print(response.show(level=Response.DEBUG))
    #print(response.show())
    #print("Still executed")

    # look at the edges
    #print(json.dumps(ast.literal_eval(repr(message.knowledge_graph.edges)),sort_keys=True,indent=2))
    #print(json.dumps(ast.literal_eval(repr(message.knowledge_graph.nodes)), sort_keys=True, indent=2))
    #print(json.dumps(ast.literal_eval(repr(message)), sort_keys=True, indent=2))
    #print(response.show(level=Response.DEBUG))

    # just print off the values
    #print(json.dumps(ast.literal_eval(repr(message.knowledge_graph.edges)), sort_keys=True, indent=2))
    #for edge in message.knowledge_graph.edges:
    #    if hasattr(edge, 'edge_attributes') and edge.edge_attributes and len(edge.edge_attributes) >= 1:
    #        print(edge.edge_attributes.pop().value)
    #print(f"Message: {json.dumps(ast.literal_eval(repr(message)), sort_keys=True, indent=2)}")
    #print(message)
    print(
        f"KG edges: {json.dumps(ast.literal_eval(repr(message.knowledge_graph.edges)), sort_keys=True, indent=2)}"
    )
    #print(response.show(level=Response.DEBUG))
    print("Yet you still got here")
def get(self, appointmentId):
    """Look up one appointment by id.

    Returns a bad-request error response when the lookup fails, otherwise
    a success response wrapping the appointment.
    """
    record, lookup_error = Appointment.queryById(appointmentId)
    if lookup_error:
        return res.badRequestError(lookup_error)
    return res.getSuccess(record)
def put(self, data=None, **params):
    """Issue an HTTP PUT to this resource's path and return the wrapped response.

    *data* is JSON-serialized first; extra keyword arguments become query
    parameters. Raises via ``Response.raise_if_error()`` on HTTP errors.
    """
    payload = utils.jsonify_data(data)
    raw_response = requests.put(
        self.path, data=payload, params=params, headers=self.headers)
    self.response = Response(self, raw_response)
    self.response.raise_if_error()
    return self.response