def list_api(type, action):
    reqMethod = request.method
    domain_name = request.args.get('domain')
    group_name = request.args.get('group')
    response = Response()
    # contentType = request.headers['content-type']
    domain = Domain(domain_name)
    myDoit = DOIT(domain)
    if type == 'groups':
        if action == 'list' and reqMethod == 'GET':
            response.data = json.dumps(myDoit.get_groups_by_domain_list())
        else:
            abort(500)
    elif type == 'domains':
        if action == 'list' and reqMethod == 'GET':
            response.data = json.dumps(myDoit.get_domain_list())
        else:
            abort(500)
    elif type == 'hosts':
        if action == 'list' and reqMethod == 'GET':
            response.data = json.dumps(myDoit.get_host_list())
        else:
            abort(500)
    elif type == 'group_vars':
        if action == 'list' and reqMethod == 'GET':
            response.data = json.dumps(myDoit.get_group_vars(group_name))
        else:
            abort(500)
    response.headers['Content-Type'] = 'application/json'
    return response
def index():
    response = Response(mimetype='text/plain')
    if request.from_sl:
        response.data = 'Hello, %s!' % request.sl.name
    else:
        response.data = 'Hello, visitor!'
    return response
def ansible_api(action):
    reqMethod = request.method
    domain_name = request.args.get('domain')
    host_name = request.args.get('host')
    response = Response()
    # contentType = request.headers['content-type']
    domain = Domain(domain_name)
    myDoit = DOIT(domain)
    if action == 'list' and reqMethod == 'GET':
        if domain_name is None:
            abort(500)
        response.data = json.dumps(myDoit.get_inventory())
    elif action == 'host' and reqMethod == 'GET':
        if domain_name is None:
            abort(500)
        if host_name is None:
            abort(500)
        response.data = json.dumps(myDoit.get_host_info(host_name))
    response.headers['Content-Type'] = 'application/json'
    return response
def error(msg):
    codec = encoder(flask.request, 'errors')
    response = Response(status=400)
    if isinstance(msg, dict):
        response.data = codec.encode(msg)
    else:
        response.data = codec.encode(msg._errors)
    return response
def show_collection(collection):
    # Handle specific row requests
    rowid_match = re.search(r'^(.*)[(](\d+)[)]$', collection)
    if rowid_match:
        collection = rowid_match.group(1)
        rowid = int(rowid_match.group(2))
    else:
        rowid = None

    # Check that the table exists
    tables = get_tables('{}/sql/meta'.format(dataset_url))
    if collection not in tables:
        resp = Response()
        resp.headers[b'Content-Type'] = b'application/xml;charset=utf-8'
        resp.data = render_template(
            'error.xml',
            message="Resource not found for the segment '{}'.".format(collection)
        )
        return resp

    # Handle pagination
    if request.args.get('$skiptoken'):
        limit = 500
        offset = int(request.args.get('$skiptoken'))
    else:
        limit = int(request.args.get('$top', 500))
        offset = int(request.args.get('$skip', 0))

    entries = get_entries_in_collection(
        '{}/sql'.format(dataset_url),
        collection,
        limit=limit,
        offset=offset,
        rowid=rowid
    )

    # Add pagination links if required
    if len(entries) != limit:
        next_query_string = ''
    elif request.args.get('$skiptoken'):
        next_query_string = '?$skiptoken={}'.format(entries[-1]['rowid'])
    else:
        next_query_string = '?$top={}&$skip={}'.format(limit, limit + offset)

    resp = Response()
    resp.headers[b'Content-Type'] = b'application/xml;charset=utf-8'
    resp.data = render_template(
        'collection.xml',
        api_server=api_server,
        api_path=api_path,
        collection=collection,
        entries=entries,
        next_query_string=next_query_string
    )
    return resp
def my_response(data, status_code=200, status="ok"):
    r = Response()
    # Note: status_code is assigned after status, so the numeric code
    # (e.g. "200 OK") is what ends up on the response status line.
    r.status = status
    r.status_code = status_code
    if data is not None:
        if isinstance(data, str):
            r.data = data
        else:
            r.data = json.dumps(data)
    return r
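# A hedged sketch of the same JSON envelope as my_response() above, using
# flask.jsonify, which serialises the payload and sets the application/json
# mimetype automatically. It only works inside an application/request
# context; the helper name is an illustrative assumption.
from flask import jsonify


def my_json_response(data, status_code=200):
    response = jsonify(data)
    response.status_code = status_code
    return response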
def balance_request(app_name, path): # print 'You %s want path: %s' % (app_name, path) #TODO SHOW IN DEBUG MODE init_milli_time = int(round(time.time() * 1000)) endpoint = appserversdao.get_random_endpoint_of_app(app_name) if endpoint: try: h1 = httplib.HTTPConnection(endpoint) headers = dict(request.headers) headers.pop("Content-Length", None) h1.request(method=request.method, url="/%s" % path, headers=headers, body=json.dumps(request.json)) r = h1.getresponse() resp = Response() data = r.read() for header in r.getheaders(): # print 'header %s value %s' % (header[0], r.getheader(header[0])) #TODO SHOW IN DEBUG MODE if header[0] != 'transfer-encoding': resp.headers[header[0]] = str(r.getheader(header[0])) resp.data = data end_milli_time = int(round(time.time() * 1000)) total_milli_time = end_milli_time - init_milli_time thr = threading.Thread(target=logappserverdao.write_apps_log, args=(app_name, str(path), str(request.method), endpoint, total_milli_time , r.status), kwargs={}) thr.start() return resp except: endpoint = appserversdao.get_another_endpoint_of_app(app_name, endpoint) try: h1 = httplib.HTTPConnection(endpoint) headers = dict(request.headers) headers.pop("Content-Length", None) h1.request(method=request.method, url="/%s" % path, headers=headers, body=json.dumps(request.json)) r = h1.getresponse() resp = Response() data = r.read() for header in r.getheaders(): # print 'header %s value %s' % (header[0], r.getheader(header[0])) #TODO SHOW IN DEBUG MODE if header[0] != 'transfer-encoding': resp.headers[header[0]] = str(r.getheader(header[0])) resp.data = data end_milli_time = int(round(time.time() * 1000)) total_milli_time = end_milli_time - init_milli_time thr = threading.Thread(target=logappserverdao.write_apps_log, args=(app_name, str(path), str(request.method), endpoint, total_milli_time, r.status), kwargs={}) thr.start() return resp except: return create_response('We can not request the app in two attempts', httplib.PRECONDITION_FAILED, '04') else: return create_response('No endpoints registered for that app', httplib.CONFLICT, '03')
def create_response(description, status_code, error=None, mimetype='application/json', content=None):
    res = Response()
    res.status_code = status_code
    res.mimetype = mimetype
    if content:
        res.data = json.dumps(content)
    else:
        f = dict()
        f['description'] = description
        if error:
            f['error'] = error
        f['status_code'] = status_code
        res.data = json.dumps(f)
    return res
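# Usage sketch for the create_response() helper above. The payloads, codes
# and error markers are illustrative assumptions, not taken from the
# original code base.
def create_response_example():
    # plain error envelope: {"description": ..., "error": "03", "status_code": 409}
    conflict = create_response('No endpoints registered for that app', 409, error='03')

    # explicit JSON body; the description is ignored when content is given
    ok = create_response('ignored', 200, content={'result': 'ok'})
    return conflict, ok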
def get(self, url):
    user = auth(session, required=not app.config['OPEN_PROXY'])
    revision = fetch.get("http://" + url, request.user_agent, user)
    response = Response(mimetype=revision.mimetype)
    if "text" not in revision.mimetype:
        return send_file(revision.bcontent, mimetype=revision.mimetype)
    else:
        if 'html' in revision.mimetype:
            response.data = parser.parse(revision.content, 'http://' + url)
        else:
            response.data = revision.content
    return response
def video():
    '''
    Video request handler.

    :return: list of available videos in json format.
    '''
    entries = []
    for entry in os.walk(settings.VIDEO_FILES_PATH):
        if not entry[2]:
            # there is no file
            continue
        date = os.path.basename(entry[0])
        for basename in entry[2]:
            filename = os.path.join(entry[0], basename)
            relpath = os.path.relpath(filename, start=settings.VIDEO_FILES_PATH)
            parts = list(urlparse.urlsplit(request.base_url)[:2])
            parts.append(settings.VIDEO_FILES_LOCATION + '/' + relpath)
            parts.extend(['', ''])
            url = urlparse.urlunsplit(parts)
            parts[2] = settings.THUMBNAIL_FILES_LOCATION + '/'
            parts[2] += os.path.splitext(relpath)[0] + '.png'
            thumbnail = urlparse.urlunsplit(parts)
            entries.append({'date': date, 'url': url, 'thumbnail': thumbnail})
    entries.sort(reverse=True, key=lambda x: x['date'])
    response = Response()
    response.headers['Content-Type'] = 'application/json'
    response.data = json.dumps(entries)
    return response
def excel_response(spreadsheet, filename=u'export.xls'):
    """
    Prepares an Excel spreadsheet for response in Flask.

    :param spreadsheet: the spreadsheet
    :type spreadsheet: :class:`xlwt.Workbook`
    :param filename: the name of the file when downloaded
    :type filename: unicode
    :return: the flask response
    :rtype: :class:`flask.Response`
    """
    response = Response()
    response.status_code = 200
    output = StringIO.StringIO()
    spreadsheet.save(output)
    response.data = output.getvalue()
    mimetype_tuple = mimetypes.guess_type(filename)

    # HTTP headers for forcing file download
    response_headers = Headers({
        u'Pragma': u"public",  # required
        u'Expires': u'0',
        u'Cache-Control': [u'must-revalidate, post-check=0, pre-check=0', u'private'],
        u'Content-Type': mimetype_tuple[0],
        u'Content-Disposition': u'attachment; filename=\"%s\";' % filename,
        u'Content-Transfer-Encoding': u'binary',
        u'Content-Length': len(response.data)
    })
    if mimetype_tuple[1] is not None:
        response_headers.update({u'Content-Encoding': mimetype_tuple[1]})
    response.headers = response_headers
    response.set_cookie(u'fileDownload', u'true', path=u'/')
    return response
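# Usage sketch for excel_response() above: build a small xlwt workbook and
# hand it to the helper. The sheet name and cell contents are illustrative
# assumptions; xlwt is assumed to be installed.
import xlwt


def export_example():
    book = xlwt.Workbook()
    sheet = book.add_sheet('report')
    sheet.write(0, 0, u'name')
    sheet.write(0, 1, u'value')
    sheet.write(1, 0, u'example')
    sheet.write(1, 1, 42)
    return excel_response(book, filename=u'report.xls')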
def write_client_excel(medium_data, year):
    response = Response()
    response.status_code = 200
    output = StringIO.StringIO()
    workbook = xlsxwriter.Workbook(output)
    worksheet = workbook.add_worksheet()
    align_center = workbook.add_format(
        {'align': 'center', 'valign': 'vcenter', 'border': 1})
    keys = [u"媒体名称", u"类别"] + \
        [u"%s-%s-01" % (year, str(k)) for k in range(1, 13)] + [u'总计']
    for k in range(len(keys)):
        worksheet.write(0, 0 + k, keys[k], align_center)
        worksheet.set_column(0, k, 15)
    th = 1
    for k in medium_data:
        th = _insert_medium_data(workbook, worksheet, k, th)
    workbook.close()
    response.data = output.getvalue()
    filename = ("%s-%s.xls" % (u"MediumsWeekly",
                               datetime.datetime.now().strftime('%Y%m%d%H%M%S')))
    mimetype_tuple = mimetypes.guess_type(filename)
    # The original dict literal listed 'Cache-Control' twice, which silently
    # keeps only the second value; both directives are combined here.
    response_headers = Headers({
        'Pragma': "public",
        'Expires': '0',
        'Cache-Control': 'must-revalidate, post-check=0, pre-check=0, private',
        'Content-Type': mimetype_tuple[0],
        'Content-Disposition': 'attachment; filename=\"%s\";' % filename,
        'Content-Transfer-Encoding': 'binary',
        'Content-Length': len(response.data)
    })
    response.headers = response_headers
    response.set_cookie('fileDownload', 'true', path='/')
    return response
def pmt_detail_view():
    r = txn_detail_def.R_TXNPayment_Detail()  # R_MSN_Detail()
    q = txn_detail_def.Q_TXNPayment_Detail()  # Q_MSN_Detail()

    # ***** Present form of the qualifiers for the report
    myForm = q.as_webform(SuperCoolForm, title=r.name, description=r.__doc__)
    myForm + InputField('limit', dtype=int, label='LIMIT', size=5, default=100,
                        help='Limit the query to ## rows only. Set to 0 or blank for ALL') \
           + CheckboxField('sql_only', dtype=bool, default=False, label='SQL only (no run!)',
                           help='Just show generated SQL, do NOT throw at the database.')
    try:
        # build form to collect qualifiers, present until valid
        if myForm.process_me(request):
            q.from_form(myForm)
            q.limit = myForm['limit']
            sql_only = myForm['sql_only']
    except (ErrorFormNew, ErrorFormInvalid):
        return make_response(myForm.render_me())
    # ***** Done, qualifiers are set to the user's input

    # return debug_0(q)
    q.qualify()
    if sql_only:
        return make_response("<body><pre>%s</pre></body>" % q.sql)

    result_set = q.load_dataset()
    DOC_OUT, MIME_TYPE, CON_TYPE = utils.data_to_texttable(
        result_set["ROWS"], r.COLS_OUT, CONTEXT={'title': 'payments report'})
    response = Response(status=200, mimetype=MIME_TYPE, content_type=CON_TYPE)
    response.data = DOC_OUT
    return response
def download_bim_excel():
    response = Response()
    response.status_code = 200
    date = datetime.utcnow().strftime('%Y-%m-%d')
    book = xlwt.Workbook()
    sheet1 = book.add_sheet('BimLink ' + date)
    lines = Track.query.join(Area).join(Material).join(Location)\
        .filter(Area.id == Track.area_id)\
        .filter(Material.id == Track.material_id)\
        .filter(Location.id == Track.location_id).all()
    i = 0
    for li in lines:
        start = (int(round(li.station_start * 10) * 10))
        end = (int(round(li.station_end * 10) * 10))
        # if end > start:
        num = (end - start) / 10
        for n in range(0, num):
            s = start + n * 10
            e = s + 10
            excel_id = li.area.area + '_' + li.location.location + '_' + str(s) + '_' + str(e)
            revit_id = Bimlink.query.filter_by(excel_id=excel_id).first()
            if revit_id:
                sheet1.row(i).write(0, revit_id.revit_id)
                sheet1.row(i).write(1, excel_id)
                sheet1.row(i).write(2, 'Complete')
                sheet1.row(i).write(3, str(li.date))
                sheet1.row(i).write(4, li.material.material)
                i += 1
    output = StringIO.StringIO()
    book.save(output)
    response.data = output.getvalue()
    filename = 'ESA CM005 Waterproofing BIMLink_' + date + '.xls'
    mimetype_tuple = mimetypes.guess_type(filename)
    # HTTP headers for forcing file download. The original dict listed
    # 'Cache-Control' twice (only the last value survives), so the two
    # directives are combined into one header value here.
    response_headers = Headers({
        'Pragma': "public",  # required
        'Expires': '0',
        'Cache-Control': 'must-revalidate, post-check=0, pre-check=0, private',  # 'private' required for certain browsers
        'Content-Type': mimetype_tuple[0],
        'Content-Disposition': 'attachment; filename=\"%s\";' % filename,
        'Content-Transfer-Encoding': 'binary',
        'Content-Length': len(response.data)
    })
    if mimetype_tuple[1] is not None:
        # Response has no update() method; put the encoding into the headers
        # that are attached to the response below.
        response_headers.update({'Content-Encoding': mimetype_tuple[1]})
    response.headers = response_headers
    # as per jquery.fileDownload.js requirements
    response.set_cookie('fileDownload', 'true', path='/')
    return response
def admin_sheet_print(sheetNo):
    # Create the workbook object
    workbook = Workbook()
    worksheet = workbook.active
    # Fetch the sheet data
    dataList = get_sheet_data(sheetNo)
    sheet = dataList['sheet']
    heads = [head.head for head in dataList['heads']]
    datas = dataList['datas']
    # Build the sheet
    # for i in range(len(heads)):
    #     worksheet[chr(65 + i) + str(1)] = heads[i]  # 'A' has ASCII code 65
    # for rowNo in range(len(datas)):
    #     # each row, minus the trailing branchNo, see the `get_sheet_data` docs
    #     for i in range(len(datas[rowNo]) - 1):
    #         worksheet[chr(65 + i) + str(rowNo + 2)] = datas[rowNo][i]
    # Simpler: worksheet.append() adds a whole row at once
    worksheet.append(heads)
    for data in datas:
        # each row, minus the trailing branchNo, see the `get_sheet_data` docs
        worksheet.append(data[:len(data) - 1])
    # Write the workbook into an in-memory stream and attach it to the response
    output = StringIO.StringIO()
    workbook.save(output)
    response = Response()
    response.data = output.getvalue()
    # Set the response headers
    response.headers["Content-Disposition"] = "attachment; filename=%s.xlsx" % sheet.sheetName
    return response
def upload_file():
    if request.method == 'POST':
        file = request.files['file']
        if file and '.' in file.filename and file.filename.rsplit('.', 1)[1].lower() == 'scad':
            scad_file = tempfile.mktemp('.scad')
            file.save(scad_file)
            stl_file = tempfile.mktemp('.stl')
            try:
                r = envoy.run('{binary} -o {stl_file} {scad_file}'.format(
                    binary=OPENSCAD_BINARY,
                    stl_file=stl_file,
                    scad_file=scad_file
                ))
            except:
                return "Conversion failed, executable not found?", 500
            os.unlink(scad_file)
            if r.status_code == 0:
                resp = Response()
                resp.mimetype = 'application/sla'
                resp.headers['Content-Disposition'] = 'attachment; filename="{0}.stl"'.format(
                    os.path.basename(secure_filename(file.filename)).rsplit('.', 1)[0])
                resp.data = open(stl_file).read()
                os.unlink(stl_file)
                return resp
            else:
                return "Conversion failed with error {0}".format(r.status_code), 500
    return '''<!doctype html>
def compress(content):
    """ Compress str or unicode content using gzip """
    resp = Response()
    if isinstance(content, Response):
        resp = content
        content = resp.data
    if not IS_PYTHON_3 and isinstance(content, unicode):
        content = content.encode('utf8')
    if IS_PYTHON_3:
        gzip_buffer = BytesIO()
        gzip_file = gzip.GzipFile(fileobj=gzip_buffer, mode='wb')
        # resp.data is already bytes on Python 3, so only encode text input
        if isinstance(content, str):
            content = content.encode('utf-8')
        gzip_file.write(content)
    else:
        gzip_buffer = StringIO()
        gzip_file = gzip.GzipFile(fileobj=gzip_buffer, mode='wb')
        gzip_file.write(content)
    gzip_file.close()
    resp.data = gzip_buffer.getvalue()
    resp.headers['Content-Encoding'] = 'gzip'
    resp.headers['Vary'] = 'Accept-Encoding'
    resp.headers['Content-Length'] = len(resp.data)
    return resp
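# Sketch of wiring the compress() helper above into a Flask after_request
# hook, applied only when the client advertises gzip support. The `app`
# registration helper and the 512-byte threshold are assumptions for
# illustration.
from flask import request


def register_gzip(app):
    @app.after_request
    def maybe_compress(response):
        accepts_gzip = 'gzip' in request.headers.get('Accept-Encoding', '').lower()
        already_encoded = 'Content-Encoding' in response.headers
        if accepts_gzip and not already_encoded and len(response.data) >= 512:
            return compress(response)
        return response

    return app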
def response_file(data, filename):
    """
    :param data: response data
    :param filename: name of the file when downloaded
    :return: return a file to client
    """
    from flask import Response
    from ctypes import create_string_buffer
    import mimetypes, struct
    from werkzeug.datastructures import Headers

    # buf = create_string_buffer(len(data))
    # struct.pack_into(str(len(data)) + "s", buf, 0, data)
    response = Response()
    response.data = data
    response.status_code = 200
    mimetype_tuple = mimetypes.guess_type(filename)
    response.default_mimetype = mimetype_tuple[0]
    response_headers = Headers({
        'Pragma': "no-cache",
        'Expires': '0',
        'Cache-Control': 'must-revalidate, post-check=0, pre-check=0',
        'Content-Type': mimetype_tuple[0],
        'Content-Disposition': 'attachment; filename=\"%s\";' % filename,
        'Content-Transfer-Encoding': 'binary',
        'Content-Length': len(response.data)
    })
    if mimetype_tuple[1] is not None:
        # Update the headers that are actually attached below; the original
        # updated response.headers and then replaced them on the next line,
        # dropping the Content-Encoding value.
        response_headers.update({'Content-Encoding': mimetype_tuple[1]})
    response.headers = response_headers
    return response
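# For files that already exist on disk, flask.send_file covers most of what
# response_file() above does by hand. A minimal sketch; the path and the
# helper name are illustrative assumptions. Note that download_name is the
# Flask 2.x parameter name (attachment_filename in older releases).
from flask import send_file


def download_from_disk(path, download_name):
    # send_file guesses the mimetype from the name and sets
    # Content-Disposition when asked to send the file as an attachment.
    return send_file(path, as_attachment=True, download_name=download_name)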
def thumb():
    '''Code borrowed (with modifications) from:
    http://flask.pocoo.org/mailinglist/archive/2011/1/26/pil-to-create-thumbnails-automatically-using-tag/#32aff91e05ba9985a49a76a4fb5338d7'''
    file = request.args.get('file', None)
    if not file:
        raise NotFound()
    width = request.args.get('width', app.config['THUMB_RESIZE_WIDTH'], type=int)
    height = request.args.get('height', app.config['THUMB_RESIZE_HEIGHT'], type=int)
    quality = int(request.args.get('quality', 75))
    crop = request.args.get('crop', False)
    out = resize(file=file, width=width, height=height, crop=crop, quality=quality)
    response = Response(mimetype='image/jpeg')
    response.data = out.getvalue()
    return response
def page_not_found(error):
    if request.is_xhr:
        # json.dumps handles quoting and escaping of the translated message
        data = json.dumps({"message": si18n.translate(unicode(error.message))})
        resp = Response(status=500)
        resp.mimetype = "application/json"
        resp.data = data
        return resp
    return unicode(error)
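# Sketch of registering the handler above. The 404 code and the helper name
# are assumptions based on the function name, since the snippet itself
# builds a 500 response for XHR clients.
def register_error_handlers(app):
    app.register_error_handler(404, page_not_found)
    return app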
def widget():
    c = request.args.get('c') or abort(404)
    matched = re_container.match(c) or abort(404)
    event_id = matched.groupdict()['id']
    response = Response(content_type="text/javascript")
    response.data = render_template('events/widget.js', container=c, event=Event.get(event_id))
    return response
def json_response(data, CONTEXT):
    # response = Response(status=200, mimetype='text/x-json', content_type='text/x-json')
    response = Response(status=200, mimetype=JSON_MIME, content_type=JSON_CTYPE)
    # other examples use application/javascript as the mimetype;
    # the RESTful spec and Wikipedia say: application/json
    response.data = data
    return response
def write_target_info(targets): response = Response() response.status_code = 200 output = StringIO.StringIO() workbook = xlsxwriter.Workbook(output) worksheet = workbook.add_worksheet() align_left = workbook.add_format( {'align': 'left', 'valign': 'vcenter', 'border': 1}) align_center = workbook.add_format( {'align': 'center', 'valign': 'vcenter', 'border': 1}) keys = [u'名称', u'外包性质', u'类型', u'开户行', u'卡号', u'支付宝', u'联系方式', u'2014年成本', u'2015年成本', u'2016年成本', u'总计'] # 设置高度 # for k in range(1000): # worksheet.set_row(k, 25) outsources = [outsource_to_dict(k) for k in OutSourceExecutiveReport.all()] outsources = [k for k in outsources if k['status'] == 1] for k in range(len(keys)): worksheet.write(0, k, keys[k], align_center) worksheet.set_column(0, k, 30) targets = list(targets) th = 1 for k in range(len(targets)): worksheet.write(th, 0, targets[k].name, align_left) worksheet.write(th, 1, targets[k].otype_cn, align_left) worksheet.write(th, 2, targets[k].type_cn, align_left) worksheet.write(th, 3, targets[k].bank, align_left) worksheet.write(th, 4, targets[k].card, align_left) worksheet.write(th, 5, targets[k].alipay, align_left) worksheet.write(th, 6, targets[k].contract, align_left) worksheet.write(th, 7, sum([o['money'] for o in outsources if o['target_id'] == int( targets[k].id) and int(o['month_day'].year) == 2014]), align_left) worksheet.write(th, 8, sum([o['money'] for o in outsources if o['target_id'] == int( targets[k].id) and int(o['month_day'].year) == 2015]), align_left) worksheet.write(th, 9, sum([o['money'] for o in outsources if o['target_id'] == int( targets[k].id) and int(o['month_day'].year) == 2016]), align_left) worksheet.write(th, 10, sum([o['money'] for o in outsources if o[ 'target_id'] == int(targets[k].id)]), align_left) th += 1 workbook.close() response.data = output.getvalue() filename = ("%s-%s.xls" % ("供应商详情", datetime.datetime.now().strftime('%Y%m%d%H%M%S'))) mimetype_tuple = mimetypes.guess_type(filename) response_headers = Headers({ 'Pragma': "public", 'Expires': '0', 'Cache-Control': 'must-revalidate, post-check=0, pre-check=0', 'Cache-Control': 'private', 'Content-Type': mimetype_tuple[0], 'Content-Disposition': 'attachment; filename=\"%s\";' % filename, 'Content-Transfer-Encoding': 'binary', 'Content-Length': len(response.data) }) response.headers = response_headers response.set_cookie('fileDownload', 'true', path='/') return response
def return_same_as(_, payload: Response):
    """ Sets JSON Header link referring to @type """
    if (payload._status_code == 201) and hasattr(g, MIGRATE_RETURNED_SAME_AS):
        data = json.loads(payload.data.decode(payload.charset))
        data['returnedSameAs'] = getattr(g, MIGRATE_RETURNED_SAME_AS)
        delattr(g, MIGRATE_RETURNED_SAME_AS)
        payload.data = json.dumps(data)
def get_api_poll(poll_id):
    """ Return a single Poll. """
    response = Response()
    question = model.Question.get(poll_id)
    if not isinstance(question, model.Question):
        response.status_code = 400
        return response
    response.data = json.dumps(question.json)
    return response
def upload_data():
    file = request.files.get('myfile', None)
    uploader = Uploader(app.config)
    response = Response()
    response.headers['Content-Type'] = 'text/json'

    res = uploader.check_mime(file)
    if not res:
        response.data = '{"status":"BadType"}'
        return response

    res = uploader.check_extension(file.filename)  # note: this result is never checked

    if not uploader.sha1(file):
        response.data = '{"status":"Exist"}'
        return response

    uploader.moveFile(file)
    # if not uploader.check_file_type():
    #     uploader.delete()
    #     response.data = '{"status":"BadType"}'
    #     return response

    if not uploader.get_exif():
        uploader.delete()
        response.data = '{"status":"BadExif"}'
        return response

    if not uploader.check_date(app.config['EXPIRE_DAYS']):
        uploader.delete()
        response.data = '{"status":"VeryOld"}'
        return response

    uploader.get_size()
    uploader.savedb()
    response.data = '{"status":"OK", "id": "' + uploader.id + '" }'
    return response
def write_outs_excel(outs): response = Response() response.status_code = 200 output = StringIO.StringIO() workbook = xlsxwriter.Workbook(output) worksheet = workbook.add_worksheet() align_center = workbook.add_format( {'align': 'center', 'valign': 'vcenter', 'border': 1}) keys = [u'外出申请人', u'参会人', u'开始时间', u'结束时间', u'状态'] worksheet.set_column(0, len(keys), 25) for k in range(len(keys)): worksheet.write(0, k, keys[k], align_center) th = 1 for k in range(len(outs)): if len(outs[k].joiners) > 1: worksheet.merge_range( th, 0, th + len(outs[k].joiners) - 1, 0, outs[k].creator.name, align_center) worksheet.merge_range( th, 2, th + len(outs[k].joiners) - 1, 2, outs[k].start_time_cn, align_center) worksheet.merge_range( th, 3, th + len(outs[k].joiners) - 1, 3, outs[k].end_time_cn, align_center) worksheet.merge_range( th, 4, th + len(outs[k].joiners) - 1, 4, outs[k].status_cn, align_center) joiners = outs[k].joiners for i in range(len(joiners)): worksheet.write(th + i, 1, joiners[i].name, align_center) th += len(outs[k].joiners) else: worksheet.write(th, 0, outs[k].creator.name, align_center) if outs[k].joiners: worksheet.write(th, 1, outs[k].joiners[0].name, align_center) else: worksheet.write(th, 1, '', align_center) worksheet.write(th, 2, outs[k].start_time_cn, align_center) worksheet.write(th, 3, outs[k].end_time_cn, align_center) worksheet.write(th, 4, outs[k].status_cn, align_center) th += 1 workbook.close() response.data = output.getvalue() filename = ("%s-%s.xls" % ("外出报备", datetime.datetime.now().strftime('%Y%m%d%H%M%S'))) mimetype_tuple = mimetypes.guess_type(filename) response_headers = Headers({ 'Pragma': "public", 'Expires': '0', 'Cache-Control': 'must-revalidate, post-check=0, pre-check=0', 'Cache-Control': 'private', 'Content-Type': mimetype_tuple[0], 'Content-Disposition': 'attachment; filename=\"%s\";' % filename, 'Content-Transfer-Encoding': 'binary', 'Content-Length': len(response.data) }) response.headers = response_headers response.set_cookie('fileDownload', 'true', path='/') return response
def ret_(message, status_code, type, data):
    import ujson
    response = Response()
    response.headers["Gainful-Response"] = "Gainful-Response"
    response.mimetype = "application/json"
    response.status_code = status_code
    response.data = ujson.dumps({
        "type": type,
        "message": message,
        "data": data
    })
    return response
def write_agent_total_excel(year, agent_obj, total_is_sale_money, total_is_medium_money): response = Response() response.status_code = 200 output = StringIO.StringIO() workbook = xlsxwriter.Workbook(output) worksheet = workbook.add_worksheet() align_left = workbook.add_format( {'align': 'left', 'valign': 'vcenter', 'border': 1}) money_align_left = workbook.add_format( {'align': 'left', 'valign': 'vcenter', 'border': 1, 'num_format': '#,##0.00'}) align_center = workbook.add_format( {'align': 'center', 'valign': 'vcenter', 'border': 1}) keys = [u'代理集团', u'代理', u'合同号', 'campaign', u'销售金额', u'媒介金额'] for k in range(len(keys)): worksheet.set_column(k, 0, 30) worksheet.write(0, k, keys[k], align_center) th = 1 for k in agent_obj: agents = k['agents'] if k['excel_order_count'] == 1: worksheet.write(th, 0, k['name'], align_left) else: worksheet.merge_range(th, 0, th + k['excel_order_count'] - 1, 0, k['name'], align_left) for a in agents: if a['orders']: if len(a['orders']) == 1: worksheet.write(th, 1, a['name'], align_left) else: worksheet.merge_range(th, 1, th + a['html_order_count'] - 1, 1, a['name'], align_left) for o in a['orders']: worksheet.write(th, 2, o['contract'] or u'无合同号', align_left) worksheet.write(th, 3, o['campaign'], align_left) worksheet.write(th, 4, o['is_sale_money'], money_align_left) worksheet.write(th, 5, o['is_medium_money'], money_align_left) th += 1 worksheet.merge_range(th, 0, th, 3, u'总计', align_center) worksheet.write(th, 4, total_is_sale_money, money_align_left) worksheet.write(th, 5, total_is_medium_money, money_align_left) workbook.close() response.data = output.getvalue() filename = ("%s-%s.xls" % ('代理总表', str(year))) mimetype_tuple = mimetypes.guess_type(filename) response_headers = Headers({ 'Pragma': "public", 'Expires': '0', 'Cache-Control': 'must-revalidate, post-check=0, pre-check=0', 'Cache-Control': 'private', 'Content-Type': mimetype_tuple[0], 'Content-Disposition': 'attachment; filename=\"%s\";' % filename, 'Content-Transfer-Encoding': 'binary', 'Content-Length': len(response.data) }) response.headers = response_headers response.set_cookie('fileDownload', 'true', path='/') return response
def write_ondutys_excel(ondutys, start_date, end_date): start_date = start_date.strftime('%Y-%m-%d') end_date = end_date.strftime('%Y-%m-%d') response = Response() response.status_code = 200 output = StringIO.StringIO() workbook = xlsxwriter.Workbook(output) worksheet = workbook.add_worksheet() align_center = workbook.add_format( {'align': 'center', 'valign': 'vcenter', 'border': 1}) red_align_center = workbook.add_format( {'align': 'center', 'valign': 'vcenter', 'border': 1, 'color': 'red'}) url_format = workbook.add_format({ 'font_color': 'blue', 'underline': 1, 'valign': 'vcenter', 'border': 1 }) keys = [u'员工姓名', u'异常次数', u'操作'] worksheet.set_column(0, len(keys), 25) for k in range(len(keys)): worksheet.write(0, k, keys[k], align_center) th = 1 for k in range(len(ondutys)): worksheet.write(th, 0, ondutys[k]['user'].name, align_center) if ondutys[k]['count'] > 0: worksheet.write(th, 1, ondutys[k]['count'], red_align_center) else: worksheet.write(th, 1, ondutys[k]['count'], align_center) worksheet.write_url(th, 2, 'http://z.inad.com/account/onduty/%s/info?start_time=%s&end_time=%s' % ( ondutys[k]['user'].id, start_date, end_date), url_format, u'查看') th += 1 workbook.close() response.data = output.getvalue() filename = ("%s %s-%s.xls" % ("考勤表", start_date, end_date)) mimetype_tuple = mimetypes.guess_type(filename) response_headers = Headers({ 'Pragma': "public", 'Expires': '0', 'Cache-Control': 'must-revalidate, post-check=0, pre-check=0', 'Cache-Control': 'private', 'Content-Type': mimetype_tuple[0], 'Content-Disposition': 'attachment; filename=\"%s\";' % filename, 'Content-Transfer-Encoding': 'binary', 'Content-Length': len(response.data) }) response.headers = response_headers response.set_cookie('fileDownload', 'true', path='/') return response
def get_content():
    response = Response()
    response.headers.add('Accept-Ranges', 'bytes')
    rangeh = request.headers.get('Range')
    if rangeh:
        response.status = '206 Partial Content'
        unit, ranges = rangeh.split('=')
        ranges = re.split(r',\s*', ranges)
        spans = []
        for rspec in ranges:
            if mo := re.match(r'-(\d+)', rspec):
                # suffix range: the last N bytes
                rstart = content_length - int(mo.group(1))
                rend = content_length
                spans.append((rstart, rend))
            else:
                rstart, rend = rspec.split('-')
                rstart = int(rstart)
                rend = int(rend) if rend else content_length
                spans.append((rstart, rend))
        if len(spans) == 1:
            rstart, rend = spans[0]
            response.data = content[rstart:rend]
            response.headers.add('Content-Range', f'{rstart}-{rend}/{content_length}')
        else:
            # build a multipart body, one MIME part per requested span
            sep = make_sep()
            parts = []
            for span in spans:
                rstart, rend = span
                part = MIMEText(content[rstart:rend])
                part.add_header('Content-Range', f'{rstart}-{rend}/{content_length}')
                part.add_header('Content-Length', str(rend - rstart))
                parts.append(f'--{sep}\n{part}')
            response.data = '\n\n'.join(parts)
    return response
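# Client-side sketch for exercising the Range handling above with the
# `requests` library. The URL and route are assumptions for illustration.
import requests


def fetch_first_kilobyte(url="http://localhost:5000/content"):
    # ask for the first kilobyte; a compliant server answers 206 Partial Content
    resp = requests.get(url, headers={"Range": "bytes=0-1023"})
    return resp.status_code, resp.content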
def computer():
    rp = Rp()
    rp.headers['status'] = 200
    rp.headers['Content-Type'] = "application/json; charset=utf-8"
    data = {
        'processor': platform.processor(),
        'operating system': platform.system(),
        'memory': virtual_memory().total,
        'username': getpass.getuser(),
    }
    rp.data = json.dumps(data)
    return rp
def build_success_response(self, code, message, data):
    response = Response()
    response.data = json.dumps({
        'status': 'ok',
        'code': code,
        'message': message,
        'result': data,
    })
    response.status_code = code
    response.headers['Access-Control-Allow-Origin'] = '*'
    return response
def post(self):
    response_obj = Response(mimetype='application/json')
    data = get_request_data(request)
    role_config_data = data.get('role_config', None)
    if not role_config_data:
        return {'message': 'No input data provided'}, 400
    data, errors = role_config_schema.load(role_config_data)
    if errors:
        return {"status": "error", "data": errors}, 422
    else:
        try:
            role_id = data['role_id']
            operation_id = data['operation_id']
            role = Roles.query.filter_by(role_id=int(data['role_id'])).first()
            if not role:
                # note: execution falls through to the operation lookup below,
                # so this message can be overwritten
                response_obj.data = json.dumps({"msg": "please enter valid role ID"})
            operation = Operations.query.filter_by(operation_id=data['operation_id']).first()
            if not operation:
                response_obj.data = json.dumps({"msg": "please enter valid operation ID"})
            else:
                role_config = RoleConfig(role_id, operation_id, data['is_active'])
                db.session.add(role_config)
                db.session.commit()
                print("hello")
                result_obj = role_config_schema.dump(role_config).data
                response_obj.data = json.dumps({
                    "status": 'success',
                    "Role_config": result_obj
                })
            return response_obj
        except:
            # a bare except that only logs leaves the view returning None
            print("Oops!", sys.exc_info()[0], "occurred.")
def get_gears_by_type(type):
    response = Response(response='OK', status=200)
    try:
        types = [
            'malee_weapons', 'ranged_weapons', 'shields',
            'traps_and_turrets', 'grenades', 'powers', 'amulets'
        ]
        if type not in types:
            response.data, response.status_code = 'Invalid gear type.', 400
        else:
            data = current_app.scrapper_manager.get('gears')
            if data is None:
                data = current_app.scrapper_manager.get_gears()
                current_app.scrapper_manager.insert(data)
            data = data['data'][types.index(type)]
            response.data = json.dumps(data)
            response.mimetype = 'application/json'
    except Exception as e:
        current_app.logger.exception(e)
        response.data, response.status_code = 'Internal Server Error.', 500
    finally:
        return response
def api_after(res: Response):
    if len(res.data) > 0:
        try:
            js = json.loads(res.data)
            js['code'] = res.status_code
            res.data = json.dumps(js).encode()
            if js['code'] != 200:
                logger.warning(f'response: {js}')
        except Exception as e:
            logger.error(e)
            logger.error(f'data: {res.data}')
    # print(res.data)
    return res
def gzip_response(self, data):
    """GZip response data and create a new Response instance."""
    gzip_buffer = BytesIO()
    gzip_file = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=gzip_buffer)
    gzip_file.write(b(data))
    gzip_file.close()
    response = Response()
    response.data = gzip_buffer.getvalue()
    response.headers['Content-Encoding'] = 'gzip'
    response.headers['Content-Length'] = len(response.data)
    return response
def trivial():
    h = {key: value for key, value in request.headers}
    resp_data = add_nonsense_response(request.data)
    resp = Response()
    for header_name, header_value in request.headers.items():
        resp.headers[header_name] = header_value
    resp.data = resp_data
    save_data("trivial", request.data, resp_data)
    ds.add_to_dataset(request.data)
    return resp
def __metrics(self):
    start_time = time.time()

    @after_this_request
    def to_do_after_this_request(response):
        record_log(request, response, logger=self.__http_logger)
        self.__record_metrics(start_time, request, response)
        return response

    resp = Response()
    try:
        resp.status_code = HTTPStatus.OK
        resp.content_type = CONTENT_TYPE_LATEST
        resp.data = generate_latest(self.__metrics_registry)
    except Exception as ex:
        # keep the HTTPStatus member in a local: resp.status_code is stored
        # as a plain int, which has no .phrase or .description attributes
        status = HTTPStatus.INTERNAL_SERVER_ERROR
        resp.status_code = status
        resp.content_type = 'text/plain; charset="UTF-8"'
        resp.data = '{0}\n{1}'.format(status.phrase, status.description)
        self.__logger.error(ex)
    return resp
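# A hedged sketch of the pieces the __metrics() view above depends on,
# assuming the prometheus_client package: a dedicated CollectorRegistry and
# a plain view function that could be registered with app.add_url_rule.
# The metric name and the commented wiring are illustrative assumptions.
from prometheus_client import (CollectorRegistry, Counter, generate_latest,
                               CONTENT_TYPE_LATEST)


def make_metrics_view(registry):
    requests_total = Counter('requests_total', 'Total HTTP requests served',
                             registry=registry)

    def metrics():
        requests_total.inc()
        return generate_latest(registry), 200, {'Content-Type': CONTENT_TYPE_LATEST}

    return metrics


# registry = CollectorRegistry()
# app.add_url_rule('/metrics', 'metrics', make_metrics_view(registry))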
def sparse_overlap():
    params = {"tfidf": True, "use_overlap": True}
    sparse_linker.set_params(params)
    response = Response()
    for header_name, header_value in request.headers.items():
        response.headers[header_name] = header_value
    response.data = sparse_linker.link_ttl(request.data)
    save_data("sparse_overlap", request.data, response.data)
    return response
def host_response():
    if not request.json:
        abort(400)
    action = str(request.json['action'])
    target = request.json['target']
    if action == 'blacklist':
        utils.flush_ipset_list('blacklist-user')
        for ip in target:
            with lock:
                if (len(ip) >= 7) and (ip not in homenet.user_blacklist) and (
                        ip not in homenet.user_whitelist):
                    homenet.user_blacklist.append(ip)
                    utils.add_ip_ipset_blacklist(ip, 'blacklist-user')
                    print "Added ", ip
        resp = Response()
        resp.status_code = 200
        return resp
    elif action == 'unblock':
        for ip in target:
            with lock:
                if (len(ip) >= 7) and (ip in homenet.user_blacklist):
                    utils.del_ip_ipset_blacklist(ip, 'blacklist-user')
        resp = Response()
        resp.status_code = 200
        return resp
    elif action == 'whitelist':
        for ip in target:
            if len(ip) >= 7:
                utils.del_ip_ipset_blacklist(ip, 'blacklist')
                utils.del_ip_ipset_blacklist(ip, 'blacklist-user')
                with lock:
                    if ip not in homenet.user_whitelist:
                        homenet.user_whitelist.append(ip)
        resp = Response()
        resp.status_code = 200
        return resp
    elif action == 'list':
        data = utils.list_ipset_blacklist(target)
        data = {'content': data[7:-1]}
        data = json.dumps(data)
        resp = Response()
        resp.data = data
        resp.status_code = 200
        resp.mimetype = "application/json"
        return resp
    else:
        abort(400)
def test_11_extended_body_tags(self): # setup realms self.setUp_user_realms() r = add_smtpserver(identifier="myserver", server="1.2.3.4", tls=False) self.assertTrue(r > 0) smtpmock.setdata(response={"*****@*****.**": (200, "OK")}, support_tls=False) g = FakeFlaskG() audit_object = FakeAudit() audit_object.audit_data["serial"] = "123456" g.logged_in_user = {"user": "******", "role": "admin", "realm": ""} g.audit_object = audit_object builder = EnvironBuilder(method='POST', data={'serial': "OATH123456"}, headers={}) env = builder.get_environ() # Set the remote address so that we can filter for it env["REMOTE_ADDR"] = "10.0.0.1" g.client_ip = env["REMOTE_ADDR"] req = Request(env) req.all_data = {"serial": "SomeSerial", "user": "******"} req.User = User("cornelius", self.realm1) resp = Response() resp.data = """{"result": {"value": true}, "detail": {"registrationcode": "12345678910"} } """ options = { "g": g, "request": req, "response": resp, "handler_def": { "conditions": { "serial": "123.*" }, "options": { "body": "your {registrationcode}", "emailconfig": "myserver" } } } un_handler = UserNotificationEventHandler() res = un_handler.do("sendmail", options=options) self.assertTrue(res)
def after_request(response): response.direct_passthrough = False if (not cache or # Content-Encoding be set if this is a cached response from before_request 'Content-Encoding' in response.headers or request.method != 'GET' or not (200 <= response.status_code < 300) or len(response.data) < 512 or not any( response.mimetype.startswith(r) for r in ['text/', 'application/javascript']) or 'gzip' not in request.headers.get('Accept-Encoding', '').lower() or # FIXME - HORRIBLE do this with decorators request.path.startswith('/peer_assess') or request.path.startswith('/view_marking') or request.path.startswith('/submission') or request.path.startswith('/survey') or request.path.startswith('/preference')): #print(request.path, 'not caching', response.status_code, response.mimetype, response.headers) return response gzip_buffer = IO() gzip_file = gzip.GzipFile(mode='wb', fileobj=gzip_buffer) gzip_file.write(response.data) gzip_file.close() cached_response = Response(status=response.status, content_type=response.content_type, mimetype=response.mimetype, headers=response.headers) cached_response.data = gzip_buffer.getvalue() cached_response.headers['Content-Encoding'] = 'gzip' cached_response.headers['Vary'] = 'Accept-Encoding' cached_response.headers['Content-Length'] = len(cached_response.data) if 'Cache-Control' in response.headers: if request.path.startswith('/static'): # file in /static/ which should have a query fragment with it size to invalidate caching if it changes # so give long expiry time timeout = 365 * 24 * 60 * 60 else: timeout = 12 * 60 * 60 elif request.path.startswith('/lab/') or request.path.startswith( '/assignment/'): # labs and assignments have autotest summaries updated regularly # solimit cache time to 1 hour timeout = 60 * 60 else: timeout = 3 * 60 * 60 response.cache_control.public = True response.cache_control.max_age = timeout cache_key = str(is_tutor()) + str(request.path) cached_response.headers['X-Cache-Key'] = cache_key # for debugging #print('cache set', cache_key, response.headers['Content-Type']) cache.set(cache_key, cached_response, timeout=timeout) return cached_response
def get_inventory(compatibility, ecoinvent_version, job_key, software):
    response = Response()
    response.status_code = 200
    d_uuids = get_list_uuids_countries()
    if job_key in d_uuids.keys():
        fp = r'data/inventories/quick_inventory_{}_{}_{}_{}.pickle'.format(
            d_uuids[job_key], software, ecoinvent_version, compatibility)
        pickled_data = open(fp, 'rb')
        data = pickle.load(pickled_data)
        pickled_data.close()
    else:
        job = Job.fetch(job_key, connection=conn)
        export = job.result[1]
        data = export.write_lci_to_excel(ecoinvent_version=ecoinvent_version,
                                         ecoinvent_compatibility=compatibility,
                                         software_compatibility=software,
                                         export_format="string")
    response.data = data

    if software == "brightway2":
        file_name = "carculator_inventory_{}_for_ei_{}_{}.xlsx".format(
            str(datetime.date.today()), ecoinvent_version, software)
    else:
        file_name = "carculator_inventory_{}_for_ei_{}_{}.csv".format(
            str(datetime.date.today()), ecoinvent_version, software)

    mimetype_tuple = mimetypes.guess_type(file_name)
    response_headers = {
        "Pragma": "public",  # required
        "Expires": "0",
        "Cache-Control": "must-revalidate, post-check=0, pre-check=0",
        "Content-Type": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        "Content-Disposition": 'attachment; filename="%s";' % file_name,
        "Content-Transfer-Encoding": "binary",
        "Content-Length": len(response.data),
    }
    if mimetype_tuple[1] is not None:
        # Response has no update() method; put the encoding into the header
        # dict so it is applied with the rest of the headers below.
        response_headers["Content-Encoding"] = mimetype_tuple[1]
    response.headers.update(response_headers)
    return response
def delete(self, bill_id=None):
    """
    Delete a bill.

    :param bill_id: the customer bill id
    :return:
    """
    response_obj = Response(mimetype='application/json')
    try:
        if bill_id:
            bill = Bill.query.get_or_404(int(bill_id))
            db.session.delete(bill)
            db.session.commit()
            logger.info("Delete Bill: Bill deleted successfully.")
            response_obj.data = json.dumps({"msg": "Bill deleted successfully"})
            print("Bill deleted")
            response_obj.status_code = 200
        else:
            db.session.query(Bill).delete()
            response_obj.data = json.dumps({"msg": "All Bills deleted successfully"})
            db.session.commit()
    except DatabaseError as de:
        logger.error("Delete order: Error while deleting order " + str(de))
        response_obj.data = json.dumps({"err_msg": "Bill ID doesn't exist"})
        response_obj.status_code = 400
    except Exception as e:
        logger.error("Delete Bill: Error while processing request.\n" + str(e) +
                     "\n" + str(traceback.print_exc()))
        response_obj.data = json.dumps({"err_msg": "Bill ID doesn't exist"})
        response_obj.status_code = 400
    return response_obj
def update_config():
    if request.method == 'POST':
        message = json.loads(request.get_data(as_text=True))
        token = message["token"]
        result = confirm_admin_token(token)  # there is no per-user unique key
        response = Response()
        response.headers['Content-Type'] = 'application/json'
        response.headers['Access-Control-Allow-Origin'] = '*'
        response.headers['Access-Control-Allow-Headers'] = '*'
        if request.method == 'OPTIONS':  # note: unreachable inside the POST branch
            return response
        return_json = {'code': 1, 'message': '网络异常', 'data': None}
        response.data = return_msg(return_json)
        if result == False:
            return response
        try:
            referer = request.headers.get('Referer', None)
            if referer is not None:
                referer = referer.replace('admin', 'index')
                message['frontend_url'] = referer
            if message.get('username'):  # email admin address
                message['server_address'] = get_smtp_url(message.get('username'))  # email smtp address
            orgconfig = utils.get_org_config()
            emailconfig = utils.get_email_config()
            for domain in message:
                if domain == "token" or len(str(message[domain])) == 0:
                    continue
                if domain in orgconfig:
                    orgconfig[domain] = message[domain]
                elif domain in emailconfig:
                    emailconfig[domain] = message[domain]
            utils.update_org_config(orgconfig)
            utils.update_email_config(emailconfig)
        except Exception as e:
            logger.error(e)
        return_json = {'code': 0, 'message': 'update config successfully', 'data': None}
        response.data = return_msg(return_json)
        return response
def paper_file(the_file):
    r = Response()
    mc = get_minio_client()
    r.mimetype = mimetypes.guess_type(the_file.path)[0]
    f = None
    try:
        f = mc.get_object(os.getenv('MINIO_BUCKET'), the_file.path)
        r.data = f.read()
    except Exception as e:
        print(e)
    finally:
        # guard against get_object() having failed before f was assigned
        if f is not None:
            f.close()
            f.release_conn()
    return r
def crimeByMonth():
    url = 'http://localhost:8080/cloudmesh/crime_finder/crimes/byday?no_of_types=10'
    responseR = requests.get(url, headers={"Content-Type": "application/json"})
    # data = json.dumps(responseR.json())

    # Response object
    response = Response()
    response.headers["status"] = 200
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    data = json.dumps(responseR.json())
    response.data = data
    return response
def crimeByYear():
    url = swaggerHostName + '/cloudmesh/crime_finder/crimes/byyear?no_of_types=10'
    responseR = requests.get(url, headers={"Content-Type": "application/json"})
    # data = json.dumps(responseR.json())

    # Response object
    response = Response()
    response.headers["status"] = 200
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    data = json.dumps(responseR.json())
    response.data = data
    return response
def raw_download(logged=False):
    if request.method == 'POST':
        league = request.values.get('league')
        group = request.values.get('group')
        year = request.values.get('year')

        response = Response()
        response.status_code = 200
        workbook = generate_xls.generate_xls_raw(league, group, year)
        output = StringIO.StringIO()
        workbook.save(output)
        response.data = output.getvalue()

        filename = 'results_raw.xls'
        mimetype_tuple = mimetypes.guess_type(filename)
        response_headers = Headers({
            'Pragma': "public",  # required
            'Cache-Control': 'private',  # required for certain browsers
            'Content-Type': mimetype_tuple[0],
            'Content-Disposition': 'attachment; filename=\"%s\";' % filename,
            'Content-Transfer-Encoding': 'binary',
            'Content-Length': len(response.data)
        })
        if mimetype_tuple[1] is not None:
            # Response has no update(); add the encoding to the headers that
            # are assigned to the response below.
            response_headers.update({'Content-Encoding': mimetype_tuple[1]})
        response.headers = response_headers
        response.set_cookie('fileDownload', 'true', path='/')
        return response
    else:
        if 'login' in session:
            logged = True
            labels = labels_getter(session)
            return render_template('raw_download.html',
                                   leagues=labels.get('leagues'), logged=logged)
        return render_template("index.html")
def allinfo():
    allinfo = {
        "Processor name": platform.processor(),
        "Total Disk": list(psutil.disk_usage('/'))[0],
        "Total RAM": list(psutil.virtual_memory())[0],
        "Free Disk": list(psutil.disk_usage('/'))[2]
    }
    sdata = json.dumps(allinfo)
    response = Response()
    response.headers["status"] = 200
    response.headers["Content-Type"] = "application/json; charset=utf-8"
    response.data = sdata
    return response
def update():
    # View function: values received from request.data are bytes and must be
    # decoded with decode('utf8')
    response = Response()
    if request.method == 'POST':
        containt = request.data.decode('utf8')
        # note: eval() on request data is dangerous; the original code relies on it
        containt = eval(containt)
        print(json.dumps(containt['json']))
        print(sc.update_object_sql(json.dumps(containt['json']),
                                   containt['info']['db'],
                                   containt['info']['table']))
        print(sc.commit_all())
        response.data = "成功"
        response.status_code = 200
        return response
    else:
        return 'way -> OPTIONS'
def user_detail():
    app.logger.info('Start Fetching User')
    token = request.headers['Authorization']
    user = database.user
    app.logger.info('Start Fetching User Inner')
    res = get_user_detail(token, user)
    response = Response(headers=RESPONSE_HEADERS, content_type='application/json')
    app.logger.info('End Fetching User Inner')
    response.data = res[0]
    response.status_code = res[1]
    app.logger.info('End Fetching User')
    return response
def handle_response(response: FlaskResponse) -> FlaskResponse:
    response.headers["_RV"] = str(version)

    PROJECT_VERSION = get_project_configuration("project.version", default="0")
    if PROJECT_VERSION is not None:
        response.headers["Version"] = str(PROJECT_VERSION)

    data_string = get_data_from_request()
    url = obfuscate_query_parameters(request.url)

    if (GZIP_ENABLE and not response.is_streamed and
            "gzip" in request.headers.get("Accept-Encoding", "").lower()):
        response.direct_passthrough = False
        content, headers = ResponseMaker.gzip_response(
            response.data,
            response.status_code,
            response.headers.get("Content-Encoding"),
            response.headers.get("Content-Type"),
        )
        if content:
            response.data = content
            response.headers.update(headers)

    resp = str(response).replace("<Response ", "").replace(">", "")
    ip = BaseAuthentication.get_remote_ip(raise_warnings=False)

    is_healthcheck = (ip == "127.0.0.1" and request.method == "GET" and url == "/api/status")

    if is_healthcheck:
        log.debug(
            "{} {} {}{} -> {} [HEALTHCHECK]",
            ip,
            request.method,
            url,
            data_string,
            resp,
        )
    else:
        log.info(
            "{} {} {}{} -> {}",
            ip,
            request.method,
            url,
            data_string,
            resp,
        )

    return response
def info(self, data):
    response = Response()
    text = """
    <html>
    <head>
    <style>
    * { font-size: 1.1em; background: white; }
    %s
    </style>
    """
    if os.path.isdir(request.path):
        text += """
        <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css">
        <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script>
        <script>
        $(function () {
            $.getJSON(encodeURI("%s?extradirinfo"), function (data, status, xhr) {
                $("body>b").each(function() {
                    var key = $(this).text().slice(0, -1)
                    if (key in data) {
                        $(this).next().html(data[key])
                    }
                })
            })
        })
        </script>
        """ % request.path
    text += """
    </head>
    <body>
    """
    for key, value in data:
        text += """
        <b>%s:</b> <span>%s</span><br/>
        """ % (key, value)
    text += """
    </body>
    </html>
    """
    response.data = text
    return response
def show_collections():
    resp = Response()
    query = get_query_from_request_args(request.args)
    if query:
        time = formatdate()
        results = get_results(dataset_url, query)
        if len(results):
            resp.headers[b'Content-Type'] = b'application/rss+xml;charset=utf-8'
            resp.data = render_template('feed.xml',
                                        api_server=api_server,
                                        api_path=api_path,
                                        query=query,
                                        results=results,
                                        time=time)
        else:
            resp.status_code = 404
            resp.data = 'Your query did not retrieve any records.'
    else:
        resp.status_code = 404
        resp.data = ('You must supply either a "table" parameter or a '
                     '"query" parameter in your query string')
    return resp
def box_identify():
    br = boxrouterManager()
    nm = networkManager()
    response = Response()
    response.headers['Access-Control-Allow-Origin'] = request.headers['Origin']
    response.data = json.dumps({
        'id': br.boxId,
        'name': nm.getHostname(),
        'version': VERSION
    })
    return response
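# The view above echoes the Origin header back by hand; a hedged sketch of
# doing the same once for every response via an after_request hook. The
# allowed-origin list is an illustrative assumption; the flask-cors
# extension is the usual production answer.
from flask import request

ALLOWED_ORIGINS = {'http://localhost:8080'}


def register_cors(app):
    @app.after_request
    def add_cors_headers(response):
        origin = request.headers.get('Origin')
        if origin in ALLOWED_ORIGINS:
            response.headers['Access-Control-Allow-Origin'] = origin
        return response

    return app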
def token():
    message = request.args.get('token')
    person_info = confirm_token(message)
    if person_info and person_info['status'] == 1:
        return_json = {'code': 0, 'data': person_info, 'message': 'success'}
    elif person_info and person_info['status'] > 1:
        return_json = {'code': 1, 'data': '',
                       'message': 'You have submitted your information successfully, please check your email'}
    else:
        return_json = {'code': 1, 'data': '', 'message': 'User does not exist'}
    response = Response()
    response.headers['Access-Control-Allow-Origin'] = '*'
    response.headers['Access-Control-Allow-Headers'] = '*'
    response.data = return_msg(return_json)
    return response
def test_03_sendsms(self):
    # setup realms
    self.setUp_user_realms()

    r = set_smsgateway(identifier="myGW",
                       providermodule="privacyidea.lib.smsprovider."
                                      "SmtpSMSProvider.SmtpSMSProvider",
                       options={"SMTPIDENTIFIER": "myserver",
                                "MAILTO": "*****@*****.**"})
    self.assertTrue(r > 0)

    smtpmock.setdata(response={"*****@*****.**": (200, "OK")},
                     support_tls=False)

    g = FakeFlaskG()
    audit_object = FakeAudit()
    audit_object.audit_data["serial"] = "123456"

    g.logged_in_user = {"user": "******",
                        "role": "admin",
                        "realm": ""}
    g.audit_object = audit_object

    builder = EnvironBuilder(method='POST',
                             data={'serial': "OATH123456"},
                             headers={})
    env = builder.get_environ()
    # Set the remote address so that we can filter for it
    env["REMOTE_ADDR"] = "10.0.0.1"
    g.client_ip = env["REMOTE_ADDR"]

    req = Request(env)
    req.all_data = {"serial": "SomeSerial", "user": "******"}
    req.User = User("cornelius", self.realm1)
    resp = Response()
    resp.data = """{"result": {"value": true}}"""
    options = {"g": g,
               "request": req,
               "response": resp,
               "handler_def": {"options": {"smsconfig": "myGW"}}}
    un_handler = UserNotificationEventHandler()
    res = un_handler.do("sendsms", options=options)
    self.assertTrue(res)
def get_gear_by_name(name):
    response = Response(response='OK', status=200)
    try:
        name_exists = False
        data = current_app.scrapper_manager.get('gears')
        if data is None:
            data = current_app.scrapper_manager.get_gears()
            current_app.scrapper_manager.insert(data)
        for weapon_type in data['data']:
            for gear in weapon_type['gears']:
                if gear['name'] == name:
                    gear['type'] = weapon_type['type']
                    response.data = json.dumps(gear)
                    response.mimetype = 'application/json'
                    name_exists = True
                    break
        if not name_exists:
            response.data, response.status_code = 'Gear not found.', 404
    except Exception as e:
        current_app.logger.exception(e)
        response.data, response.status_code = 'Internal Server Error.', 500
    finally:
        return response