def kmedoids(context):
    """Run k-medoids clustering on a remote data source and persist the
    per-row cluster assignments, a history record and a notification
    message to the database.

    context keys read here: rsrc, distinct, start, end, cols, args
    (algorithm kwargs).  Returns a JSON status string.
    """
    rawData = getDataFromSvr(context['rsrc'])
    if context['distinct']:
        rawData = dm.distinct(rawData)
    idList, data = processData(rawData, context['start'],
                               context['end'], context['cols'])
    dataList = convertDataToArr(data)
    dataList = preprocess(dataList, context)
    _, _, rawRes = dm.kmedoids(dataList, **context['args'])
    conn = config.getConn()
    cursor = conn.cursor()
    hid = dbAddHistory(cursor, context, 'cluster')
    result = []
    clusterId = 0
    # rawRes maps each medoid to the indices of its member rows.
    for medoid in rawRes.keys():
        for i in rawRes[medoid]:
            # BUG FIX: json.stringify does not exist in Python's json
            # module; json.dumps is the serializer.
            result.append((hid, idList[i], clusterId, json.dumps(dataList[i])))
        clusterId += 1
    dbWriteBack(cursor, result)
    dbAddMessage(cursor, context, hid)
    try:
        # Best effort: e-mail failure must not abort the mining job.
        sendMessageByEmail(cursor, context, hid)
    except Exception:
        pass
    conn.commit()
    cursor.close()
    conn.close()
    return json.dumps({'succ': True, 'msg': 'Done...'})
def kmeans(context):
    """Run scikit-learn KMeans on a remote data source and persist the
    per-row cluster labels, a history record and a notification message
    to the database.

    context keys read here: rsrc, distinct, start, end, cols, args
    (KMeans kwargs).  Returns a JSON status string.
    """
    rawData = getDataFromSvr(context['rsrc'])
    if context['distinct']:
        rawData = dm.distinct(rawData)
    idList, data = processData(rawData, context['start'],
                               context['end'], context['cols'])
    dataList = convertDataToArr(data)
    dataList = preprocess(dataList, context)
    from sklearn.cluster import KMeans
    clf = KMeans(**context['args'])
    clf.fit(dataList)
    conn = config.getConn()
    cursor = conn.cursor()
    hid = dbAddHistory(cursor, context, 'cluster')
    result = []
    # enumerate replaces the xrange(len(...)) index loop.
    for i, clusterLabel in enumerate(clf.labels_):
        # BUG FIX: json.stringify does not exist in Python's json
        # module; json.dumps is the serializer.
        result.append((hid, idList[i], clusterLabel, json.dumps(data[i])))
    dbWriteBack(cursor, result)
    dbAddMessage(cursor, context, hid)
    try:
        # Best effort: e-mail failure must not abort the mining job.
        sendMessageByEmail(cursor, context, hid)
    except Exception:
        pass
    conn.commit()
    cursor.close()
    conn.close()
    return json.dumps({'succ': True, 'msg': 'Done...'})
def main(req: func.HttpRequest) -> func.HttpResponse:
    """Azure Function: dump the incoming HTTP request (plus any uploaded
    files, zipped) into the 'uploads' blob container, then return 200 OK.

    The blob name is the UTC timestamp with a .zip or .txt suffix.
    """
    log('Generating request dump')
    connection_string = getenv("AZURE_STORAGE_CONNECTION_STRING")
    container_name = "uploads"
    # Timestamped name keeps every dump unique.
    file_name = datetime.now(timezone.utc).isoformat()
    request = {
        "url": req.url,
        "method": req.method,
        # NOTE(review): reads a private attribute of the headers object --
        # confirm no public accessor exists.
        "headers": req.headers.__http_headers__
    }
    if hasattr(req, "form") and len(req.form) > 0:
        request["form"] = req.form
    data = None
    if hasattr(req, "files") and len(req.files) > 0:
        from io import BytesIO
        from zipfile import ZipFile, ZIP_DEFLATED as deflated
        log('Creating zip file')
        mem_zip = BytesIO()
        with ZipFile(mem_zip, mode="w", compression=deflated) as zf:
            zf.writestr("request.txt", stringify(request))
            for file in req.files:
                file_data = req.files[file].stream.read()
                zf.writestr(file, file_data)
        data = mem_zip.getvalue()
        file_name += ".zip"
    else:
        data = stringify(request).encode('utf-8')
        file_name += ".txt"
    log('Accessing blob container')
    blob_service_client = BlobServiceClient.from_connection_string(
        connection_string)
    # BUG FIX: the original fetched a container client and immediately
    # overwrote the variable with create_container()'s result; neither
    # value was ever used.  Just ensure the container exists.
    try:
        blob_service_client.create_container(container_name)
    except ResourceExistsError:
        pass
    log('Writing to blob storage')
    blob_client = blob_service_client.get_blob_client(container=container_name,
                                                      blob=file_name)
    blob_client.upload_blob(data)
    #TODO: response rules
    return func.HttpResponse("OK")
def classify(context):
    """Train the requested classifier (svm / knn / naive_bayes) on one
    slice of the remote data source, predict labels for another slice,
    and persist results + history + notification to the database.

    Returns a JSON status string; e-mail delivery is best-effort.
    """
    from sklearn.neighbors import KNeighborsClassifier
    from sklearn.naive_bayes import MultinomialNB
    from sklearn.svm import SVC
    label = context['label']
    algo = context['algo']
    if algo == 'svm':
        classifier = SVC
    elif algo == 'knn':
        classifier = KNeighborsClassifier
    elif algo == 'naive_bayes':
        classifier = MultinomialNB
    else:
        # BUG FIX: 'assert False' is stripped under -O; raise explicitly.
        raise ValueError('unknown classifier algo: ' + str(algo))
    args = context['args']
    # sklearn under Python 2 rejects unicode keyword values; coerce.
    for k in args:
        if isinstance(args[k], unicode):
            args[k] = str(args[k])
    rawData = getDataFromSvr(context['rsrc'])
    if context['distinct']:
        rawData = dm.distinct(rawData)
    labelList, train = getTrainingSet(rawData, label, context['start'],
                                      context['end'], context['cols'])
    trainList = convertDataToArr(train)
    trainList = preprocess(trainList, context)
    idList, predict = getPredictSet(rawData, context['predictStart'],
                                    context['predictEnd'], context['cols'])
    predictList = convertDataToArr(predict)
    predictList = preprocess(predictList, context)
    clf = classifier(**args)
    clf.fit(trainList, labelList)
    rawRes = clf.predict(predictList)
    conn = config.getConn()
    cursor = conn.cursor()
    # Renamed from 'id' (shadowed the builtin) to match the sibling
    # cluster functions.
    hid = dbAddHistory(cursor, context, 'classify')
    result = []
    for i, predicted in enumerate(rawRes):
        # BUG FIX: json.stringify -> json.dumps.
        result.append((hid, idList[i], predicted, json.dumps(predict[i])))
    dbWriteBack(cursor, result)
    dbAddMessage(cursor, context, hid)
    try:
        # BUG FIX: this call referenced the undefined name 'hid' while
        # the local variable was 'id'; the NameError was silently
        # swallowed below, so the e-mail was never sent.
        sendMessageByEmail(cursor, context, hid)
    except Exception:
        pass
    conn.commit()
    cursor.close()
    conn.close()
    return json.dumps({'succ': True, 'msg': 'Done...'})
def writetheanswer(self, works, error="[NO ERROR]"):
    """Persist the current answer; on failure log the error and store a
    placeholder error answer instead."""
    if works:
        # Happy path: store the fetched answer with a success flag of 1.
        add_answer(stringify(self.answer), 1, self.id)
        return
    # Failure path: record the error, replace the answer with an error
    # payload and store it with a success flag of -1.
    self.log_error(f"[WRITE_THE_ANSWER] {error}")
    self.answer = {
        "answer": ['Couldn\'t fetch answer, please try again'],
        "domain": ['Error'],
        "success": 0
    }
    add_answer(stringify(self.answer), -1, self.id)
def setEmail(uid):
    """Store (upsert) the notification e-mail address for user *uid*,
    read from the posted form.  Returns a JSON status response."""
    email = request.form.get('email')
    if email is None:
        email = ""
    # NOTE(review): the pattern caps the TLD at 2-4 characters; many
    # newer TLDs are longer -- confirm the restriction is intended.
    if not re.match(r'^[\w\-\.]+?@(\w+?\.)+?\w{2,4}$', email):
        # BUG FIX: json.stringify -> json.dumps.
        return make_response(json.dumps({"succ": False,
                                         "msg": "Email format error!"}))
    conn = config.getConn()
    cur = conn.cursor()
    # Parameterized query -- user input is bound, not concatenated.
    sql = "replace into email (userid, email) values (%s,%s)"
    cur.execute(sql, (uid, email))
    conn.commit()
    cur.close()
    conn.close()
    return make_response(json.dumps({"succ": True}))
def read_incoming_serial():
    """Poll the serial port while connected and forward any received
    bytes to the websocket as a JSON 'serial' message."""
    global serialContacted
    global serialConnected
    global isWebsocketConnected, global_websocket
    # BUG FIX: 'while True and serialConnected' -- the 'True and' was
    # redundant.
    while serialConnected:
        # Renamed from 'buffer' (shadowed a Python 2 builtin).
        incoming = ser.read(ser.inWaiting())
        # initial handshake
        # if not serialContacted:
        #     ser.write('A\n')
        #     ser.write('\n')
        #     serialContacted = True
        # BUG FIX: "incoming is not ''" compared object identity, not
        # equality; plain truthiness covers both None/empty cases.
        if incoming:
            print(incoming + " !!!!!!")
            if isWebsocketConnected:
                tmpSerialData = {
                    'type': 'serial',
                    'data': incoming
                }
                # BUG FIX: json.stringify -> json.dumps.
                global_websocket.send(json.dumps(tmpSerialData))
        sleep(0.01)
def inform_server(self, x, y, color):
    """POST the pixel update (x, y, color) to the place server.

    The payload is serialized to JSON and sent with JSON headers so the
    server parses the body as JSON.
    """
    payload = {"x": x, "y": y, "color": color}
    requests.post(PLACE_URL, stringify(payload), headers=JSON_HEADERS)
def apriori(context):
    """Run Apriori association-rule mining on a remote data source and
    persist the rules, a history record and a notification message.

    Returns a JSON status string; e-mail delivery is best-effort.
    """
    rawData = getDataFromSvr(context['rsrc'])
    _, data = processData(rawData, context['start'],
                          context['end'], context['cols'])
    dataList = convertDataToArr(data)
    rawRes = dm.apriori(dataList, **context['args'])
    conn = config.getConn()
    cursor = conn.cursor()
    hid = dbAddHistory(cursor, context, 'assoc')
    result = []
    # enumerate replaces the manual 'count' counter.
    for count, row in enumerate(rawRes):
        # Human-readable "antecedent -> consequent" rule text.
        v = "{0} -> {1}".format(
            ', '.join(row[0]),
            ', '.join(row[1])
        )
        result.append((hid, count, row[2], v, row[3], row[4]))
    dbWriteBackAssoc(cursor, result)
    dbAddMessage(cursor, context, hid)
    try:
        # Best effort: e-mail failure must not abort the mining job.
        sendMessageByEmail(cursor, context, hid)
    except Exception:
        pass
    conn.commit()
    cursor.close()
    conn.close()
    # BUG FIX: json.stringify -> json.dumps.
    return json.dumps({'succ': True, 'msg': 'Done...'})
def download(url, file):
    """Download *url* into a zip archive at *file* (suffix forced to .zip).

    The archive holds the raw payload plus a "meta" member recording the
    source URL and the UTC download time.  If the target file already
    exists the download is skipped.
    """
    from json import dumps as stringify
    from shutil import copyfileobj
    from zipfile import ZipFile, ZIP_DEFLATED as COMPRESSION
    from pathlib import Path
    from datetime import datetime, timezone
    from contextlib import closing
    from urllib.request import urlopen
    file = Path(file).with_suffix(".zip")
    file.parent.mkdir(parents=True, exist_ok=True)
    if file.exists():
        print(F"{file.name} exists -- skipping download")
    else:
        with closing(urlopen(url=url)) as rd:
            with ZipFile(file, mode='w', compression=COMPRESSION, compresslevel=9) as zf:
                # NOTE(review): the payload is stored under an EMPTY
                # member name -- readers must open "" to retrieve it.
                # Confirm this is intentional rather than a missing name.
                with zf.open("", mode='w') as fd:
                    copyfileobj(rd, fd)
                # Provenance metadata alongside the payload.
                with zf.open("meta", mode='w') as fd:
                    meta = {
                        'source': url,
                        'datetime': datetime.now(tz=timezone.utc).isoformat(sep=' '),
                    }
                    fd.write(stringify(meta).encode())
        print(F"{file.name} downloaded")
def success(body = None, status = None, mime = None, headers = None, encoded = False, raw = False):
    """Build an API-Gateway-style Lambda proxy response dict.

    body    -- payload; non-str values are JSON-serialized via stringify
    status  -- optional HTTP status code
    mime    -- optional Content-Type header value
    headers -- optional dict of extra headers (mutated in place)
    encoded -- True marks the body as base64-encoded binary
    raw     -- unused here; kept for interface compatibility
    """
    # BUG FIX: the default was 'headers = {}', a mutable default shared
    # across calls -- the Content-Type/CORS headers set below leaked from
    # one invocation into every later one.
    if headers is None:
        headers = {}
    response = {}
    if body is not None:
        # JSON-stringify body if not already a string
        response['body'] = body if isinstance(body, str) else stringify(body)
    # set base64 flag for binary responses
    if encoded:
        response['isBase64Encoded'] = True
    # set HTTP status code, if provided
    if status:
        response['statusCode'] = status
    # set Content-Type header, if provided
    if mime:
        headers['content-type'] = mime
    # set CORS header to allow access from sigma-ui Lambda
    headers['access-control-allow-origin'] = 'https://emyhny0tlc.execute-api.us-west-2.amazonaws.com'
    # set headers and return
    response['headers'] = headers
    return response
def sendFeedback(self, fun, learning, recommend, overall):
    """Send a Kahoot end-of-game feedback packet over the controller
    channel.

    fun, learning, recommend, overall: the four survey ratings.
    """
    packet = [{
        "channel": "/service/controller",
        "clientId": self.clientID,
        "data": {
            "id": 11,
            "type": "message",
            "gameid": self.gameID,
            "host": consts.ENDPOINT_URI,
            # NOTE(review): JSON.stringify is not a Python stdlib API --
            # presumably a project alias for json.dumps; confirm the
            # 'JSON' name is actually defined in this module.
            "content": JSON.stringify({
                "totalScore": self.kahoot.totalScore,
                "fun": fun,
                "learning": learning,
                "recommend": recommend,
                "overall": overall,
                "nickname": self.kahoot.name
            })
        },
        # Message id is a monotonically increasing counter.
        "id": str(self.msgID)
    }]
    self.msgID += 1
    # Pacing delay before pushing the packet onto the socket.
    time.sleep(1)
    self.send(packet)
def getResultById(id):
    """Return the stored mining result for history entry *id* as a JSON
    response."""
    conn = config.getConn()
    cur = conn.cursor()
    result = dbGetResult(cur, id)
    cur.close()
    conn.close()
    # BUG FIX: json.stringify -> json.dumps.
    res = make_response(json.dumps({"succ": True, "data": result}))
    return res
def getEmail(uid):
    """Return the notification e-mail address stored for user *uid* as a
    JSON response."""
    conn = config.getConn()
    cur = conn.cursor()
    email = dbGetEmail(cur, uid)
    # NOTE(review): commit on a read path -- kept in case dbGetEmail
    # writes; confirm.
    conn.commit()
    cur.close()
    conn.close()
    # BUG FIX: json.stringify -> json.dumps.
    return make_response(json.dumps({"succ": True, "data": email}))
def notify(uid):
    """Return the number of unread messages for user *uid* as a JSON
    response."""
    conn = config.getConn()
    cur = conn.cursor()
    # Parameterized count of unread rows for this user.
    sql = "select count(*) from message where userid=%s and isread=0"
    cur.execute(sql, (uid,))
    num = cur.fetchall()[0][0]
    cur.close()
    conn.close()
    # BUG FIX: json.stringify -> json.dumps.
    return make_response(json.dumps({"succ": True, "unread": num}))
def getHistoryById(uid, id):
    """Return one mining-history record for user *uid* as a JSON
    response (Content-Type set explicitly)."""
    conn = config.getConn()
    cur = conn.cursor()
    result = dbGetHistory(cur, uid, id)
    cur.close()
    conn.close()
    # BUG FIX: json.stringify -> json.dumps.
    res = make_response(json.dumps({"succ": True, "data": result}))
    res.headers["Content-Type"] = "application/json"
    return res
def insert(table, data):
    """Insert *data* into *table*.

    Returns the last insert id on success, or False on failure (the
    failure is logged, never raised).
    """
    last = False
    qstr, key_list, values = _insert(table, data)
    try:
        res, last = run(qstr, values, with_last=True)
    except Exception:
        # BUG FIX: was 'except BaseException', which also swallowed
        # KeyboardInterrupt/SystemExit; json.stringify -> json.dumps.
        _log.warn("Unable to insert a record {}: {}".format(
            qstr, json.dumps(values)))
    return last
def lambda_handler(event, context):
    """SQS-triggered Lambda: POST each record body to the API endpoint;
    report the response body via context.fail on non-200 status."""
    http = urllib3.PoolManager()
    for record in event['Records']:
        print("test start")
        payload = record["body"]
        print(str(payload))
        # NOTE(review): 'https://{{api_url}}' looks like an unexpanded
        # template placeholder -- confirm it is substituted at deploy time.
        r = http.request('POST', 'https://{{api_url}}',
                         body=payload,
                         headers={'Content-Type': 'application/json'})
        if r.status != 200:
            # BUG FIX: json.stringify -> json.dumps; r.data is bytes, so
            # decode before serializing.
            context.fail(json.dumps(r.data.decode('utf-8', 'replace')))
def markMessage(uid, id):
    """Mark messages as read for user *uid*.

    id == 0 marks every message; any other id marks just that message.
    Returns a JSON status response.
    """
    conn = config.getConn()
    cur = conn.cursor()
    sql = "update message set isread=1 where userid=%s"
    params = [uid]
    if id != 0:
        # BUG FIX: the id was concatenated into the SQL string; bind it
        # as a parameter like the rest of the query.
        sql += " and id=%s"
        params.append(id)
    cur.execute(sql, params)
    conn.commit()
    cur.close()
    conn.close()
    # BUG FIX: json.stringify -> json.dumps.
    return make_response(json.dumps({"succ": True}))
def callSendAPI(messageData):
    """POST a message payload to the Facebook Graph Send API (v2.6)."""
    # BUG FIX: json.stringify -> json.dumps.
    body = json.dumps(messageData)
    # BUG FIX: requests needs an absolute URL including the scheme; the
    # original passed a bare host+path string.
    url = ("https://graph.facebook.com/v2.6/me/messages?access_token="
           + PAGE_ACCESS_TOKEN)
    # BUG FIX: the original built an 'options' dict with these headers
    # but never passed them to the request.
    headers = {'Content-Type': 'application/json'}
    x = requests.post(url, data=body, headers=headers)
    # BUG FIX: console.log is JavaScript; use print.
    print(x)
def savelist():
    """Read a shopping-list item from the JSON request body and echo it
    back as JSON."""
    it = request.json.get('item')
    quant = request.json.get('quantity')
    prior = request.json.get('priority')
    sto = request.json.get('store')
    sec = request.json.get('section')
    pri = request.json.get('price')
    # BUG FIX: the keys were bare names (JavaScript object syntax),
    # which raises NameError in Python -- quote them; and
    # json.stringify -> json.dumps.
    return json.dumps({
        'item': it,
        'quantity': quant,
        'priority': prior,
        'store': sto,
        'section': sec,
        'price': pri
    })
def iris(token, proj):
    """Return the sklearn Iris dataset as JSON rows, feature values
    truncated to one decimal place."""
    from sklearn import datasets
    iris = datasets.load_iris()
    r = []
    # enumerate replaces the xrange(len(...)) index loop.
    for i, row in enumerate(iris.data):
        elem = {
            "id": i,
            # int(x * 10) / 10.0 truncates (not rounds) to 1 dp.
            "col0": int(row[0] * 10) / 10.0,
            "col1": int(row[1] * 10) / 10.0,
            "col2": int(row[2] * 10) / 10.0,
            "col3": int(row[3] * 10) / 10.0,
            # BUG FIX: the target is a numpy integer, which json cannot
            # serialize -- convert to a plain int.
            "label": int(iris.target[i])
        }
        r.append(elem)
    # BUG FIX: json.stringify -> json.dumps.
    return json.dumps({'Iris': r})
def cart(token, proj):
    """Return the 'cart' demo dataset as JSON rows in a response."""
    from dm import loadCart
    cart = loadCart()
    r = []
    # enumerate replaces the manual 'id' counter (which also shadowed
    # the builtin).
    for rowId, row in enumerate(cart):
        r.append({
            "id": rowId,
            "col0": row[0],
            "col1": row[1],
            "col2": row[2],
            "col3": row[3]
        })
    # BUG FIX: json.stringify -> json.dumps.
    return make_response(json.dumps({"Cart": r}))
def login():
    """Validate posted credentials against the users table.

    NOTE(review): the connection string and query below were
    secret-scrubbed ("******") before this file was captured; as written
    this is not valid Python and cannot run.  Restore the credential
    interpolation -- and switch to a parameterized query, since the
    original concatenated user input straight into SQL -- before use.
    """
    user = request.form['username']
    password = request.form['password']
    connection = pg.connect("host='" + DB['host'] + "' dbname=" + DB['dbname'] + " user="******" password='******'password'] + "'")
    df = pd.read_sql_query("select user_id from users where username='******' and password='******';", con=connection)
    # Exactly one matching row -> success; anything else -> invalid.
    resp = {
        'success': True,
        'user_id': 1
    } if len(df) == 1 else {
        'success': False,
        'message': 'invalid credentials'
    }
    # NOTE(review): json.stringify does not exist in Python's json
    # module (json.dumps is the serializer) -- left untouched pending
    # the credential reconstruction above.
    return Response(response=json.stringify(resp), status=200)
def serialize(so, val, no_marker=None):
    """Serialize the fields of *val* (a memo-style object) into the
    serialization stream *so*, appending an object-end marker unless
    *no_marker* is truthy.

    Raises on unknown fields and on malformed MemoData values.
    """
    keys = []
    for key in val:
        # Ignore lowercase field names - they're non-serializable fields by
        # convention.
        # NOTE(review): 'return' aborts the WHOLE serialization at the
        # first lowercase key; a per-key skip would be 'continue' --
        # confirm which is intended (left as-is).
        if key[0] == key[0].lower():
            return
        #Check the field
        if not INVERSE_FIELDS_MAP.__contains__(key):
            raise Exception('JSON contains unknown field: "' + key + '"')
        keys.append(key)
    # Sort fields
    keys = sort_fields(keys)
    # store that we're dealing with json
    isJson = (val.__contains__('MemoFormat') and val.MemoFormat == 'json')
    i = 0
    while i < len(keys):
        key = keys[i]
        value = val[key]
        if key == 'MemoType' or key == 'MemoFormat':
            # MemoType and MemoFormat are always ASCII strings
            value = stringToHex(value)
        # MemoData can be a JSON object, otherwise it's a string
        elif key == 'MemoData':
            if not isinstance(value, str):
                if (isJson):
                    try:
                        # BUG FIX: json.stringify -> json.dumps.  The old
                        # AttributeError was caught below and re-raised
                        # as a misleading 'invalid JSON' error.
                        value = stringToHex(json.dumps(value))
                    except Exception:
                        raise Exception('MemoFormat json with invalid JSON in MemoData field')
                else:
                    raise Exception('MemoData can only be a JSON object with a valid json MemoFormat')
            elif isinstance(value, str):
                value = stringToHex(value)
        serialize(so, key, value)
        i += 1
    if (not no_marker):
        #Object ending marker
        STInt8.serialize(so, 0xe1)
def getMessage(uid, isread):
    """List a user's messages as JSON, optionally filtered by read state.

    isread: 1 -> only read, 0 -> only unread, anything else -> all.
    """
    conn = config.getConn()
    cur = conn.cursor()
    sql = "select id,content,tm,isread from message where userid=%s"
    if isread == 1:
        sql += " and isread=1"
    elif isread == 0:
        sql += " and isread=0"
    sql += " order by id desc"
    cur.execute(sql, (uid,))
    result = cur.fetchall()
    cur.close()
    conn.close()
    newResult = []
    for row in result:
        newResult.append({
            "id": row[0],
            "content": row[1],
            # tm may be a datetime; stringify for JSON.
            "tm": str(row[2]),
            "isread": 1 if row[3] else 0
        })
    # BUG FIX: json.stringify -> json.dumps.
    return make_response(json.dumps({"succ": True, "data": newResult}))
# for i in textacy.keyterms.textrank(doc, normalize='lemma'):
#     print (i)


def cleanup(s):
    """Strip citation markers like '[3]', collapse whitespace and drop a
    trailing period from *s*."""
    strip_refs = re.compile("\.?\[\d+\]?")
    s = strip_refs.sub("", s).strip()
    s = re.sub(r'\[[0-9]*\]', ' ', s)
    s = re.sub(r'\s+', ' ', s)
    # BUG FIX: guard against an empty string -- s[-1] raised IndexError
    # when everything was stripped away.
    if s and s[-1] == ".":
        s = s[0:-1]
    return s


#print (l)
# Extract semistructured (subject, verb, fact) statements for each
# entity in l and collect the cleaned fact texts.
m = []
for i in l:
    statements = textacy.extract.semistructured_statements(doc, i)
    for statement in statements:
        subject, verb, fact = statement
        fact = cleanup(str(fact))
        #print(fact)
        m.append(fact)
#print (m)
i = (json.dumps({ARTICLE: m}))
r = ((json.loads(i)))
# BUG FIX: json.stringify -> json.dumps.
print(json.dumps(r))
# print("Did you know this...")
# for statement in statements:
#     subject, verb, fact = statement
#     fact = cleanup(str(fact))
#     print(fact)
def __str__(self):
    """Render the array as 'Array(<JSON of its content>)'.

    BUG FIX: the original referenced the bare name 'content' (a
    NameError) and called the nonexistent json.stringify; read the
    instance attribute and serialize with json.dumps instead.
    NOTE(review): confirm the attribute is named 'content' on this class.
    """
    return 'Array({content})'.format(content=json.dumps(self.content))
def parallel_load_image_tensor(self, image_file_paths, output_image_dimensions,
                               loading_config):
    """Load and standardize images in parallel worker processes.

    image_file_paths        -- iterable of image paths
    output_image_dimensions -- (height, width) every image must match
    loading_config          -- dict: multi_core_count, timeout_secs,
                               chunk_size
    Returns (stacked image array, per-image success bool array).
    Failed images are logged and recorded as unsuccessful, not raised.
    """
    standarized_images = []  # np.array([]).reshape([0, output_image_dimensions[0], output_image_dimensions[1], 3])
    is_successful_array = []
    tasks = self.construct_parallel_task_arguments(
        image_file_paths, output_image_dimensions)
    with ProcessPool(
            max_workers=loading_config["multi_core_count"]) as pool:
        future = pool.map(parallel_load_and_standardize_image,
                          tasks,
                          timeout=loading_config["timeout_secs"],
                          chunksize=loading_config["chunk_size"])
        iterator = future.result()
        i = 0
        while True:
            try:
                standardized_image = None
                standardized_image, is_successful = next(iterator)
                # Validate each worker result before accepting it.
                if not is_successful:
                    print("returned unsuccessfully")
                    raise Exception("result returned false")
                if standardized_image is None:
                    raise Exception("No image returned")
                if standardized_image.shape[1:3] != (
                        output_image_dimensions[0],
                        output_image_dimensions[1]):
                    raise Exception("dimension mismatch")
                if standardized_image.shape[3] != 3:
                    raise Exception("not RGB image")
                standarized_images.append(standardized_image)
                is_successful_array.append(is_successful)
            except StopIteration:
                break
            except Exception as e:
                print("exception")
                print(e)
                print(image_file_paths[i])
                if (standardized_image is not None) and hasattr(
                        standardized_image, 'shape'):
                    # BUG FIX: json.stringify -> json.dumps (the old call
                    # raised AttributeError inside this handler).
                    print("target dimensions: " +
                          json.dumps(output_image_dimensions))
                    print("actual dimensions: " +
                          json.dumps(standardized_image.shape))
                is_successful_array.append(False)
            print("RESULTS PROCESSED = " + str(i + 1) + " / " +
                  str(len(list(image_file_paths))))
            i = i + 1
    print("finished standardizing images")
    print("total images retrieved = " + str(len(standarized_images)))
    standarized_images = np.concatenate(standarized_images)
    is_successful_array = np.array(is_successful_array)
    return standarized_images, is_successful_array
def output_json(data, code, headers=None):
    """Build a response whose body is *data* serialized as JSON, with
    HTTP status *code* and any extra *headers* applied."""
    from json import dumps as stringify
    response = make_response(stringify(data), code)
    response.headers.extend(headers if headers else {})
    return response
def login():
    """Stub login endpoint: always succeeds with a fixed account id."""
    # BUG FIX: json.stringify does not exist in Python's json module;
    # json.dumps is the serializer.
    return json.dumps({"error": None, "id": 233837063867287})
def mining(uid, token, proj, rsrc):
    """HTTP entry point: validate form parameters, build the mining
    context and launch the requested algorithm on a background thread.

    Returns a JSON response immediately; the algorithm runs async.
    """
    res = make_response()
    res.headers['Content-Type'] = "application/json"
    # Parse and validate parameters.
    for e in [token, proj, rsrc]:
        if not re.match(r'^\w+$', e):
            # BUG FIX (throughout): json.stringify -> json.dumps and
            # json.parse -> json.loads.
            res.data = json.dumps({'succ': False, 'msg': 'Rsrc invalid!'})
            return res
    # title
    title = request.form.get('title')
    if not title:
        res.data = json.dumps({'succ': False, 'msg': 'No title!'})
        return res
    # cols=["col0","col1","col2", ...]
    cols = request.form.get('cols')
    if cols:
        cols = json.loads(cols)
        # NOTE(review): assert-based validation disappears under -O;
        # consider an explicit check.
        assert isinstance(cols, list)
    else:
        cols = []
    # start
    start = request.form.get('start')
    if start:
        start = int(start)
    else:
        start = 0
    # count
    count = request.form.get('count')
    if count:
        count = int(count)
        end = start + count
    else:
        end = None
    algo = request.form.get('algo')
    args = request.form.get('args')
    print(args)
    if args:
        args = json.loads(args)
        assert isinstance(args, dict)
    else:
        args = {}
    # TODO: filter args
    context = {
        "user": uid,
        "title": title,
        "rsrc": token + '/' + proj + '/' + rsrc,
        "cols": cols,
        "start": start,
        "end": end,
        "algo": algo,
        "args": args,
    }
    if isClassify(algo):
        # predictStart
        predictStart = request.form.get('predictStart')
        if predictStart:
            predictStart = int(predictStart)
        else:
            predictStart = 0
        # predictCount
        predictCount = request.form.get('predictCount')
        if predictCount:
            predictCount = int(predictCount)
            predictEnd = predictStart + predictCount
        else:
            predictEnd = None
        label = request.form.get('label')
        if label is None:
            label = ""
        if not re.match(r'^\w+$', label):
            # CONSISTENCY FIX: this error path returned a bare JSON
            # string while every other one returns the response object.
            res.data = json.dumps({'succ': False, 'msg': 'Label invalid!'})
            return res
        context['predictStart'] = predictStart
        context['predictEnd'] = predictEnd
        context['label'] = label
    if not isAssoc(algo):
        absence = request.form.get('absence')
        fillval = request.form.get('fillval')
        if fillval is None:
            fillval = 0
        formal = request.form.get('formal')
        distinct = request.form.get('distinct') == 'true'
        context['absence'] = absence
        context['fillval'] = fillval
        context['formal'] = formal
        context['distinct'] = distinct
    # Dispatch to the concrete algorithm implementation.
    funcDict = {
        "kmeans": kmeans,
        "kmedoids": kmedoids,
        "apriori": apriori,
        "naive_bayes": classify,
        "knn": classify,
        "svm": classify
    }
    func = funcDict.get(algo)
    if not func:
        res.data = json.dumps({'succ': False, 'msg': 'Unknown algo!'})
        return res
    # Run the (potentially long) mining job off the request thread.
    Thread(target=func, args=(context,)).start()
    res.data = json.dumps({'succ': True, 'msg': 'Done...'})
    return res
import json # Note: It's not possible to return 'undefined' and drop an attribute. def replacer(key, value): if key == '': return value elif key == 'height': return value * 100 elif key == 'favorite': return None else: return value x = json.parse( '{"name":"David", "height":1.8542, "favorite":6, "male":true, "other":null}' ) print json.stringify(x) print json.stringify(x, None) print json.stringify(x, None, 5) print json.stringify(x, replacer, 5) # This gives a TypeError 'function' does not support indexing #x = json.parse['["David"]']
import json # Note: It's not possible to return 'undefined' and drop an attribute. def replacer(key, value): if key == '': return value elif key == 'height': return value * 100 elif key == 'favorite': return None else: return value x = json.parse('{"name":"David", "height":1.8542, "favorite":6, "male":true, "other":null}') print json.stringify(x) print json.stringify(x, None) print json.stringify(x, None, 5) print json.stringify(x, replacer, 5) # This gives a TypeError 'function' does not support indexing #x = json.parse['["David"]']
# NOTE(review): this chunk starts mid-script -- 'response', 'token' and
# addPlugin() are defined in earlier, unseen code.  Python-2 print
# statements throughout.
if response.status_code != 201:
    print "Error: posting plugin failed."
    print response.text;
#print plugincode;

# Sample plugin source (immediately overwritten by the file contents
# read below).
plugincode = "http().get(function(request){ return {status: 200, content: 'Hello World!'};});";
pluginname = "instapp.asd";
# NOTE(review): file handle is never closed, and json.stringify does not
# exist in Python's json module (json.dumps is the serializer) -- this
# line raises AttributeError as written.
file = open('instapp.enabled', 'r')
plugincode = file.read()
plugincode = json.stringify(plugincode)
print plugincode
addPlugin(pluginname , plugincode, "true", "javascript", token);


def putPlugin( name, code, token ):
    """PUT updated plugin *code* to the local admin API, authenticating
    with the session *token*."""
    headers = { "X-BB-SESSION": token, "content-type": "application/json" }
    url = "http://localhost:9000/admin/plugin/" +name
    # NOTE(review): payload is built by string concatenation -- quotes or
    # backslashes in *code* will produce invalid JSON; confirm callers
    # pre-escape (the caller above runs it through a JSON serializer).
    payload = '{ "code": "' +code +'"}'
    response = requests.put( url, data = payload, headers = headers )
    # NOTE(review): 201 Created is unusual for PUT updates -- confirm the
    # API's success status.
    if response.status_code != 201:
        print "Error: putting plugin failed."
        print response.text;