def go():
    # Secondary window
    MongoDB.extraAllCaption2txt()
    window = tk.Tk()
    window.title("次窗口")
    window.geometry("600x800")
    # "Word list" button
    button3 = tk.Button(window, text="单词表", font=("Arial", 12), width=40, height=5, command=Graph.makeDict)
    button3.place(x=100, y=150)
    # "Word cloud" button
    button4 = tk.Button(window, text="词云图", font=("Arial", 12), width=40, height=5, command=Graph.word_cloud)
    button4.place(x=100, y=300)
    v = tk.StringVar()
    e = tk.Entry(window, textvariable=v, width=25)  # the Entry whose value is read by histograph()
    e.place(x=100, y=460)

    def histograph():
        Graph.paintDiag(int(e.get()), 12)

    # "Histogram" button
    label1 = tk.Label(window, text="请填写单词长度")
    label1.place(x=100, y=435)
    button5 = tk.Button(window, text="柱状图", font=("Arial", 12), width=7, height=3, command=histograph)
    button5.place(x=400, y=450)
    # Submit-plot button
    window.mainloop()
class MQTT_OTP_Subscriber:
    def __init__(self, topic):
        self.limit = 0           # track up to 5 authentication attempts
        self.result_msg = None   # OTP authentication result
        self.topic = topic
        # Create a MongoDB object to store the OTP result
        self.mongo = MongoDB()

        client = mqtt.Client()
        client.connect("localhost", 1883, 60)
        client.on_connect = self.on_connect
        client.on_message = self.on_message
        client.loop_start()

    # Subscribe to self.topic
    def on_connect(self, client, userdata, flags, rc):
        print("터틀봇의 답변을 기다리고 있습니다 ▼")
        client.subscribe(self.topic)

    # Wait for messages
    def on_message(self, client, userdata, msg):
        input_data = msg.payload.decode()
        answer_lst = {"1": "Success", "0": "Fail", "3": "Time_Over"}
        self.result_msg = answer_lst[input_data]
        if input_data == "0":
            self.limit += 1
            if self.limit == 5:
                self.result_msg = "Real Fail"
        print("Result ▶ ", self.result_msg)
        self.mongo.storeStr_otp(self.result_msg)  # store the OTP result in MongoDB
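# Usage sketch (illustrative, not from the original sources): polling the subscriber
# defined above for an OTP result. Assumes a paho-mqtt broker on localhost and the
# MongoDB() wrapper from the snippet; loop_start() keeps the network loop running in
# a background thread, so the caller only has to watch result_msg.
import time

otp_sub = MQTT_OTP_Subscriber("otp_result")
while otp_sub.result_msg is None:
    time.sleep(0.5)  # wait until a result message arrives on the "otp_result" topic
print("final OTP result:", otp_sub.result_msg)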
def createDataBaseCIDs():
    """
    Builds the database of CIDs (ICD-10 codes) that will be used
    function()
    """
    with open('cid-10.txt', 'r+') as f:
        grouping = ''
        for line in f.readlines():
            line = line.strip()
            s = line.split(') - ')
            s[0] = s[0].replace('(', '')
            if '-' in s[0]:
                group = s[0]
                group_description = s[1]
            if '-' not in s[0] and len(s) > 1:
                d = {}
                d['type'] = 'cid'
                d['version'] = 10
                d['code'] = s[0]
                d['neoplasms'] = s[1]
                d['neoplasms_group'] = group
                d['neoplasms_group_description'] = group_description
                # print(d)
                MongoDB.insert_item(d)
    f.closed  # no-op: the with-block already closed the file
def getDataMacroRegion():
    """
    Returns a dictionary with all the macro-regions
    function() -> dictionary
    """
    # from scipy.spatial.distance import euclidean
    dic = {}
    lista = MongoDB.list_all_macro_regions()
    for i in lista:
        d = {}
        if i['region'] not in dic:
            dic[i['region']] = {}
        for j in lista:
            if i['region'] != j['region']:
                # d[j['region']] = float("{0:.4f}".format(euclidean([i['lng'], i['lat']], [j['lng'], j['lat']])))
                d[j['region']] = float("{0:.3f}".format(haversine(i['lng'], i['lat'], j['lng'], j['lat'])))
        dic[i['region']] = d

    lista = MongoDB.list_all_macro_regions()
    for i in lista:
        dic[i['region']]['Price/m²'] = float("{0:.2f}".format(i['price_by_meter']))
        dic[i['region']]['Residents'] = int(i['residents'])
        dic[i['region']]['Occupied Private Housing'] = int(i['occupied_private_housing'])
        dic[i['region']]['Residents Per Household'] = float("{0:.2f}".format(i['residents_per_household']))
        dic[i['region']]['Hospital'] = int(i['hospital'])
        dic[i['region']]['UBS'] = int(i['ubs'])
        dic[i['region']]['UPA'] = int(i['upa'])
        dic[i['region']]['University'] = int(i['university'])
        dic[i['region']]['Bank'] = int(i['bank'])
        dic[i['region']]['Shopping'] = int(i['shopping'])
        dic[i['region']]['Delegacy'] = int(i['delegacy'])
        dic[i['region']]['Market'] = int(i['market'])
        dic[i['region']]['School'] = int(i['school'])
    return dict(sorted(dic.items()))
class Find_person:
    def __init__(self):
        # Create the required helper objects
        self.otp_client = MQTT_OTP_Subscriber("otp_result")
        self.yolo_checker = Yolo_checker()
        self.mongo = MongoDB()
        # Flag: has a person already been detected?
        self.flag = 0
        # Open the TurtleBot video stream
        self.cap = cv2.VideoCapture(
            'http://192.168.0.32:8080/stream?topic=/usb_cam/image_raw')

    def check_person(self):
        try:
            while True:
                ret, frame = self.cap.read()

                # 1. When YOLO detects a person, start OTP authentication
                if self.yolo_checker.isPerson(frame) and self.flag == 0:
                    self.flag = 1
                    print(" Find person ---------> OTP ON ")
                    print("___________________ 사람을 발견했습니다. OTP 인증 시도 ___________________")

                    # 1-0. Notify the TurtleBot that OTP has started
                    import paho.mqtt.client as mqtt
                    mqtt = mqtt.Client("OTP")        # create the MQTT client
                    mqtt.connect("localhost", 1883)  # connect to the local MQTT broker
                    mqtt.publish("otp_start", json.dumps({"data": "start"}))  # topic and payload

                # 2. OTP authentication mode: receive and interpret the (MQTT) reply
                otp_result = self.otp_client.result_msg
                if otp_result == "Success":
                    print("▶ OTP 인증 성공 ")
                    return "Success"
                elif otp_result == "Time_Over":
                    print("▶ OTP 시간 초과 ")
                    self.mongo.storeImg_otp(frame, 'intruder.jpg')  # image to store and its name
                    return "Time_Over"  # resume tracking with images from the drone

                if self.otp_client.limit == 5:
                    # 2-1. After 5 failed attempts, store a snapshot of the scene in MongoDB
                    print("▶ OTP 관리자 확인 요망 ")
                    self.mongo.storeImg_otp(frame, 'intruder.jpg')  # image to store and its name
                    return "Real_Fail"  # resume tracking with images from the drone

                # .......... debugging
                # cv2.imshow("origin", frame)
                # if cv2.waitKey(1) & 0xFF == ord('q'):
                #     break
        except:
            pass
def analysis(request):
    if request.method == 'POST':
        num_centroids = int(request.POST.get('num_centroids'))
        num_points = int(request.POST.get('num_points'))

        pc = MongoDB.list_all_postal_codes()
        list_analysis = []
        i = 0
        while i < num_points:
            r = random.randint(0, (len(pc) - 1))
            list_analysis.append(pc.pop(r))
            i += 1

        points, centroids = k_means.k_means_lists(num_centroids, 20, list_analysis)
        dic = k_means.sse(points, centroids)

        regions, table = viewLists.listTableMoreInputs()
        t = get_template('analysis.html')
        html = t.render(
            Context({
                'regions': regions,
                'table': table,
                'center': "-23.2062436,-45.900007",
                'localizations': points,
                'colors': ColorsRandom.generate_colors(num_centroids),
                'centroids': centroids,
                'sse': dic,
                'hospitals': MongoDB.list_all_healths(),
                'csrf_token': csrf(request)['csrf_token'],
            })
        )
        html = html.replace("&#39;", "'")  # unescape apostrophes rendered as HTML entities
        return HttpResponse(html)
        # return render_to_response(
        #     'index.html',
        #     context_instance=RequestContext(request))

    regions, table = viewLists.listTableMoreInputs()
    t = get_template('index.html')
    html = t.render(
        Context({
            'regions': regions,
            'table': table,
            'csrf_token': csrf(request)['csrf_token'],
        })
    )
    html = html.replace("&#39;", "'")  # unescape apostrophes rendered as HTML entities
    return HttpResponse(html)
def make_answer(user_id, user_text, user_answer_step=0):
    if user_answer_step == 0:
        requests.post(TOKEN + 'sendMessage', data={
            'chat_id': user_id,
            'text': '"' + user_text + '"' + ' This is your remind'
        })
        set_remind_text(user_id, user_text)
        requests.post(TOKEN + 'sendMessage', data={
            'chat_id': user_id,
            'text': 'Please write the date, like "month/day/hour:minute"'
        })
        requests.post(TOKEN + 'sendMessage', data={
            'chat_id': user_id,
            'text': 'Example: 3:24/8:24'
        })
        set_user_answer_step(user_id, 1)

    if user_answer_step == 1:
        requests.post(TOKEN + 'sendMessage', data={
            'chat_id': user_id,
            'text': '"' + user_text + '"' + ' This is your date'
        })
        requests.post(TOKEN + 'sendMessage', data={
            'chat_id': user_id,
            'text': 'Trying to set your remind...'
        })
        if Current_time.check_datatime_valid(user_text):
            set_remind_time(user_id, user_text)
            requests.post(TOKEN + 'sendMessage', data={
                'chat_id': user_id,
                'text': 'Your remind was added, good luck!'
            })
            set_user_answer_step(user_id, 0)
        else:
            requests.post(TOKEN + 'sendMessage', data={
                'chat_id': user_id,
                'text': 'This date is not valid, please try again.'
            })
            db.delete_last_remind()
            set_user_answer_step(user_id, 0)
def getNiceTextData(web, index):
    req = r'<div class="sneakerItem"(.+?)</div></div></div>'
    name = r'<div class="bottom">(.+?)$'
    number = r'<div class="count">(.+?)[^0-9]+</div></div>'
    price = r'<div class="num">(.+?)</div></div>'
    id = r'gid="(.+?)"'
    matchlist = re.findall(req, web.data.decode("UTF-8"), re.S)
    for i in matchlist:
        nameR = re.findall(name, i, re.S)[0]
        priceR = re.findall(price, i, re.S)[0]
        numberR = re.findall(number, i, re.S)[0]
        idR = re.findall(id, i, re.S)[0]
        MongoDB.insert("nice", nameR, priceR, numberR, index)
def index():
    if request.method == 'POST':
        try:
            # Check whether an entry with the same name is already present
            searchString = request.form['content'].replace(" ", "")
            if MongoDB.mongoDB(searchString) == 'NOT EXIST':
                main(searchString)
            if MongoDB.mongoDB(searchString) == "EXIST":
                coll = db[searchString]
                cursor = coll.find({})
                documents = []
                for document in cursor:
                    # Remove '_id' from the document
                    del document['_id']
                    documents.append(document)

                # Dump the documents and save them to a file to read back
                documents_json = dumps(documents)
                pd.read_json(documents_json).to_csv('products.txt', sep='\t', index=False, header=False)

                # Read the file where the products were written
                file = open('products.txt', 'r')
                lines = file.readlines()
                reviews = []
                for line in lines:
                    line = decodeKey(line.replace('₹', '₹'))
                    name = decodeKey(line.split('\t')[0])
                    offers = decodeKey(line.split('\t')[3])
                    price = decodeKey(line.split('\t')[1])
                    specification = decodeKey(line.split('\t')[2])
                    link = '/reviews/' + urllib.parse.quote_plus(
                        decodeKey(line.split('\t')[4]).split('?')[0])
                    mydict = {
                        'Name': name,
                        'Price': price,
                        'Specification': specification,
                        'Offers': offers,
                        'link': link
                    }
                    reviews.append(mydict)
                return render_template('results.html', reviews=reviews[0:(len(reviews) - 1)])
        except Exception as e:
            print('The Exception message is: ', e)
            return 'something is wrong'
    else:
        return render_template('index.html')
def get_info(self, list, user_id, flag):
    if flag == 1:
        dict1 = list.get('weekData')  # get the full page data (dict)
        try:
            length = len(dict1)
        except TypeError:
            print('There is no weekData')
            length = 0
    elif flag == 0:
        dict1 = list.get('allData')
        try:
            length = len(dict1)
        except TypeError:
            print('There is no allData')
            length = 0
    # print(length)
    i = 0
    if length != 0:
        while i < length:
            # count = dict1[i].get('playCount')
            # print(count)
            # print(dict1)
            # print(type(dict1))
            score = dict1[i].get('score')
            dict2 = dict1[i].get('song')  # song info of the i-th track
            # print(dict2)
            dict3 = dict2.get('ar')       # artist info of the i-th track
            song_name = dict2.get('name')
            song_id = dict2.get('id')
            singer_name = dict3[0].get('name')
            singer_id = dict3[0].get('id')
            dict4 = dict2.get('al')
            album_name = dict4.get('name')
            album_id = dict4.get('id')
            # print(song_name)
            if flag == 1:
                # MongoDB.insert_recent_list(user_id, singer_name, singer_id, song_name, song_id,
                #                            album_name, album_id, score)  # database loop issue
                MongoDB.insert_recent_list_show(user_id, singer_name, singer_id, song_name, song_id,
                                                album_name, album_id, score)  # database loop issue
            else:
                # MongoDB.insert_all_list(user_id, singer_name, singer_id, song_name, song_id,
                #                         album_name, album_id, score)
                MongoDB.insert_all_list_show(user_id, singer_name, singer_id, song_name, song_id,
                                             album_name, album_id, score)
            i = i + 1
def get_database(database_string, config, logger):
    database = None
    if database_string == "mongo":
        database = MongoDB.initialize(config, logger)
    elif database_string == "postgres":
        database = Postgres.initialize(config, logger)
    return database
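# Usage sketch (illustrative, not from the original sources): selecting a backend with
# get_database() above. The config dict is hypothetical; the MongoDB/Postgres wrapper
# classes and their initialize(config, logger) entry points are assumptions carried
# over from the snippet itself.
import logging

logger = logging.getLogger(__name__)
config = {"host": "localhost", "port": 27017}  # hypothetical configuration
db = get_database("mongo", config, logger)
if db is None:
    logger.error("unknown database backend requested")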
def uploadFilesToMediaFolder(fileItem):
    t = int(time.time())
    filename = "%s_%s" % (str(t), fileItem)
    d = {}
    d["type"] = "file"
    d["extension"] = str(filename.split(".")[-1])
    d["name"] = str(filename)
    d["date"] = t
    MongoDB.insert_item(d)

    path = "media/%s" % (filename)
    destination = open(path, "wb+")
    message = "Upload feito com sucesso!"
    for chunk in fileItem.chunks():
        destination.write(chunk)
    destination.close()
    return message
def insertURL():
    URL_obj = MongoDB.URL()
    filename = tkf.askopenfilename()
    with open(filename, "r", encoding="utf-8") as f:
        url_list = f.read().split("\n")
        for url in url_list:
            URL_obj.insert(url)
def run():
    version = 1
    Index = 2
    while 1 == 1:
        time.sleep(random.randint(2200, 3200))
        SearchByList(version, Index)
        version = version + 1
        NewCollection("c5", "Name", Index, version)
        MongoDB.RenewDataDB("c5", version)
        Index = Index + 1
        if version % 5 == 0:
            time.sleep(random.randint(7200, 10800))

# NewCollection("c5", "Name", 3, 2)
# SearchByList(1, 3)
# MongoDB.ReadData("c5", 3)
# data = MongoDB.GetNewCollectionName("c5", 3)
# print(data)
# print("length:" + str(len(data)))
# CollectionName = MongoDB.SaveCollectionName("c5", 1)
# print(CollectionName)
# GetByPage()
# GetByName("轮盘吉兆", -1)
def crawl():
    """client = MongoClient(host="localhost", port=27017)
    db = client["CaptionDownloader"]
    URL_collection = db["URL"]
    #Caption_collection = db["Caption"]
    proc = int(comboxlist.get())
    while(URL_collection.find_one({"used": False})):
        try:
            po = Pool(16)
            for i in range(proc):
                po.apply_async(MongoDB.downloadCaptionFunction())"""
    while True:
        try:
            MongoDB.downloadCaptionFunction()
        except:
            pass
async def CreateTask(request):
    task_request = json.loads(await request.json())

    # 1. Check the archive file size and validate it
    if task_request.get("archive", ""):
        archive_size = len(task_request["archive"]["data"])
        if archive_size > Configs.MAX_SIZE:
            return web.Response(status=413,
                                body={
                                    "error": 413,
                                    "reason": "Request Entity Too Large",
                                    "description": ""
                                },
                                content_type="application/json")

    # 2. Add the task to the DB
    task_doc = DataTransformer.TaskRequest2TaskDoc(task_request)
    DBoperation = MongoDB.MonoDBOperation()
    result = await DBoperation.AddDB(task_doc)
    task_id = result["data"]
    task_doc["id"] = task_id
    task_doc.pop("_id")

    # 3. Send the response to the client
    response = web.Response(status=200, body=json.dumps(task_doc),
                            content_type="application/json")
    await response.prepare(request)
    await response.write_eof()

    # 4. Make a work dir for this task
    await IO.MakeWorkDir(task_id, logger)

    # 5. Extract the archive file
    if task_request.get("archive", ""):
        await IO.FileExtract(task_id, task_request["archive"]["type"],
                             task_request["archive"]["data"], logger)
    return response
def listAllMacroRegions():
    """
    Returns all the consolidated macro-regions
    function() -> list
    """
    lista = MongoDB.list_all_macro_regions()
    return lista
def listTableMoreInputs():
    """
    Returns the table with the data that will be chosen to compose the
    regression-analysis solution
    function() -> list, matrix (list of lists)
    """
    l = MongoDB.list_all_macro_regions()
    regions = {}
    for i in l:
        if i['region'] not in regions:
            regions[i['region']] = {}
        for k, v in sorted(i.items()):
            if k not in ['_id', 'lat', 'lng', 'qtd', 'type', 'region']:
                regions[i['region']][k] = v

    regionsList = []
    isFirstLine = True
    matrix = {}
    for r, values in sorted(regions.items()):
        regionsList.append(r)
        for k, v in sorted(values.items()):
            if k not in matrix:
                matrix[k] = []
            if k == 'price_by_meter':
                matrix[k].append(float("{0:.2f}".format(v)))
            else:
                matrix[k].append(v)
    return regionsList, matrix
def region(request):
    if request.method == 'POST':
        list_points = request.POST.get('new_points').split(';')
        if len(list_points) > 2:
            price = request.POST.get('price')
            color = request.POST.get('color')
            d = {}
            d['type'] = 'region'
            d['price'] = price
            d['color'] = color
            d['LatLng'] = []
            for i in list_points:
                if i != '':
                    temp = re.sub(r'^\s+', '', re.sub(r'[^\-0-9.0-9\[\],]', '', i)).split(',')
                    d['LatLng'].append([float(temp[0]), float(temp[1])])
            MongoDB.insert_item(d)

    colors = [
        ['#F7FBFF', '100-500'],
        ['#DEEBF7', '500-1000'],
        ['#C6DBEF', '1000-1500'],
        ['#9ECAE1', '1500-2000'],
        ['#6BAED6', '2000-2500'],
        ['#4292C6', '2500-3000'],
        ['#2171B5', '3000-3500'],
        ['#08519C', '3500-4000'],
        ['#08306B', '4000-5000'],
        ['#08186C', '5000-6000'],
        ['#21086C', '6000-10000'],
    ]
    regions, table = viewLists.listTableMoreInputs()
    t = get_template('region.html')
    html = t.render(
        Context({
            'center': "-23.2062436,-45.900007",
            'colors': colors,
            'regions': regions,
            'table': table,
            'csrf_token': csrf(request)['csrf_token'],
        })
    )
    html = html.replace("&#39;", "'")  # unescape apostrophes rendered as HTML entities
    return HttpResponse(html)
def showstar():
    # Fetch the data from the database
    data = pd.DataFrame(list(db.get()))
    # Plot a bar chart of the rating distribution
    data['star'].value_counts().plot.bar()
    plt.title(u'电影《一出好戏》评价分布')
    plt.show()
def CsvtoMongoDB(self, host, database, collection, rowCount, chunkSize, domain_index, domains):
    self.domain_index = domain_index
    self.domains = domains
    self.rowCount = rowCount
    self.chunkSize = chunkSize
    self.host = host
    self.database = database
    self.collection = collection
    self.mongoDb = MongoDB.MongoDB(self.host, self.database, self.collection)
    durum = 0
    try:
        if len(domain_index) != 0:
            for chunk in pd.read_csv(self.filepath, skiprows=1 + self.rowCount,
                                     chunksize=self.chunkSize, usecols=self.domain_index,
                                     names=self.domains, low_memory=False, encoding='utf'):
                try:
                    data_json = json.loads(chunk.to_json(orient='records'))
                    durum = 1
                except:
                    print("Csv to json converting error")
                    error = "Csv to json converting error ---> user name "
                    writeToErrorsInLogFiles.writeErrorLogFileMongo(error)
                    durum = 0
                    break
        elif len(domain_index) == 0:
            for chunk in pd.read_csv(self.filepath, skiprows=1 + self.rowCount,
                                     chunksize=self.chunkSize, names=self.domains,
                                     low_memory=False, encoding='utf'):
                try:
                    data_json = json.loads(chunk.to_json(orient='records'))
                    durum = 1
                except:
                    print("Csv to json converting error")
                    error = "Csv to json converting error ---> user name "
                    writeToErrorsInLogFiles.writeErrorLogFileMongo(error)
                    durum = 0
                    break
    except:
        print("Csv reading error")
        error = "Csv reading error ---> user name "
        writeToErrorsInLogFiles.writeErrorLogFileMongo(error)
        durum = 0

    if durum == 1:
        self.mongoDb.InsertMany(data_json)
        durum = 1
    else:
        pass
    self.mongoDb.ConnectionClose()
def listAllDocuments():
    """
    Returns a list with all the documents in the system, ordered by date
    function() -> list
    """
    lista = MongoDB.list_all_documents()
    docs = [[i['name'], i['extension'], datetime.datetime.fromtimestamp(i['date'])] for i in lista]
    return docs
def listAllCIDs():
    """
    Returns a list with all the CIDs (ICD-10 codes) in the system
    function() -> list
    """
    lista = MongoDB.list_all_cids()
    cids = [[i['code'], i['neoplasms']] for i in lista]
    return cids
def validate():
    f = open('config', 'r')
    text = f.readlines()
    db = text[0].split('=')
    db = db[1].strip()
    if db == "MongoLibrary":
        mongo = MongoDB.MongoMain()
    elif db == "Library":
        libraryworker = LibraryWorker()
    else:
        mb.showerror(parent=root, message=f"Базы данных {db} не существует")  # "Database {db} does not exist"
def run():
    version = 5
    Index = 7
    while 1 == 1:
        time.sleep(random.randint(2200, 3200))
        SearchByList(version, Index)
        version = version + 1
        NewCollection("c5", "Name", Index, version)
        MongoDB.RenewDataDB("c5", version)
        Index = Index + 1
        if version % 5 == 0:
            time.sleep(random.randint(7200, 10800))
def get_user_info(self, list):
    list1 = list.get('result')
    list2 = list1.get('userprofiles')
    for user in list2:
        city_id = user.get('city')
        # city_id = 1000100
        if city_id < 1000100:
            pro_id = int(city_id / 10000) * 10000
        else:
            pro_id = city_id
        # print(city_id)
        pro_name = city.get_city(pro_id)
        city_name = city.get_city(city_id)
        # print(city_name)
        gender = user.get('gender')
        gender = get_gender(gender)
        birthday = user.get('birthday')
        # get_birth(birthday)
        birth = get_birth(birthday)
        MongoDB.insert_user_show(user.get('nickname'), user.get('userId'), gender, birth,
                                 city_name, pro_name)
def geocode(address, isCenter=False, sensor="false", **geo_args):
    cep = ''
    if isCenter:
        pc = MongoDB.find_city(address[1])
        if pc is None:
            geo_args.update({
                'address': address[0].replace(' ', '+'),
                'sensor': sensor
            })
            # print(address)
            temp = geocode2(address[0], isCenter=True, **geo_args)
            MongoDB.insert_item(temp)
            return temp
        else:
            return pc
    else:
        cep = address[1]
        pc = MongoDB.find_postal_code(cep)
        if pc is None:
            geo_args.update({
                'address': address[0].replace(' ', '+'),
                'sensor': sensor
            })
            temp = geocode2(cep, **geo_args)
            MongoDB.insert_item(temp)
            return temp
        else:
            return pc
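# Usage sketch (illustrative, not from the original sources): geocode() above works as a
# cache-aside lookup, returning the cached MongoDB document when the city or postal code
# was geocoded before and calling geocode2() otherwise. The address and CEP values below
# are hypothetical; the returned documents carry 'lat'/'lng' fields, as used by the Django
# views elsewhere in this collection.
center = geocode(['Brasil SP Sao Jose dos Campos', 'Sao Jose dos Campos'], isCenter=True)
point = geocode(['Brasil SP Sao Jose dos Campos 12210-100', '12210-100'])
print(center['lat'], center['lng'])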
async def UpdateTaskDoc(request):
    task_id = request.match_info.get("task_id", "Wrong Task ID")
    update_doc = json.loads(await request.json())
    try:
        task_id = ObjectId(task_id)
    except:
        return web.Response(status=400, reason="Invalid Task ID")
    # query = json.loads(await request.json())  # unused: the body is already parsed above
    DBoperation = MongoDB.MonoDBOperation()
    result = await DBoperation.UpdateDBbyReplace(task_id, update_doc)
    return web.Response(status=result["status"],
                        body=json.dumps(result["data"]),
                        reason=result["error"])
def getC5TextData(web, index):
    '''
    Match all of the "sell item" blocks.
    These regular expressions need to be adjusted to the HTML that is actually returned.
    '''
    req = r'<li class="selling">(.+?)</li>'
    nameR = r'<span class=" .+? ">(.+?)</span>'
    priceR = r'<span class="price">¥ (.+?)</span>'
    numberR = r'<span class="num">(.+?)</span>'
    soleR = r'^([0-9]+?)[^0-9]+'
    soleRC = re.compile(soleR)
    matchlist = re.findall(req, web.data.decode("UTF-8"), re.S)
    print(matchlist)
    # For each item, extract the name, price and quantity
    for i in matchlist:
        name = re.findall(nameR, i, re.S)[0]
        price = re.findall(priceR, i, re.S)[0]
        # numberO contains a Chinese descriptor, so it is not a pure number
        numberO = re.findall(numberR, i, re.S)[0]
        number = re.findall(soleR, numberO)[0]
        print(name)
        MongoDB.insert("c5", name, price, number, index)
def getWordcloud(file):
    # Fetch the data from the database
    data = pd.DataFrame(list(db.get()))
    data = data['comment']
    # Concatenate all comments
    comments = ''
    for comment in data:
        comments = comments + comment.strip()
    # Strip punctuation: keep only Chinese characters
    pattern = re.compile(r'[\u4e00-\u9fa5]+')
    filterdata = re.findall(pattern, comments)
    cleaned_comments = ''.join(filterdata)
    # Add stop words
    jieba.analyse.set_stop_words(stop_words_path='chineseStopWords.txt')
    # Tokenize and extract keywords
    keywords = jieba.analyse.extract_tags(cleaned_comments, topK=500, withWeight=True, allowPOS=())
    if file is None:
        # Generate the word cloud
        wc = WordCloud(background_color="white", max_words=500, max_font_size=500,
                       random_state=444, font_path=r'C:\Windows\Fonts\simhei.ttf')
        wc.generate_from_frequencies(dict(keywords))
    else:
        # Use the given image as the background mask
        background = np.array(Image.open(file))
        image_colors = ImageColorGenerator(background)
        # Generate the word cloud
        wc = WordCloud(background_color="white", max_words=500, mask=background,
                       max_font_size=500, random_state=444,
                       font_path=r'C:\Windows\Fonts\simhei.ttf')
        wc.generate_from_frequencies(dict(keywords)).recolor(color_func=image_colors)
    # Show the word cloud
    plt.imshow(wc, interpolation='bilinear')
    plt.axis("off")
    plt.show()
def Start(self, usr, email):
    target_number = str(mastermind.Create_Number(self))
    attempt = int(0)
    sample1 = {
        'Name': usr,
        'Email': email,
        'Target': target_number,
        'Count': attempt,
        'Result': '',
        'past_attempt': []
    }
    db.DB().insertion_mongo(self.conn.album, sample1)
    func_logger.info(f'NEWGAME,{usr},{email},{target_number}')
    return 'Bom Jogo'
async def TaskBrows(request):
    query = json.loads(await request.json())
    DBoperation = MongoDB.MonoDBOperation()
    # Only keyword queries are supported for now
    result = await DBoperation.QueryDB(
        query["query"] if query.get("query", False) else {})
    for index in range(len(result["data"])):
        result["data"][index]["id"] = str(result["data"][index]["_id"])
        result["data"][index].pop("_id")
    return web.Response(status=result["status"],
                        body=json.dumps(
                            result["data"][query["skip"]:query["skip"] + query["limit"]]),
                        content_type="application/json",
                        reason=result["error"])
def randomCIDs(quantity=0):
    """
    Creates a random set of CIDs and returns a dictionary with all generated CIDs,
    a list with each CID and its assigned colour, and a dictionary mapping each CID
    to its colour
    function(number) -> dictionary, list, dictionary
    """
    from collections import Counter

    lista = MongoDB.list_all_cids()
    colors = ColorsRandom.parallel_gen_colors(quantity)
    randomList = []
    while len(randomList) < quantity:
        item = random.choice(lista)
        randomList.append(item['code'])
    dic = dict(Counter(randomList))

    colors = ColorsRandom.parallel_gen_colors(len(dic))
    dicColors = {}
    count = 0
    for i in dic:
        dicColors[i] = colors[count]
        count += 1

    latLng = []
    lista = MongoDB.list_all_postal_codes()
    while len(latLng) < quantity:
        cid = randomList.pop()
        item = random.choice(lista)
        latLng.append([cid, item['lat'], item['lng'], dicColors[cid]])
    return dic, latLng, dicColors
def filter(request):
    regions, table = viewLists.listTableMoreInputs()
    t = get_template('filter.html')
    html = t.render(
        Context({
            'regions': regions,
            'table': table,
            'center': "-23.2062436,-45.900007",
            'cids_groups': MongoDB.list_all_cids_groups(),
            'csrf_token': csrf(request)['csrf_token'],
        })
    )
    html = html.replace("&#39;", "'")  # unescape apostrophes rendered as HTML entities
    return HttpResponse(html)
def listAllNeighborhood():
    """
    Debug print to check the data

    places[name][0]['abbreviation_state'] = i['abbreviation_state']
    places[name][1]['abbreviation_currency'] = i['abbreviation_currency']
    places[name][2]['region'] = i['region']
    places[name][3]['currency'] = i['currency']
    places[name][4]['state'] = i['state']
    places[name][5]['type'] = i['type']
    places[name][6]['locality'] = i['locality']
    places[name][7]['country'] = i['country']
    places[name][8]['abbreviation_country'] = i['abbreviation_country']
    places[name][9]['neighborhood'] = i['neighborhood']
    places[name][10]['price'] = 0
    places[name][11]['square_meter'] = 0
    places[name][12]['price_by_meter'] = 0
    """
    lista = MongoDB.list_all_places()
    places = {}
    for i in lista:
        name = i['region'].replace('Sao Jose Dos Campos', '') + ' - ' + i['neighborhood']
        if name not in places:
            places[name] = ['', '', '', '', '', '', '', '', '', '', 0, 0, 0]
            places[name][0] = i['abbreviation_state']
            places[name][1] = i['abbreviation_currency']
            places[name][2] = i['region']
            places[name][3] = i['currency']
            places[name][4] = i['state']
            places[name][5] = i['type']
            places[name][6] = i['locality']
            places[name][7] = i['country']
            places[name][8] = i['abbreviation_country']
            places[name][9] = i['neighborhood']
            places[name][10] = 0
            places[name][11] = 0
            places[name][12] = 0
        places[name][10] += i['price']
        places[name][11] += i['square_meter']
    for i in places:
        places[i][12] = (places[i][10] / places[i][11])
    print(places)
def randomCIDs(num_max_cids):
    """
    Returns a random list of CIDs
    function(number) -> list
    """
    import random

    listCIDs = MongoDB.list_all_cids()
    list_analysis = []
    i = 0
    while i < num_max_cids:
        r = random.randint(0, (len(listCIDs) - 1))
        list_analysis.append(listCIDs[r]['code'])
        i += 1
    # print(len(list_analysis))
    # print(len(set(list_analysis)))
    return list_analysis

# createDataBaseCIDs()
def __init__(self, api_key, secret_key, uri):
    self.api_key = api_key
    self.secret_key = secret_key
    self.uri = uri
    # Create the websocket used to add and cancel orders
    self.ws = create_connection(self.uri)
    # Duplicate object for the threading channel
    WebSocket.Okcoin.instance = self
    # Thread that runs the websocket's run_forever function separately to retrieve market data
    thread = threading.Thread(target=self.run)
    thread.daemon = True  # Daemonize the thread: the main application can exit even if the thread is still running
    thread.start()
    # Instance of the MongoDB database used to log trades and candlesticks
    self.mongo = MongoDB.MongoDB()
    self.initObject()
def listAllHealths():
    """
    Returns a list of all the health-care points, with COLOR, NAME and ADDRESS
    function() -> list
    """
    lista = MongoDB.list_all_healths_2_table()
    hospitals = [[i['region'], i['name'], i['address'], i['color']] for i in lista if i['type'] == 'Hospital']
    upas = [[i['region'], i['name'], i['address'], i['color']] for i in lista if i['type'] == 'UPA']
    ubs = [[i['region'], i['name'], i['address'], i['color']] for i in lista if i['type'] == 'UBS']
    return hospitals, upas, ubs
def Start(self, usr):
    target_number = str(mastermind.Create_Number(self))
    attempt = int(0)
    check = self.conn.check(self.conn.album, usr)
    if check == 'OK':
        sample1 = {
            'Name': usr,
            'Target': target_number,
            'Count': attempt,
            'Result': '',
            'past_attempt': []
        }
        db.DB().insertion_mongo(self.conn.album, sample1)
        func_logger.info('{one},{two},{three}'.format(one='Start', two=usr, three=target_number))
        return 'Bom Jogo'
    else:
        return 'Usuario Ja Existente'
def download_ent_info_and_write2db(self, order, db_name, list_of_ent, success_count=0):
    db = MongoDB.DataBase(db_name).get_db_handle()
    ent_info_dirname = order['ent_info_dirname']
    tag = order['tag']
    token_id = order['token_id']
    ent_info_url = order['ent_info_url']
    try:
        if order['get_ent_list'] == 0:
            # Skip orders whose enterprise list failed to download
            return
        logging.warning('正在查询全部企业的详细信息')
        db_sheet = db[tag + '-' + ent_info_dirname]  # build the collection (data sheet) in MongoDB
        for each in list_of_ent:
            ent_name, ent_id = each[0], each[1]
            # print(ent_name, ent_id)
            temp = '\"organizationid\":\"{}\"'.format(ent_id)
            parameters = '{%s}' % temp  # example: parameters = "{\"organizationid\":\"5115801\"}"
            para = {'tokenId': token_id, 'encryptdata': parameters}
            response = requests.post(ent_info_url, params=para)
            content = json.loads(response.text)
            if content['rescode'] != '00000':
                logging.error('存在错误,下载失败:{}'.format(content))
                break
            # print(content)
            print('正在写入数据库:{}'.format(ent_name))
            db_sheet.insert_one({
                'ent_name': ent_name,
                'info': str(content)
            })
            success_count += 1
            time.sleep(1.5)
        order['saved'] = success_count
    except Exception as e:
        order['saved'] = success_count
        logging.error(e)
    return
async def TaskStatusUpdate(request):
    task_id = request.match_info.get("task_id", "Wrong Task ID")
    task_status = request.match_info.get("status", "Wrong Task STATUS")
    try:
        ObjectId(task_id)
    except:
        logger.info("Invalid Task ID")
        return web.Response(status=400,
                            body={
                                "error": 400,
                                "reason": "Invalid Task ID",
                                "description": ""
                            },
                            content_type="application/json")

    # Update Task DOC ===> Finished
    # 2. Query the task doc info
    DBoperation = MongoDB.MonoDBOperation()
    query_result = await DBoperation.QueryDB({"_id": ObjectId(task_id)})
    if len(query_result["data"]) == 0:
        return web.Response(status=404,
                            body={
                                "error": 404,
                                "reason": "Invalid Task ID",
                                "description": ""
                            })

    # 3. Update the task doc info
    task_doc = query_result["data"][0]
    task_doc["status"] = task_status
    logger.debug("check updated task doc = {}".format(task_doc))
    update_result = await DBoperation.UpdateDBbyReplace(
        {"_id": ObjectId(task_id)}, task_doc)
    logger.debug("check task status = {}".format(task_status))

    # If the task is finished, kill the corresponding app
    if task_status == "COMPLETED":
        url = Configs.Marathon_ADDRESS + "/v2/apps/{app_id}/".format(
            app_id="mlge1." + task_id)
        response = requests.delete(url=url)
        logger.debug("check delete response from marathon = {}".format(
            response.json()))
    return web.Response(status=200)
async def DeleteTask(request):
    task_id = request.match_info.get("task_id", "Wrong Task ID")
    logger.debug("launchtask task id check = {}".format(task_id))
    try:
        ObjectId(task_id)
    except:
        logger.debug("Task ID invalid = {}".format(str(task_id)))
        return web.Response(status=404,
                            body={
                                "error": 404,
                                "reason": "Invalid Task ID",
                                "description": ""
                            })

    DBoperation = MongoDB.MonoDBOperation()
    query_result = await DBoperation.QueryDB({"_id": ObjectId(task_id)})
    if not len(query_result["data"]):
        return web.Response(status=404,
                            body={
                                "error": 404,
                                "description": "",
                                "reason": "the task does not exist"
                            })

    task_doc = query_result["data"][0]
    if task_doc["status"] == "WAITING" or task_doc["status"] == "CREATING" or \
            task_doc["status"] == "COMPLETED" or task_doc["status"] == "KILLED":
        await DBoperation.DelDB({"_id": ObjectId(task_id)})
        response = web.Response(status=200, body=json.dumps({}))
        await response.prepare(request)
        await response.write_eof()
        await IO.RMWorkDir(task_id, logger)
        return response
    else:
        return web.Response(
            status=409,
            body=json.dumps({
                "error": 409,
                "reason": "the task status is {}".format(task_doc["status"]),
                "description": ""
            }))
def excel():
    """
    Prints all the consolidated points so they can be checked
    function()
    """
    l = MongoDB.list_all_places()
    places = {}
    for i in l:
        name = i['region'].replace('Sao Jose Dos Campos', '') + ' - ' + i['neighborhood']
        if name not in places:
            places[name] = ['', '', 0.0, 0]
            places[name][0] = i['region'].replace('Sao Jose Dos Campos', '')
            places[name][1] = i['neighborhood']
        places[name][2] += i['price']
        places[name][3] += i['square_meter']
    for i in places:
        places[i].append(places[i][2] / places[i][3])
    for i in places:
        print('{0}\t{1}\t{2}'.format(places[i][0], places[i][1], str(places[i][4])))
async def GetTaskDoc(request):
    task_id = request.match_info.get("task_id", "Wrong Task ID")
    try:
        task_id = ObjectId(task_id)
    except:
        logger.info("Invalid Task ID")
        return web.Response(status=400,
                            body={
                                "error": 400,
                                "reason": "Invalid Task ID",
                                "description": ""
                            },
                            content_type="application/json")

    DBoperation = MongoDB.MonoDBOperation()
    result = await DBoperation.QueryDB({"_id": task_id})
    for index in range(len(result["data"])):
        result["data"][index]["id"] = str(result["data"][index]["_id"])
        result["data"][index].pop("_id")
    return web.Response(status=result["status"],
                        body=json.dumps(result["data"]),
                        content_type="application/json",
                        reason=result["error"])
def listAllPlaces():
    """
    Returns a list with all the places, with the price per m²
    function() -> list
    """
    lista = MongoDB.list_all_places()
    places = {}
    for i in lista:
        name = i['region'].replace('Sao Jose Dos Campos', '') + ' - ' + i['neighborhood']
        if name not in places:
            places[name] = [0.0, 0]
        places[name][0] += i['price']
        places[name][1] += i['square_meter']
    for i in places:
        places[i].append(places[i][0] / places[i][1])
    result = []
    for i in sorted(places.keys()):
        f = "{:.2f}".format(places[i][2])
        result.append([i, f])
    return result
# Parse the content stored in the local database using XPath or regular expressions
import SpiderFactory
import Parser
import DBAdapter
from MongoDB import *
import Comm
import traceback
from datetime import *
import time
import json

if __name__ == "__main__":
    optionsdic = {'dbname': 'testdb', 'username': '******', 'passwd': '123456'}
    mongodb = MongoDB('localhost', 27017, **optionsdic)
    mongodb.conn()
    spiderFactory = SpiderFactory.SpiderFactory("config.xml")
    spider = spiderFactory.get_spider()
    spider.create_session()
    spider.login_use_cookies()
    parser = Parser.Parser()
    comm = Comm.Comm('localhost', 5008, 40960)
    comm.conn_to_master()
    while (True):
        current_url = comm.request_from_master(1)
        if (not current_url):
    # (fragment: the lines below are the tail of a TweetStream method)
        self.file = open(self.get_file_name(), "w")
        self.logger.log("Database queries complete")
        self.logger.log(
            "Conducted " + str(self.streamer.reads) + " reads and " +
            str(self.streamer.writes) + " writes."
        )
        self.logger.log_to_file(
            self.file,
            str(self.response_data).replace("[", "").replace("]", "").replace(" ", "").replace(",", "\n")
        )
        self.logger.log("Average response time: " + str(sum(self.response_data) / len(self.response_data)))

    def put_tweet_in_database(self):
        r = random.random()
        return r <= self.probability

    def add_to_database(self, data):
        self.database.add_tweets(data)

    def query_database(self):
        self.database.query_database()


def get_instance(logger, inserts, selects, database, config):
    return TweetStream(logger, inserts, selects, database, config)


if __name__ == "__main__":
    m = MongoDB.initialize()
    p = Postgres.initialize()
    t = TweetStream(logging, 100, 100, m)
    t.run()
def locations(request):
    regions, table = viewLists.listTableMoreInputs()
    if request.method == 'POST':
        # Map center
        num_clusters = int(request.POST.get('num_clusters'))
        colors = ColorsRandom.generate_colors(num_clusters)
        country = request.POST.get('country')
        state = request.POST.get('state')
        city = request.POST.get('city')
        center = Geocode.geocode([country + ' ' + state + ' ' + city, city], isCenter=True)
        center = str(center['lat']) + "," + str(center['lng'])

        # Locations of the CEPs (postal codes)
        ceps_list = [[country + ' ' + state + ' ' + city + ' ' + i, i]
                     for i in request.POST.get('ceps').split(';')]
        # items = Geocode.parallelGeocode(ceps_list)
        items = []
        for i in ceps_list:
            items.append(Geocode.geocode(i))

        points, centroids = k_means.k_means_lists(num_clusters, 20, items)
        dic = k_means.sse(points, centroids)

        t = get_template('maps.html')
        html = t.render(
            Context({
                'regions': regions,
                'table': table,
                'center': center,
                'localizations': points,
                'colors': colors,
                'centroids': centroids,
                'sse': dic,
                'hospitals': MongoDB.list_all_healths(),
                'csrf_token': csrf(request)['csrf_token'],
            })
        )
        html = html.replace("&#39;", "'")  # unescape apostrophes rendered as HTML entities
        return HttpResponse(html)
        # return render_to_response(
        #     'index.html',
        #     context_instance=RequestContext(request))

    t = get_template('index.html')
    html = t.render(
        Context({
            'regions': regions,
            'table': table,
            'csrf_token': csrf(request)['csrf_token'],
        })
    )
    html = html.replace("&#39;", "'")  # unescape apostrophes rendered as HTML entities
    return HttpResponse(html)