def callback(self, ch, method, properties, body):  # run the load test
    """RabbitMQ consumer callback: run a Locust load test described by the
    message, attach the CSV results, persist everything to Mongo, then ack.

    The message body is JSON carrying the locustfile source (`code`), the
    target `host`, user count, hatch rate (`qps`) and duration (`number`).
    """
    data = json.loads(body)
    code = data.get('code')
    host = data.get('host')
    user = data.get('user')
    qps = data.get('qps')
    number = data.get('number')
    # Materialize the submitted locustfile so the CLI can pick it up.
    with open("locustfile.py", "w") as file:
        file.write(code)
    cmd = "locust -f locustfile.py --host={0} --csv=testfan --no-web -c{1} -r{2} -t{3}".format(
        host, user, qps, number)
    print cmd
    status = command(cmd)  # blocks until the load test finishes
    print status
    # NOTE(review): hard-coded absolute Windows path — presumably the --csv
    # output location of the locust run above; confirm on deployment.
    file = open("D:/workspace_py/testfan_623/testfan_requests.csv")
    reader = csv.reader(file)
    result = list(reader)
    file.close()
    data.setdefault('result', result)
    mongo = Mongo()
    mongo.insert('testfan', 'performance', data)
    # Ack only after results are stored, so a crash re-queues the job.
    ch.basic_ack(delivery_tag=method.delivery_tag)
def update(): data = request.get_json() client = Mongo() print data result = client.update("testfan", "user", data) print result return jsonify({'status': 200, 'message': '更新成功', 'data': result})
def update_objects(collection_name, objects=None, limit=0, offset=0, *args, **kwargs): try: response = createResponse200() mongo = Mongo() mongo.connect() database = mongo.use_db(database_name) updated = [] if type(objects) is not list: objects = [objects] for object in objects: object_id = { '_id' : object.get('_id') } up = mongo.update(collection_name, object_id, object) updated.append(up) response['result']['objects'] = updated response['result']['count'] = len(updated) response['result']['limit'] = limit response['result']['offset'] = offset except Exception, e: response = createResponse401(str(e))
def search(): data = request.values.to_dict() collection = data.pop('collection', 'coverage') client = Mongo() result = client.search("testfan", collection, data) print result return jsonify({'status': 200, 'message': '查询成功', 'data': result})
def admin():
    """Admin view: on POST insert or update a question/answer document,
    then render the admin page (or redirect to login when not signed in).
    """
    mongo = Mongo()
    msg = ""
    error = ""
    # NOTE(review): fetched before any POST mutation below, so the rendered
    # list does not include the change just made — confirm this is intended.
    datos = mongo.getall()
    if request.method == 'POST':
        documento = {'titulo': "", 'frase': "", 'urlimage': "", 'source': ""}
        if 'pregunta' in request.form and 'respuesta' in request.form:
            documento['titulo'] = request.form['pregunta']
            documento['frase'] = request.form['respuesta']
            # Optional image fields: only set when present and non-empty.
            if 'url_imagen' in request.form and len(request.form['url_imagen']) > 0:
                documento['urlimage'] = request.form['url_imagen']
            if 'source_imagen' in request.form and len(request.form['source_imagen']) > 0:
                documento['source'] = request.form['source_imagen']
            # An id means "edit existing"; otherwise insert a new document.
            if 'idpregunta' in request.form and request.form['idpregunta']:
                if mongo.updatetexto(documento, request.form['idpregunta']):
                    msg = "Modificado correctamente"
                else:
                    error = "No se pudo actualizar el valor correctamente"
            else:
                if mongo.inserttexto(documento):
                    msg = "Ingresado correctamente"
                else:
                    error = "No se pudo ingresar ningún valor"
    if 'username' in session:
        return render_template('admin.html', msg=msg, error=error, datos=datos)
    return redirect(url_for('login'))
def parse_file(path_to_file):
    """Parse a crawled XML file and convert each ``<doc>`` element to a dict.

    Each ``<doc>`` is converted via :func:`Parse.xml_to_dictionary`; documents
    that fail to convert are logged to MongoDB and skipped. A failure reading
    or parsing the whole file is logged to MongoDB and yields ``None``.

    :param path_to_file: path of the file to parse
        (Ex: ./crawled/IBECS_LILACS_17072019_pg_1.xml).
    :type path_to_file: string
    :returns: list of document dicts, or None on a file-level failure.
    """
    try:
        # Context manager fixes the file-handle leak in the original code.
        with open(path_to_file) as file:
            xml_content = file.read()
        bsObj = BeautifulSoup(xml_content, features='lxml')
        documents = bsObj.findAll("doc")
        document_dict_list = []
        for i, document_xml in enumerate(documents):
            try:
                document_dict = Parse.xml_to_dictionary(document_xml)
                # Remember which file the document came from.
                document_dict['file'] = path_to_file
                document_dict_list.append(document_dict)
            except Exception as e:
                Mongo.save_exception_to_mongo(
                    document_xml.find(attrs={"name": "id"}).text,
                    'XML to DICTIONARY one <doc> from single XML file',
                    document_xml.find(attrs={"name": "id"}).text,
                    str(e))
        return document_dict_list
    except Exception as e:
        Mongo.save_exception_to_mongo(
            path_to_file,
            'XML to DICTIONARY (for) multiple <doc> from single XML file',
            path_to_file,
            str(e))
def get_col(cls, db_conn_uri=None):
    """Return the collection handle for this model.

    Uses a dedicated connection when ``db_conn_uri`` is given, otherwise the
    shared default database handle.
    """
    if db_conn_uri:
        db = Mongo.get_instance(db_conn_uri)
    else:
        db = Mongo.get_db()
    return db[cls._col_name]
def delete_objects(collection_name, objects=None, limit=0, offset=0, *args, **kwargs): try: response = createResponse200() mongo = Mongo() mongo.connect() database = mongo.use_db(database_name) if type(objects) is not list: objects = [objects] deleted = [] for object in objects: if isinstance(object, basestring): object = { '_id' : get_mongo_id(object) } d = mongo.delete(collection_name, object, safe=True) if d.get('n') == 1: deleted.append(object.get('_id')) if type(object) is None: deleted = "All the items have been deleted" response['result']['deleted_objects'] = deleted response['result']['count'] = len(deleted) response['result']['limit'] = limit response['result']['offset'] = offset except Exception, e: response = createResponse401(str(e))
def update_valuations(xlsx: str):
    """Import new valuation sheets from an Excel workbook into Mongo.

    Only worksheets whose titles are YYYYMMDD dates newer than the last
    stored valuation are imported.
    """
    mongo = Mongo()
    # _id of the latest stored valuation is a millisecond epoch timestamp.
    ms = mongo.find_last('valuation')['_id']
    start = datetime.fromtimestamp(ms / 1000.0).strftime('%Y%m%d')
    wb = load_workbook(xlsx)
    wss = [ws.title for ws in wb.worksheets]
    excel = pd.ExcelFile(xlsx)
    valuations = []
    for ws in wss:
        # Sheet titles ending in an 8-digit date; lexicographic comparison is
        # valid because both sides use the fixed YYYYMMDD format.
        if not re.search(r'20\d{6}$', ws) or ws <= start:
            continue
        df = pd.read_excel(excel, ws)
        # Map series name -> value for that date's column.
        dic = dict(zip(df['名称'].tolist(), df[ws].tolist()))
        dic['_id'] = ws
        valuations.append(dic)
    print(len(valuations))
    if not valuations:
        return
    # the valuation hasn't ('中证500', '美国房地产', '10年期国债(美股)', '10年期国债(A股)', '消费50')
    df = pd.DataFrame(valuations)
    df['_id'] = pd.to_datetime(df['_id'])
    print(df)
    mongo.save('valuation', df)
def create_socket(self):
    '''When a new session is created, insert in mongo if it doesnt already exist'''
    db = Mongo(0)
    return db.find_insert({"session": self.session, "data": ""})
def __init_mongo__(self):
    """Open the MongoDB connection used by this instance."""
    self.__output__('连接MongoDB...')
    self.mongoDb = Mongo(host=mongoConfig.get_host(), port=mongoConfig.get_port())
    # NOTE(review): this tests the truthiness of the `connect` attribute
    # itself, not a call result; if `connect` is a method object this is
    # always truthy and the error path never fires — confirm the Mongo API.
    if not self.mongoDb.connect:
        raise MongoDisconnectError()
    self.__output__('MongoDB连接成功!')
def resolve(self, data=None):
    """Persist strongly negative posts to the reddit collection.

    For each entry whose sentiment classifies as 'neg' with confidence
    above 0.7, upsert the post (keyed by url) into Mongo.

    :param data: mapping of key -> post dict with 'text', 'url', 'title',
        'time', 'comment_num' and 'comment' fields. No-op when empty/None.
    """
    # Bug fix: the original crashed on the default data=None (.items() on None).
    if not data:
        return
    t = TextProcess()  # retained: constructor may perform required setup
    mongo = Mongo('config.ini')
    for k, v in data.items():
        sentiment = SentimentAnalysis(v['text'])
        # Keep only confidently negative posts.
        if (sentiment is not None and len(sentiment) > 0) and (sentiment[0] == 'neg') and (sentiment[1] > 0.7):
            mongo.saveUpdateOne({'url': v['url']},
                                {'$set': {'title': v['title'], 'text': v['text'], 'time': v['time'],
                                          'comment_num': v['comment_num'], 'comment': v['comment']}},
                                self.__db_reddit)
def get_data_from_mongodb(request):
    """Django view: return all concepts (id + attributes) as JSON."""
    from mongo import Mongo
    import copy, json
    print 'connecting to mongodb . . . '
    mongo = Mongo()
    # get all data 188 concepts and their votes
    # mongo.getfromMongo(collection = '', query = '')
    # mongo.appendDataListToMongo(host=mongo_host, port=mongo_port, dbName=mongo_db,
    #                             collectionName=datasets_collection, Item_JsonObject=dataset_object,
    #                             id_field=id_field)
    collection = mongo.connect(host, port, db, collection_questions_responses)
    # no_cursor_timeout keeps the cursor alive across the full iteration.
    cursor = collection.find(no_cursor_timeout=True)
    response = {}
    # response['prev_tasks'] = []
    response['concepts'] = []
    concept = {}
    for item in cursor:
        # response['prev_tasks'].extend(copy.deepcopy(concept['votes']['voted_tasks']))
        concept['id'] = copy.deepcopy(item['ConceptId'])
        concept['imports'] = copy.deepcopy(item['attributes'])
        # deepcopy because the same `concept` dict is reused each iteration
        response['concepts'].append(copy.deepcopy(concept))
        concept.clear()
    from django.http import JsonResponse
    print JsonResponse(response)
    return HttpResponse(json.dumps(response), content_type="application/json")
def submit_vote_to_db(request):
    """Django AJAX view: record a vote on a concept and update the voter's
    history, then echo a success flag as JSON.
    """
    response = {'success': False}
    import json
    if request.is_ajax():
        if request.method == 'POST':
            print request.body
            response['success'] = True
            jbody = json.loads(request.body)
            print jbody
            # sanitze the vote object before using it in the mongo update() function
            from mongosanitizer.sanitizer import sanitize
            sanitize(jbody)
            # hash/encrypt the voter id before using it in the mongo update() functions
            import hashlib
            email = str(jbody['voter_id'])
            hashed_email_object = hashlib.md5(email.encode())
            jbody['voter_id'] = hashed_email_object.hexdigest()
            # add vote to mongdb
            from mongo import Mongo
            mongo = Mongo()
            collection_q = mongo.connect(host, port, db, collection_questions_responses)
            query = {}
            # relaease this to append the vote to its correct concept
            query['ConceptId'] = jbody['concept_id']  # 1
            # Append the full vote document to the concept's votes array.
            push = {}
            push['$push'] = {}
            push['$push']['votes'] = jbody
            collection_q.update(query, push)
            # add voted tasks to users collections
            collection_u = mongo.connect(host, port, db, collection_users)
            query_ = {}
            query_['UserID'] = jbody['voter_id']
            # Track the user's voting history and bump the last-vote date;
            # upsert creates the user document on first vote.
            push_set = {}
            push_set['$push'] = {}
            push_set['$push']['voted_concepts_list'] = jbody['concept_id']
            push_set['$push']['voted_cursor_list'] = jbody['voter_survey_cursor']
            push_set['$push']['voting_dates'] = jbody['vote_date']
            push_set['$set'] = {}
            push_set['$set']['last_vote_date'] = jbody['vote_date']
            collection_u.update(query_, push_set, upsert=True)
    from django.http import JsonResponse
    print JsonResponse(response)
    return HttpResponse(json.dumps(response), content_type="application/json")
def __init__(self, host=None, port=None, db=None, default_collection='default'):
    """Initialise the Mongo-backed importer and set its log tag."""
    # dict name, persistant, host, port, db, verbose
    Mongo.__init__(self, host, port, db, default_collection)
    self.setMtag('[Import_XML]')
def _get_col(cls, db_conn_uri=None):
    """Return the collection handle for this model.

    :param db_conn_uri: optional connection URI; when omitted the shared
        default database handle is used.
    :return: collection handle.
    """
    if db_conn_uri:
        db = Mongo.get_instance(db_conn_uri)
    else:
        db = Mongo.get_db()
    return db[cls.__col_name]
def fill_documents(self, database, collection):
    """Repopulate the document list store from the given collection."""
    self.database = database
    self.collection = collection
    mongo = Mongo()
    self.store.clear()
    for d in mongo.get_all_documents(database, collection):
        # Title is the value of the document's first key.
        # NOTE(review): d.keys()[0] only works on Python 2 (dict_keys is not
        # subscriptable on Python 3) and relies on dict ordering — confirm.
        title = str(d.get(d.keys()[0]))
        self.store.append([get_icon('doc'), title])
def get_last_myhead_info(self, dc):
    """Look up the latest head info for the device code ``dc``.

    Anything after a '#' in the code is ignored.
    """
    db = DbLib()
    m = Mongo()
    if '#' in dc:
        dc = dc.split('#')[0]
    mid = m.get_mid(dc)
    return db.get_last_head_info_by_db(mid)
def toText(path='path to Offres Directorie'):
    """Walk the offers directory tree, extract text from each PDF (via OCR)
    and DOCX file, and store one text document per offer in Mongo.

    :param path: root directory containing the offer subdirectories plus
        names.txt and references.txt index files.
    """
    text = ''
    # set up database
    mydb = Mongo('TTProject', ['offres-info', 'offres-text'])
    # move to directory that containes offres
    os.chdir(path)
    dirnames = open('names.txt').read().split(" ")
    references = open('references.txt').read().split(" ")
    i = 0
    for directory in dirnames:
        if len(directory):
            x = len(directory)
            # strip surrounding quote characters from the listed name
            dirname = str(directory)[1:x - 1]
            # informations to store
            reference = references[i]
            name = dirname
            text += '############################ ' + \
                dirname + ' ############################\n\n\n'
            # loop trought Directory :
            dirPath = path + '\\' + dirname
            for filename in os.listdir(os.path.join(dirPath)):
                text += '\n\n#################### ' + filename + ' ####################\n\n'
                filePath = path + '\\' + dirname + '\\' + filename
                # check extention
                # PDFs: render each page to an image and OCR it
                if filename.endswith('.pdf'):
                    pdfFile = wi(filename=os.path.join(filePath), resolution=300)
                    images = pdfFile.convert('jpeg')
                    for page in images:
                        # Bug fix: the original rebound the name `Image` to the
                        # page's jpeg bytes (shadowing PIL's Image module) and
                        # passed a wand page to io.BytesIO — both broke the OCR
                        # loop. Render the page to disk and OCR that file.
                        page.save('page.jpg', 'JPEG')
                        text += pytesseract.image_to_string(
                            Image.open('page.jpg'), lang='fra')
                # DOCXs: paragraphs first, then table cell contents
                if filename.endswith('.docx'):
                    document = Document(filePath)
                    tables = document.tables
                    for p in document.paragraphs:
                        text += p.text
                    text += "\n ###### Tables's Content ###### \n"
                    for table in tables:
                        for row in table.rows:
                            for cell in row.cells:
                                for paragraph in cell.paragraphs:
                                    text += (paragraph.text)
            # inserting to mongo
            offre = {'_id': reference, "name": name, 'text': text}
            mydb.insert(offre, 'offres-text')
            text = ''
        i += 1
def insert():
    """Insert the JSON request body into the requested collection."""
    payload = request.get_json()
    mongo_client = Mongo()
    target_collection = payload.pop("collection", "user")
    inserted = mongo_client.insert("testfan", target_collection, payload)
    if inserted is None:
        return jsonify({'status': 400, 'message': '插入失败', 'data': payload})
    return jsonify({'status': 200, 'message': '插入成功', 'data': str(inserted)})
def gen_n_docs(self, train_size, test_size, random_pull=False):
    """Pull train/test documents from Mongo and tokenize each set.

    :return: (train_doclist, test_doclist) term lists.
    """
    mongo = Mongo(random_pull=random_pull, headline=False)
    train_news, test_news = mongo.pull_n_docs(train_size, test_size)
    return self._doc2terms(train_news), self._doc2terms(test_news)
def user_add_comment():
    """Push the current user's comment onto the movie matched by title."""
    title = request.args.get('title')
    comment = request.args.get('comment')
    # Pushes the comment under a field named after the current user.
    res = Mongo().db.Movies.update({"title": title}, {'$push': {
        auth.current_user(): comment
    }})
    # NOTE(review): this constructs a SECOND Mongo instance and closes that,
    # leaving the instance used above open — confirm whether Mongo manages a
    # shared connection or this leaks one.
    Mongo().close()
    return "Comment added"
def test2(indexes: list):
    """Load valuation history for the given indexes and plot it."""
    frame = Mongo().load_valuation(indexes)
    frame = frame.set_index('date')
    frame.index.name = None
    print(frame)
    # Allow CJK labels and proper minus signs in the chart.
    plt.rcParams['font.sans-serif'] = ['SimHei']
    plt.rcParams['axes.unicode_minus'] = False
    frame.plot(figsize=(12, 8), grid=True)
    plt.show()
def get_user_paper_info(self, dc):
    """Fetch time-limit rows for the machine id resolved from code ``dc``."""
    from mongo import Mongo
    m = Mongo()
    mid = m.get_mid(dc)
    self.connect()
    # NOTE(review): SQL built by string interpolation; mid comes from Mongo,
    # but parameterized queries would be safer — confirm mid is always numeric.
    sql_str = 'select telmid from timelimit where mid=%s order by telmid desc' % (
        str(mid))
    print sql_str
    r = self.do(sql_str)
    self.disconn()
    return r
def save(cls, items):
    """Persist one item, or a list of items, into miner_transactions."""
    Mongo.init()
    if isinstance(items, list):
        docs = [item.to_dict() for item in items]
    else:
        docs = [items.to_dict()]
    for doc in docs:
        Mongo.db.miner_transactions.insert(doc)
def __init__(self):
    """Build the card deck and store every card in the Mongo collection."""
    self.deck = Mongo()
    # Number cards are worth their face value; faces are worth 10.
    # The ace ('ase') is stored with no fixed value.
    ranks = {n: n for n in range(2, 11)}
    ranks["jack"] = 10
    ranks["lady"] = 10
    ranks["king"] = 10
    ranks["ase"] = None
    for suit in ('diamonds', 'clubs', 'hearts', 'spades'):
        for rank, value in ranks.items():
            self.deck.insertToCollection({suit: {rank: value}})
def auth(level='mortal'):
    """Guard: 404 unless the session is active and the user holds ``level``."""
    session = cherrypy.session.get('wop')
    active = Mongo(ActiveSession).find_one('wop', session)
    if not (session and active):
        raise cherrypy.HTTPError(404)
        # raise cherrypy.HTTPRedirect("http://www.youtube.com/watch?v=qCVQpcY1au4")
    user = active.get('user', None)
    levels = Mongo(User).find_one('username', user).get('nivel', None)
    if not levels or level not in levels:
        # raise cherrypy.HTTPRedirect("http://www.youtube.com/watch?v=qCVQpcY1au4")
        raise cherrypy.HTTPError(404)
def get_manager(code: str, mongo=None) -> tuple:
    """Return (name, total_scale) of the manager(s) of fund ``code``.

    When several managers exist, names and scales are joined with ', '.

    :param code: fund code to look up.
    :param mongo: optional Mongo handle; a fresh one is created when omitted.
    """
    # Bug fix: the original tested `if not Mongo:` — the class object, which
    # is always truthy — so a missing handle was never replaced.
    if not mongo:
        mongo = Mongo()
    managers = mongo.get_manager(code)
    if len(managers) == 1:
        name = managers[0]['name']
        total_scale = managers[0]['total_scale']
    else:
        name = ', '.join(x['name'] for x in managers)
        total_scale = ', '.join(str(x['total_scale']) for x in managers)
    return name, total_scale
class Client:
    """Thin wrapper that upserts chat messages into the configured DB."""

    def __init__(self):
        self.__config = Config()
        self.__mongo = Mongo()
        self.__db_msg = self.__config.getValue('Config', 'DB_MSG')

    def insertMsg(self, user, msg):
        """Upsert the latest message body for ``user``."""
        query = {"chat_user": user}
        update = {'$set': {"chat_body": msg}}
        self.__mongo.saveUpdateOne(query, update, self.__db_msg)
def sort_indexes(begin: date):
    """Back-test every OTC index fund since ``begin`` and print them sorted
    by annualized return (descending).
    """
    mongo = Mongo()
    lst = []
    for code in mongo.get_otc_indexes():
        r = loop_back(code, begin)
        # First row of the back-test: [0] is the display name, [-1] the
        # annualized return as a fraction (converted to percent here).
        name, rate = r[0][0], round(r[0][-1] * 100, 2)
        scale = get_scale(code, mongo)[1]
        manager, total_scale = get_manager(code, mongo)
        lst.append((name, manager, rate, scale, total_scale))
    df = pd.DataFrame(lst, columns=['名称(代码)', '基金经理', '年化(%)', '规模(亿元)', '总规模(亿元)'])
    df = df.sort_values('年化(%)', ascending=False).reset_index(drop=True)
    print(df)
def FindInsert(self):
    """Find and Insert record in MongoDB and check"""
    db = Mongo(0)
    record = {'session': 'FindInsertTest', 'data': 'FindInsertTest Test data'}
    db.insert(dict(record))
    found = db.find_insert(dict(record))
    return self.assertEqual('FindInsertTest', found['data'])
def __init__(self):
    """Configure the Telegram bot from config, open Mongo, and start the
    message-processing loop.

    Raises TeleException when no bot token is configured.
    """
    self.__config = Config()
    self.__TOKEN = self.__config.getValue('Telegram', 'TOKEN')
    if self.__TOKEN is None:
        raise TeleException(Type.NoneException, 'TOKEN is none')
    self.__bot = telepot.Bot(self.__TOKEN)
    # NOTE(review): getMe() is called twice — two API round-trips; the result
    # could be cached in one call.
    self.__id = self.__bot.getMe()['id']
    self.__user = self.__bot.getMe()['username']
    self.__mongo = Mongo()
    self.__db_msg = self.__config.getValue('Config', 'DB_MSG')
    self.__db_user = self.__config.getValue('Config', 'DB_USER')
    # Blocks: enters the bot's message loop immediately on construction.
    self.messageProcessing()
def __init__(self, config_path):
    """Load crawler settings from ``config_path`` and open data handles.

    Raises Exception when the mandatory ITEMS setting is missing/zero.
    """
    self.uni = universities.API()
    # Matches "... <word> University ..." to detect university mentions.
    self.reg = r".*(\s+\w+\s+University).*"
    self.config = Config(config_path)
    self.mongo = Mongo(config_path)
    self.collection = 'data'
    # Minimum text length defaults to 400 when not configured.
    self.minimum_length = int(
        self.config.getValueDefault('Data', 'MINLENGTH', 400))
    self.max_items = int(self.config.getValue('Data', 'ITEMS'))
    self.start = int(self.config.getValue('Data', 'START'))
    if not self.max_items:
        raise Exception('Please set max_items inside config.ini')
class Digimon(Resource):
    """REST resource exposing CRUD operations over the digimon collection."""

    def __init__(self):
        self.db = Mongo(app.config["DB_URI"])
        self.coll = app.config["COLL_NAME"]
        self.db_name = app.config["DB_NAME"]

    def post(self):
        """Insert a new digimon; the server assigns its id."""
        if not is_valid_request_method(["POST"]) or not contains_valid_path("/insert"):
            return make_resp(msg_payload(_INVALID_REQUEST_METHOD_MSG), 405)
        if not is_valid_fields(request, _INSERT_FIELDS):
            return make_resp(msg_payload(_INVALID_REQUEST_BODY_MSG), 422)
        request.json["id"] = generate_str_uuid4()
        # NOTE(review): when the insert yields no inserted_id this method
        # falls through and returns None — confirm intended error handling.
        if self.db.insert_doc(self.db_name, self.coll, request.json).inserted_id:
            return make_resp(msg_payload("Your digimon has been inserted!"), 201)

    def put(self):
        """Update an existing digimon matched by the request body."""
        if not is_valid_request_method(["PUT"]) or not contains_valid_path("/update"):
            return make_resp(msg_payload(_INVALID_REQUEST_METHOD_MSG), 405)
        if not is_valid_fields(request, _ALL_VALID_FIELDS):
            return make_resp(msg_payload(_INVALID_REQUEST_BODY_MSG), 422)
        if self.db.update_doc(self.db_name, self.coll, request.json).modified_count:
            return make_resp(msg_payload("Your digimon has been updated!"), 200)
        return make_resp(msg_payload("No found digimon to update..."), 404)

    def delete(self):
        """Delete a digimon matched by the request body."""
        if not is_valid_request_method(["DELETE"]) or not contains_valid_path("/delete"):
            return make_resp(msg_payload(_INVALID_REQUEST_METHOD_MSG), 405)
        if not is_valid_fields(request, _VALID_DELETE_FIELDS):
            return make_resp(msg_payload(_INVALID_REQUEST_BODY_MSG), 422)
        if self.db.delete_doc(self.db_name, self.coll, request.json).deleted_count:
            return make_resp(msg_payload("Your digimon has been deleted!"), 200)
        return make_resp(msg_payload("No found digimon to delete..."), 404)

    def get(self):
        """Look up digimon using the query-string parameters as the filter."""
        if not is_valid_request_method(["GET"]) or not contains_valid_path("/get/"):
            return make_resp(msg_payload(_INVALID_REQUEST_METHOD_MSG), 405)
        result = self.db.find_doc(self.db_name, self.coll, request.args.to_dict())
        if result is not None:
            return make_resp(result, 200)
        return make_resp(msg_payload("No one digimon can be find..."), 404)
def execute(string):
    """Fetch every page of posts for the query and store them in Mongo.

    Follows the API's 'next' cursor until a page comes back empty.
    """
    response = getResponseQuery(string)
    posts = getPosts(response)
    page_size = len(posts)
    while page_size > 0:
        response = getResponseNext(response.json()['next'])
        print(response.json()['next'])
        new_posts = getPosts(response)
        posts += new_posts
        page_size = len(new_posts)
        print(page_size)
    print(len(posts))
    mongo = Mongo("ztis", "ztis")
    mongo.insertCollection(posts)
def fill_databases(self):
    """Populate the tree: one expandable row per database, one child row
    per collection, each annotated with its document count.
    """
    mongo = Mongo()
    for db_name in mongo.get_all_databases():
        parent = self.add_row(None, get_icon('db'), db_name,
                              mongo.get_count(db_name), True)
        for coll_name in mongo.get_all_collections(db_name):
            self.add_row(parent, get_icon('doc'), coll_name,
                         mongo.get_count(db_name, coll_name), False)
def execute(): mongo = Mongo('ztis', 'ztis-test') # mongo.cloneCollection('ztis-test') # mongo.removeNonEnglishArticles() # mongo.removeDuplicates() # heatMap = HeatMap(mongo.mapReduceLocations()) # heatMap.setMap("map.png") mongo.collection = mongo.findCustom({"locations": {"$in": ["Poland", "PL"]}}) heatMap = HeatMap(mongo.mapReduceLocations()) heatMap.setMap("map2.png")
class Index: def __init__(self, conf, xmlConf=""): config = Config(conf) es_host = config.get("es", "host") es_batch = config.get("es", "batch") es_type = config.get("es", "type") self.es = ESIndex(es_host, es_batch, es_type) mongo_host = config.get("mongo", "host") mongo_port = int(config.get("mongo", "port")) mongo_db = config.get("mongo", "db") mongo_table = config.get("mongo", "table") self.mongo = Mongo(db=mongo_db, host=mongo_host, port=mongo_port, table=mongo_table) self.step = int(config.get("mongo", "step")) if xmlConf == "": self.fields = None else : self.fields = IndexXmlParser(xmlConf) def setIndex(self): self.es.create() for doc_type in index.fields.keys(): mapping = {} for field in index.fields[doc_type]: mapping[field.name] = field.attrs() self.es.putMapping({"properties":mapping}, doc_type = doc_type) def run(self): size = self.mongo.size() iterNum = size / self.step actualSize = 0 while (actualSize < size): dataset = self.fetchMongo(skip = actualSize, limit = self.step) actualSize += self.step try: for row in self.es.bulk(list(dataset)): if row[0] == False: print "%s write to es falied" %(row[1]["index"]["_id"]) except: print traceback.format_exc() def fetchMongo(self, skip = 0, limit = 0): return self.mongo.find().skip(skip).limit(limit)
def get_keypoints_from_dir(self, dbname=None, tabname=None, droptab=False):
    """Compute keypoint descriptors for every image in self.dir_path.

    With both ``dbname`` and ``tabname`` given, descriptors are pickled and
    stored in Mongo (optionally clearing the table first); otherwise they are
    only computed locally.
    """
    file_paths = [name for name in os.listdir(self.dir_path)
                  if name.endswith('.jpg') or name.endswith('.png')]
    mongo_inst = None
    if dbname is not None and tabname is not None:
        mongo_inst = Mongo(dbname, tabname)
    if mongo_inst is None:
        # NOTE(review): des_lst is built but never returned or stored — this
        # branch currently discards its results; confirm intended behavior.
        des_lst = []
        for file_path in file_paths:
            kp, des = self.get_keypoints_from_image(file_path)
            des_lst.append(des)
    else:
        if droptab:
            # Wipe the table before re-populating it.
            mongo_inst.tab.remove({})
        for file_path in file_paths:
            kp, des = self.get_keypoints_from_image(file_path)
            # Store the descriptor array as a pickled BSON binary blob.
            binary_des = Binary(pickle.dumps(des, protocol=2), subtype=128)
            entry = dict(_id=file_path, des=binary_des)
            mongo_inst.insert_without_warning(entry)
def create_objects(collection_name, objects, limit=None, offset=None, *args, **kwargs): try: response = createResponse200() mongo = Mongo() mongo.connect() database = mongo.use_db(database_name) if type(objects) is not list: objects = [objects] # Checking if any of the object already exist by _id objects_id = [ object.get('_id', None) for object in objects ] query = {'_id' : {'$in' : objects_id }} existing = mongo.find(collection_name, query) existing_ids = [ e.get('_id') for e in existing ] if existing: raise Exception("Some objects already exist in '%s' (same _id). Trying using UPDATE method. No objects created." % collection_name) # no limits or offset so far created = mongo.insert(collection_name, objects) response['result']['objects'] = created response['result']['count'] = len(created) response['result']['limit'] = limit response['result']['offset'] = offset except Exception, e: response = createResponse401(str(e))
def execute(self, date_from, date_to, arguments):
    """Run this object's configured Mongo query over a date range.

    Validates that the caller supplied exactly the number of arguments the
    stored query expects, then delegates to Mongo.query.
    """
    mongo = Mongo()
    dict_query = self.mongoQuery.dict_query
    db_name = dict_query["db_name"]
    coll = dict_query["coll"]
    level = self.mongoQuery.level
    # str_query="""
    # return $db.group({
    #    key: {"Module":1},
    #    cond:{"LogLevel":"error","Method": "HoldSeats"},
    #    reduce: function ( curr, result ) {result.total += 1;},
    #    initial:{ total : 0 }
    # });
    # """
    str_query = dict_query["str_query"]
    print len(arguments)
    if self.mongoQuery.args_count != len(arguments):
        raise Exception("Console Error: Invalid No of arguments!")
    return mongo.query(db_name, date_from, date_to, str_query, arguments, coll)
def __init__(self, conf, xmlConf=""):
    """Wire up the ES and Mongo clients from the ``conf`` file; an optional
    ``xmlConf`` provides the field mapping definitions.
    """
    config = Config(conf)
    self.es = ESIndex(config.get("es", "host"),
                      config.get("es", "batch"),
                      config.get("es", "type"))
    self.mongo = Mongo(host=config.get("mongo", "host"),
                       port=int(config.get("mongo", "port")),
                       db=config.get("mongo", "db"),
                       table=config.get("mongo", "table"))
    self.step = int(config.get("mongo", "step"))
    self.fields = None if xmlConf == "" else IndexXmlParser(xmlConf)
def read_objects(collection_name, query={}, limit=0, offset=0, sort=settings.DEFAULT_SORT, *args, **kwargs): try: response = createResponse200() mongo = Mongo() mongo.connect() database = mongo.use_db(database_name) count = mongo.count(collection_name, query, limit=0, offset=0, sort=sort) found = mongo.find(collection_name, query, limit=limit, offset=offset, sort=sort) response['result']['total'] = count response['result']['has_more'] = True if limit + offset < count else False response['result']['objects'] = found response['result']['count'] = len(found) response['result']['limit'] = limit response['result']['offset'] = offset except Exception, e: response = createResponse401(str(e))
def execute_str(str_query, db_name, date_from, date_to, coll, arguments):
    """Run a raw query string against the given collection and date range."""
    return Mongo().query(db_name, date_from, date_to, str_query, arguments, coll)
# -*- coding: utf-8 -*- import sys sys.path.append('../modelos/') #sys.path.append('/run/media/giulliano/Desenvolvimento/workspace/python/onde_almocar/BackEnd/ondealmocar/') #sys.path.append('/run/media/giulliano/Desenvolvimento/workspace/python/onde_almocar/BackEnd/ondealmocar/modelos/') #configuracao de paths from restaurante import Restaurante from endereco import Endereco from perfil_restaurante import PerfilRestaurante from caracteristica import Caracteristica from mongo import Mongo mongo = Mongo(); #inicio da carga r = Restaurante() r.nome = 'Restaurante 1' r.site = 'www.site.com.br' r.telefone = '11 99994444' r.horario = '8h - 22h' r.descricao = unicode('Descrição....','utf-8') r.latitude = -23.568206 r.longitude = -46.649441 e = Endereco() e.logradouro = unicode('Av. Brg. Luís Antônio','utf-8') e.numero = '2250' e.complemento = '' e.cep = '01418100'
def tagCaseHtml(ip, port, filename, content, outpath):
    """Tag a medical case record via the remote tagger service, write an
    annotated HTML report to ``outpath``, and upsert the aggregated tag
    results (keyed by the numeric file id) into Mongo.
    """
    res_json_dict = {}
    res_json_dict["symp_text"] = ""
    # Accumulators merged across every tagged section of the record.
    all_pos_tag = set()
    all_neg_tag = set()
    all_polarity_res = {}
    all_range_lower = {}
    all_range_upper = {}
    all_kv_res = {}
    #rpc_client = getRpcClient(tagger_host)
    rpc_client = TaggerClient(ip, port)
    timecal = TimeCal()
    try:
        # Split the raw record into named sections (complain, sympton, ...).
        bs = rpc_client.basic_struct(content)
    except:
        print traceback.format_exc()
        print "%s:%d deal with %s failed" % (ip, port, filename),
        print timecal.cost()
        rpc_client.close()
        return
    res_ret = start_html(filename)
    """ case html format 1: simple text 2: complex text """
    # Section layout: (field key, display title, type) where type 1 is plain
    # text and type 2 is tagged via the rpc tagger.
    caseHtmlMap = OrderedDict()
    caseHtmlMap[u"基本信息"] = [
        ("name", u"姓名", 1),
        ("gender", u"性别", 1),
        ("age", u"患病年龄", 1),
        ("status", u"当前状态", 1),
        ("diagnosis_date", u"确诊时间", 1),
        ("treat_date", u"治疗时间", 1),
        ("treat_hospital", u"治疗医院", 1),
        ("doctor_comment", u"医生评价", 1),
        ("hospital_comment", u"医院评价", 1)
    ]
    caseHtmlMap[u"治疗方案"] = [
        ("surgery", u"手术", 2),
        ("radiotherapy", u"放疗", 1),
        ("chemotherapy", u"化疗", 1),
        ("medicine", u"成药", 1),
        ("herbal", u"中草药", 1),
    ]
    caseHtmlMap[u"入院记录"] = [
        ("complain", u"主诉", 2),
        ("sympton", u"主要症状", 2),
        ("med_his", u"现病史", 2),
        ("person_his", u"个人史", 1),
        ("family_his", u"家庭史", 1),
        ("med_exam", u"医学检查", 2),
        ("body_exam", u"体格检查", 2),
        ("spec_exam", u"专科检查", 2),
        ("acce_exam", u"辅助检查", 2),
    ]
    caseHtmlMap[u"出院记录"] = [
        ("med_exp", u"入院情况及诊疗经过", 2),
        ("effect", u"出院情况及治疗效果", 2),
        ("diagnosis", u"出院诊断", 2),
        ("advice", u"出院医嘱", 1),
        ("days", u"住院天数", 1),
    ]
    for p in caseHtmlMap:
        res_ret += title_1(p)
        for row in caseHtmlMap[p]:
            title = row[0]
            name = row[1]
            type_ = row[2]
            if title in bs:
                if type_ == 1:
                    # simple text
                    res_ret += title_2(name)
                    res_ret += norm_text(bs[title])
                elif type_ == 2:
                    # complex text: run the remote tagger on the section
                    res_ret += title_2(name)
                    timecal.reset()
                    try:
                        tag_res = rpc_client.tag(bs[title], "doc")
                    except:
                        print traceback.format_exc()
                        print "%s:%d deal with %s failed" % (ip, port, filename),
                        timecal.cost()
                        rpc_client.close()
                        return
                    # Merge this section's tags into the record-wide sets.
                    all_pos_tag = all_pos_tag | set(tag_res.pos_tag)
                    all_neg_tag = all_neg_tag | set(tag_res.neg_tag)
                    for k in tag_res.polarity_res:
                        all_polarity_res[k] = tag_res.polarity_res[k]
                    for k in tag_res.range_res_lower:
                        all_range_lower[k] = tag_res.range_res_lower[k]
                    for k in tag_res.range_res_upper:
                        all_range_upper[k] = tag_res.range_res_upper[k]
                    for k in tag_res.kv_value:
                        all_kv_res[k] = tag_res.kv_value[k]
                    res_json_dict["symp_text"] += bs[title] + "\r\n"
                    res_ret += norm_text(tag_res.mk_str)
    rpc_client.close()
    res_ret += end_html()
    res_json_dict["symp_pos_tag"] = list(all_pos_tag)
    res_json_dict["symp_neg_tag"] = list(all_neg_tag)
    for key in all_polarity_res:
        res_json_dict[key] = all_polarity_res[key]
    for key in all_range_lower:
        res_json_dict[key] = all_range_lower[key]
    for key in all_range_upper:
        res_json_dict[key] = all_range_upper[key]
    for key in all_kv_res:
        res_json_dict[key] = all_kv_res[key]
    # Round-trip through JSON to normalize encodings before storage.
    res_json = json.dumps(res_json_dict)
    try:
        js = json.loads(res_json.encode('utf8'))
    except:
        # NOTE(review): `s` and `path` are not defined in this function — this
        # fallback would raise NameError if ever reached; confirm intent.
        print s, path
        js = json.loads(s)
    li = []
    for tag in js["symp_pos_tag"]:
        li.append(tag)
    js["symp_pos_tag"] = li
    li = []
    for tag in js["symp_neg_tag"]:
        li.append(tag)
    js["symp_neg_tag"] = li
    js["symp_text"] = js["symp_text"].strip()
    for key in all_polarity_res:
        js[key] = all_polarity_res[key]
    for key in all_range_lower:
        js[key] = all_range_lower[key]
    for key in all_range_upper:
        js[key] = all_range_upper[key]
    for key in all_kv_res:
        js[key] = all_kv_res[key]
    out_file = os.path.join(outpath, filename.split(".")[0] + ".html")
    writeFile(out_file, res_ret)
    # The filename stem is the numeric record id used as the Mongo key.
    id = filename.split('.')[0]
    try:
        #es.indexWapper(int(id), js)
        key = {"_id": int(id)}
        mongo_client = Mongo(host=mongo_host, port=mongo_port, db=mongo_db, table=mongo_table)
        mongo_client.update(key, js)
        #mongo_client.close()
    except:
        print id, js, traceback.format_exc()
        return
    print out_file
# -*- coding: utf-8 -*- #configuracao de paths import os import sys import django.core.handlers.wsgi sys.path.append('/home/giulliano/Desktop/onde_almocar/onde_almocar/BackEnd/modelos') sys.path.append('/home/giulliano/Desktop/onde_almocar/onde_almocar/BackEnd/algoritmos/') #inicio do teste from mongo import Mongo from filtro import Filtro f = Filtro() f.latitude = -23.565118 f.longitude = -46.652069 m = Mongo() resultado = m.pesquisar_restaurante(f) print resultado
class Bird():
    """Model for a bird document backed by a Mongo collection."""

    def __init__(self, name=None, family=None, continent=None, visible=False):
        self.name = name
        self.family = family
        self.continent = continent
        self.visible = visible
        self.M = Mongo()

    def list(self, id=None):
        """
        Lists all birds if id is None.
        Lists one bird if id is set.
        Returns False when nothing is found.
        """
        if id is None:
            # get data from Mongo
            data = self.M.get()
            if not data:
                return False
            # Build a list of the visible birds' object-ids.
            # (comprehension replaces the original append loop; the dead
            # `return False` statements after the returns were removed)
            response = [str(b['_id']) for b in data if b['visible'] == True]
            if len(response) > 0:
                return response
            return False
        # get data from Mongo
        data = self.M.get(id)
        if not data:
            return False
        # here we convert _id to id for valid JSON
        data["id"] = str(data["_id"])
        # and pops the _id
        data.pop("_id")
        return data

    def insert(self):
        """
        Creates our bird dict and ensures insertion.
        """
        # create utc-date for when bird is added
        self.added = datetime.utcnow().strftime("%Y-%m-%d")
        # build our bird-dict
        bird = {
            "name": self.name,
            "family": self.family,
            "continents": self.continent,
            "visible": self.visible,
            "added": self.added,
        }
        # insert bird
        return self.M.insert(bird)

    def delete(self, id):
        """
        Deletes the bird with supplied id from collection
        """
        status = self.M.delete(id)
        # if deleted documents > 0 we've deleted the bird
        return status['n'] > 0
def fill_document(self, database, collection, filter):
    """Load the filtered document's content, parse it, and show it in the
    text buffer.
    """
    mongo = Mongo()
    content = mongo.get_content(database, collection, filter)
    self.buffer.set_text(self.parse(content))
def test(self):
    """Populate the collection and verify it contains exactly 5 documents."""
    mongo = Mongo()
    mongo.populate()
    things = mongo.count()
    # assertEqual replaces the deprecated failIf(things != 5) and reports
    # the actual count on failure.
    self.assertEqual(things, 5)
#configuracao de paths import os import sys import django.core.handlers.wsgi sys.path.append('/home/giulliano/Desktop/onde_almocar/onde_almocar/BackEnd/modelos') sys.path.append('/home/giulliano/Desktop/onde_almocar/onde_almocar/BackEnd/algoritmos/') #inicio do teste from mongo import Mongo from restaurante import Restaurante from endereco import Endereco from perfil_restaurante import PerfilRestaurante from caracteristica import Caracteristica m = Mongo() r = Restaurante() r.nome = 'Lanchonete 1' r.site = 'www.site.com.br' r.telefone = '11 99994444' r.horario = '8h - 22h' r.descricao = 'Descrição....' e = Endereco() e.logradouro = 'Av Paulista' e.numero = '1000' e.complemento = '' e.cep = '01311000' e.latitude = -23.565118 e.longitude = -46.652069
# -*- coding: utf-8 -*- import sys sys.path.append('/home/giulliano/Desktop/onde_almocar/onde_almocar/modelos/') sys.path.append('/run/media/giulliano/Desenvolvimento/workspace/python/onde_almocar/BackEnd/ondealmocar/') sys.path.append('/run/media/giulliano/Desenvolvimento/workspace/python/onde_almocar/BackEnd/ondealmocar/modelos/') #configuracao de paths from cliente import Cliente from perfil_cliente import PerfilCliente from mongo import Mongo #inicio da carga mongo = Mongo(); p = PerfilCliente() p.preco_medio = 30.0 p.conforto = 2 p.barulho = 5 c = Cliente() c.nome = 'Giulliano' c.email = '*****@*****.**' c.perfil_cliente = p; c.almocos = [1,2,3]; c.amigos = [1,2,3]; c.favoritos = [1,2,3]; json_data = c.extrair_json() mongo.inserir_cliente(json_data);
def __init__(self, name=None, family=None, continent=None, visible=False):
    """Store the bird's descriptive fields and open a Mongo handle."""
    self.name, self.family = name, family
    self.continent, self.visible = continent, visible
    self.M = Mongo()
def __init__(self, dbname, tabname):
    """Bind this instance to one Mongo database/collection pair."""
    handle = Mongo(dbname, tabname)
    self.mongo_inst = handle
class Comparison(object):
    """Finds the stored image descriptors closest to a query descriptor.

    Descriptors are pickled numpy arrays kept in a Mongo table; distances
    are mean squared Hamming distances over cross-checked BF matches.
    """

    def __init__(self, dbname, tabname):
        self.mongo_inst = Mongo(dbname, tabname)

    # Get descriptor numpy array by id from mongodb
    def query_descriptor_from_id(self, file_name):
        d = self.mongo_inst.find_one_by_id(file_name)
        return self._unpickle(d['des'])

    @staticmethod
    def _unpickle(binary):
        # NOTE: unpickling is only safe because the descriptors were written
        # by this project, not by untrusted parties.
        return pkl.loads(binary)

    @staticmethod
    def compare_des(des1, des2):
        """Mean squared distance over cross-checked brute-force matches."""
        bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=True)
        matches = bf.match(des1, des2)
        # overall_squared_diff
        return np.mean([match.distance**2 for match in matches])

    def closest_k(self, query_des, k=10):
        """Return (names, distances) of the k nearest stored descriptors.

        query_des may be an image file name (looked up in Mongo) or a raw
        numpy descriptor array. Results are sorted by ascending distance;
        when fewer than k documents exist, only the real ones are returned.
        """
        # isinstance instead of type() ==: also accepts str/ndarray subclasses.
        if isinstance(query_des, str):
            query_des_arr = self.query_descriptor_from_id(query_des)
        elif isinstance(query_des, np.ndarray):
            query_des_arr = query_des
        else:
            raise TypeError('query_des must be a string (image file name) or numpy array (descriptor)')
        it = self.mongo_inst.tab.find()
        # Seed with +inf so unfilled slots sort last; np.empty() left
        # uninitialized garbage in them when the collection had < k docs.
        bottom_k_diff = np.full(k, np.inf)
        bottom_k_names = []
        i = 0
        for entry in it:
            des_arr = self._unpickle(entry['des'])
            name = entry['_id']
            mean_diff = self.compare_des(query_des_arr, des_arr)
            if i < k:
                bottom_k_diff[i] = mean_diff
                bottom_k_names.append(name)
            elif mean_diff < np.min(bottom_k_diff):
                # evict the current worst of the bottom-k
                min_ind = np.argmin(bottom_k_diff)
                bottom_k_diff[min_ind] = mean_diff
                bottom_k_names[min_ind] = name
            i += 1
        # Slice to the filled slots: fixes an IndexError the original hit
        # when fewer than k documents exist (names list shorter than k).
        count = min(i, k)
        valid_diff = bottom_k_diff[:count]
        sorted_ind = np.argsort(valid_diff)
        sorted_diff = valid_diff[sorted_ind]
        sorted_name = np.array(bottom_k_names)[sorted_ind]
        return sorted_name, sorted_diff

    @staticmethod
    def plot_closest_10(dirname, sorted_names, sorted_diff):
        """Show the closest images in a 2x5 grid, each labelled with its distance."""
        fig = plt.figure(1, (12., 5.))
        grid = ImageGrid(fig, 111, nrows_ncols=(2, 5), axes_pad=0.1)
        for i, (name, diff) in enumerate(zip(sorted_names, sorted_diff)):
            image_arr = cv2.imread(os.path.join(dirname, name))
            grid[i].imshow(image_arr)  # The AxesGrid object works as a list of axes.
            grid[i].axes.get_xaxis().set_visible(False)
            grid[i].axes.get_yaxis().set_visible(False)
            label = mpatches.Patch(color='red', label=str(np.round(diff)))
            grid[i].legend(handles=[label], loc=4)
        plt.show()
def local_img(self,string):
    """Re-host every image referenced by one question onto Qiniu.

    Loads the question's JSON document and (stringified) HTML document
    from Mongo, walks the JSON 'content' structure for entries of type
    'image', uploads each original URL through self._upload_qiniu,
    rewrites both the JSON structure and the HTML text with the returned
    Qiniu URL, and writes both documents back only when something changed.

    :param string: question id as a decimal string.
    """
    from gl import LOG
    update_flag = False  # flips to True on the first successful URL rewrite
    LOG.info('start local img,question id [%s]' % string)
    question_id = int(string)
    mongo = Mongo()
    mongo.connect('resource')
    mongo.select_collection('mongo_question_json')
    # NOTE: this local 'json' shadows any module-level json import.
    json = mongo.find_one({'question_id':question_id},{'content':1})
    mongo.select_collection('mongo_question_html')
    # NOTE(review): str() is applied to the whole result document rather
    # than its 'content' field, and that stringified dict is what gets
    # written back below - confirm this is intended.
    html = str(mongo.find_one({'question_id':question_id},{'content':1}))
    #img_expr = parse("content[*].*[*]")
    #img_list = [match.value for match in img_expr.find(json) if isinstance(match.value,dict) and\
    #            'type' in match.value.keys() and match.value['type'] == 'image']
    #pprint.pprint(json)
    content = ''
    if json:
        content = json['content']
        # content maps section keys to lists whose items may be plain
        # strings, dicts (possibly holding a nested 'group' list), or lists.
        for key,wrap in content.items():
            for idx,item in enumerate(content[key]):
                if isinstance(item,str):
                    continue
                if isinstance(item,dict):
                    if 'group' in item.keys():
                        group = item['group']
                        for index,item1 in enumerate(group):
                            if isinstance(item1,dict) and 'type' in item1.keys() and item1['type'] == 'image':
                                ori_url = item1['value']
                                qiniu_url = self._upload_qiniu(ori_url)
                                # falsy result means the upload failed; keep old URL
                                if qiniu_url:
                                    content[key][idx]['group'][index]['value'] = qiniu_url
                                    update_flag = True
                                    html = html.replace(ori_url,qiniu_url)
                    # a dict may be an image itself in addition to carrying a group
                    if 'type' in item.keys() and item['type'] == 'image':
                        ori_url = item['value']
                        qiniu_url = self._upload_qiniu(ori_url)
                        if qiniu_url:
                            content[key][idx]['value'] = qiniu_url
                            update_flag = True
                            html = html.replace(ori_url,qiniu_url)
                if isinstance(item,list):
                    for index,item1 in enumerate(item):
                        # NOTE(review): unlike the 'group' branch, item1 is not
                        # checked to be a dict first - a non-dict element here
                        # would raise (AttributeError on .keys()).
                        if 'type' in item1.keys() and item1['type'] == 'image':
                            ori_url = item1['value']
                            qiniu_url = self._upload_qiniu(ori_url)
                            if qiniu_url:
                                content[key][idx][index]['value'] = qiniu_url
                                update_flag = True
                                html = html.replace(ori_url,qiniu_url)
    if update_flag:
        # write both rewritten documents back; the results are formatted
        # with %d below, so update_many presumably returns an affected
        # count - confirm against the Mongo wrapper.
        mongo.select_collection('mongo_question_json')
        json_effected = mongo.update_many({'question_id':question_id},{'$set':{'content':content}})
        mongo.select_collection('mongo_question_html')
        html_effected = mongo.update_many({'question_id':question_id},{'$set':{'content':html}})
        LOG.info('mongo update successful json[%d] -- html[%d]' % (json_effected,html_effected))
def post(self):
    """Handle a question-upload POST (tornado handler, Python 2 era).

    Validates the form fields, parses the question JSON/HTML payloads,
    stages both on Qiniu, authenticates the uploader against a remote
    token service, inserts the question plus topic/chapter links into
    MySQL, mirrors the JSON/HTML content into Mongo, optionally re-hosts
    images, and finally writes a JSON status body.

    NOTE(review): the body contains 'yield gen.Task(...)', so the method
    must be decorated as a tornado coroutine at the (unseen) definition
    site - confirm.
    """
    # Single-pass loop used as a structured goto: every validation or
    # infrastructure failure sets ret and breaks straight to the common
    # response section after the loop.
    for i in range(1):
        self.set_header("Access-Control-Allow-Origin", "*")
        LOG.info('API IN[%s]' % (self.__class__.__name__))
        LOG.info('PARAMETER IN[%s]' % self.request.arguments)
        ret = {'code':'','message':''}
        essential_keys = set(['json','html','topic','level','type','group','chapter','ref'])
        # check_parameter returning truthy means the request keys are invalid
        if Base.check_parameter(set(self.request.arguments.keys()),essential_keys):
            ret['code'] = 1
            ret['message'] = '无效参数'
            LOG.error('ERR[in parameter invalid]')
            break
        # tornado stores each argument as a list of values; join flattens it
        question_json = ''.join(self.request.arguments['json'])
        question_html = ''.join(self.request.arguments['html'])
        question_topic = ''.join(self.request.arguments['topic'])
        question_level = ''.join(self.request.arguments['level'])
        question_type = ''.join(self.request.arguments['type'])
        question_group = ''.join(self.request.arguments['group'])
        question_chapter = ''.join(self.request.arguments['chapter'])
        ref = ''.join(self.request.arguments['ref'])
        if Business.is_level(question_level) is False:
            ret['code'] = 1
            ret['message'] = '无效参数'
            LOG.error('ERR[level is invalid]')
            break
        try:
            #question_json = urllib.unquote(question_json)
            #question_json = question_json.replace("'","\"")
            encode_json = json.loads(question_json,encoding = 'utf-8')
            #question_html = urllib.unquote(question_html)
            #question_html = question_html.replace("'","\"")
            encode_html = json.loads(question_html,encoding = 'utf-8')
            LOG.info('Json Loads Successful')
            answer_num = 0
            # at least one of topic / chapter must be supplied
            if Base.empty(question_topic) and Base.empty(question_chapter):
                ret['code'] = 1
                ret['message'] = '无效参数'
                LOG.error('ERR[topic and chapter empty]')
                break
            if Base.empty(question_group):
                ret['code'] = 1
                ret['message'] = '无效参数'
                LOG.error('ERR[group empty]')
                break
            if Base.empty(question_topic) is False:
                topic_list = question_topic.split(',')
                for question_theme in topic_list:
                    if Business.is_topic(question_theme) is False:
                        ret['code'] = 1
                        ret['message'] = '无效参数'
                        LOG.error('ERR[topic %s invalid]' % question_theme)
                        # NOTE(review): this break only exits the inner
                        # for-loop, not the outer range(1) loop, so
                        # processing continues with the error code set.
                        # Looks like a latent bug - confirm.
                        break
            type_name = Business.is_type(question_type)
            if type_name is False:
                ret['code'] = 1
                ret['message'] = '无效参数'
                LOG.error('ERR[type is invalid]')
                break
            option_num = 0
            LOG.info('Json Parse Start')
            # '选择题' (multiple choice): count answers and options
            if type_name == '选择题'.decode('utf-8'):
                if 'answer' in encode_json.keys():
                    answer_num = len(encode_json['answer'])
                    option_num = len(encode_json['options'])
            # '填空题' (fill-in-the-blank): highest blank index = answer count
            if type_name == '填空题'.decode('utf-8'):
                if 'answer' in encode_json.keys():
                    answer_num = max([int(group['index']) for group in encode_json['answer']])
            LOG.info('Json Parse End')
            if not Base.empty(question_chapter):
                if Business.chapter_id_exist(question_chapter) is False:
                    ret['code'] = 1
                    ret['message'] = '无效参数'
                    # NOTE(review): logs question_theme (a topic id) rather
                    # than question_chapter, and question_theme may be
                    # unbound here - confirm.
                    LOG.error('ERR[seriess %s invalid]' % question_theme)
                    break
        except (ValueError,KeyError,TypeError):
            ret['code'] = 1
            ret['message'] = '无效参数'
            LOG.error('ERR[json format invalid]')
            break
        except CKException:
            ret['code'] = 3
            ret['message'] = '服务器错误'
            LOG.error('ERR[mysql exception]')
            break
        # deterministic staging key derived from the question's metadata
        key = question_topic + question_level + question_type + question_group
        secret_key = hashlib.sha1(key).hexdigest()
        qiniu = QiniuWrap()
        json_key = 'tmp_' + secret_key + '.json'
        if not qiniu.upload_data("temp",json_key,question_json):
            ret['code'] = 4
            ret['message'] = '服务器错误'
            LOG.error('ERR[json upload qiniu exception]')
            break
        html_key = 'tmp_' + secret_key + '.html'
        if not qiniu.upload_data("temp",html_key,question_html):
            ret['code'] = 4
            ret['message'] = '服务器错误'
            LOG.error('ERR[html upload qiniu exception]')
            break
        configer = Configer()
        remote_host = configer.get_configer('REMOTE','host')
        remote_port = configer.get_configer('REMOTE','port')
        remote_uri = configer.get_configer('REMOTE','uri')
        remote_timeout = configer.get_configer('REMOTE','timeout')
        remote_url = "http://%s:%s/%s" % (remote_host,remote_port,remote_uri)
        token = self.get_cookie("teacher_id")
        LOG.info('TOKEN[%s]' % token)
        if token is None:
            ret['code'] = 6
            ret['message'] = 'token失效'
            LOG.error('ERROR[token empty]')
            break
        post_data = {'token' : token}
        client = httpclient.AsyncHTTPClient()
        # validate the token against the remote auth service
        response = yield gen.Task(client.fetch,remote_url,request_timeout = int(remote_timeout),method = 'POST',body = urllib.urlencode(post_data ))
        #response = Http.post(remote_url,post_data)
        if 200 == response.code:
            encode_body = json.loads(response.body,encoding = 'utf-8')
            # remote code 0/2: token unknown or expired
            if 0 == encode_body['code'] or 2 == encode_body['code']:
                ret['code'] = 7
                ret['message'] = 'token失效'
                LOG.error('ERR[token not exist]')
                break
            if 1 == encode_body['code']:
                subject_id = encode_body['subject_id']
                grade_id = encode_body['grade_id']
                system_id = encode_body['system_id']
                org_type = encode_body['org_type']
                # group 0 is allowed without existence check
                if 0 != int(question_group):
                    if Business.group_id_exist(question_group,system_id) is False:
                        ret['code'] = 8
                        ret['message'] = '无效参数'
                        LOG.error('ERROR[group not exist]')
                        break
                db = Mysql()
                question_sql = "insert into entity_question (difficulty,question_docx,html,upload_time,update_time,question_type,subject_id,new_format,upload_id,upload_src,question_group,grade_id,state,is_single,question_type_id,answer_num,count_ref,paper_year,parent_question_id,count_options) values (%(level)d,'%(json)s','%(html)s',now(),now(),'%(type)s',%(subject_id)d,1,%(upload_id)d,%(upload_src)d,%(question_group)d,%(grade_id)d,'ENABLED',1,%(question_type_id)d,%(answer_num)d,0,0,0,%(count_options)d);"
                link_topic_sql = "insert into link_question_topic (question_id,topic_id) values (%(q_id)d,%(t_id)d);"
                link_chapter_sql = "insert into link_question_chapter (question_id,chapter_id) values (%(q_id)d,%(c_id)d);"
                try:
                    db.connect_master()
                    db.start_event()
                    # insert the question row, then link it to its topics/chapter
                    question_res = db.exec_event(question_sql,level = int(question_level),json = json_key,html = html_key,type = type_name,subject_id = int(subject_id),upload_id = int(system_id),upload_src = int(org_type),question_group = int(question_group),grade_id = int(grade_id),question_type_id = int(question_type),answer_num = answer_num,count_options = option_num)
                    question_sql = db.get_last_sql()
                    question_id = db.get_last_id()
                    LOG.info('RES[%s] - INS[%d]' % (question_res,question_id))
                    if Base.empty(question_topic) is False:
                        topic_list = question_topic.split(',')
                        for question_theme in topic_list:
                            topic_res = db.exec_event(link_topic_sql,q_id = int(question_id),t_id = int(question_theme))
                            topic_sql = db.get_last_sql()
                            topic_id = db.get_last_id()
                            LOG.info('RES[%s] - INS[%d]' % (topic_res,topic_id))
                    if not Base.empty(question_chapter):
                        chapter_res = db.exec_event(link_chapter_sql,q_id = int(question_id),c_id = int(question_chapter))
                        chapter_sql = db.get_last_sql()
                        chapter_id = db.get_last_id()
                        LOG.info('RES[%s] - INS[%d]' % (chapter_res,chapter_id))
                except DBException as e:
                    db.rollback()
                    db.end_event()
                    ret['code'] = 3
                    ret['message'] = '服务器错误'
                    LOG.error('ERR[insert mysql error]')
                    break
        else:
            ret['code'] = 3
            ret['message'] = '服务器错误'
            LOG.error('ERROR[remote error]')
            break
        mongo = Mongo()
        try:
            # mirror the parsed JSON/HTML content into Mongo under question_id
            mongo.connect('resource')
            mongo.select_collection('mongo_question_json')
            json_id = mongo.insert_one({"content":encode_json,"question_id":question_id})
            LOG.info('MONGO[insert json] - DATA[%s] - INS[%s] - Question Id[%d]' % (json.dumps(encode_json),json_id,question_id))
            mongo.select_collection('mongo_question_html')
            html_id = mongo.insert_one({"content":encode_html,"question_id":question_id})
            LOG.info('MONGO[insert html] - DATA[%s] - INS[%s] - Question Id[%d]' % (json.dumps(encode_html),html_id,question_id))
        except DBException as e:
            # Mongo mirror failed: roll the MySQL work back too
            db.rollback()
            db.end_event()
            ret['code'] = 3
            ret['message'] = '服务器错误'
            LOG.error('ERR[mongo exception]')
            break
        # commit the MySQL event only after the Mongo mirror succeeded
        db.end_event()
        if int(ref):
            # optionally re-host the question's images via Doit.local_img
            doit = Doit()
            doit.local_img(str(question_id))
            LOG.info('Local Img [%s]' % str(question_id))
        ret['code'] = 0
        ret['message'] = 'success'
        ret['id'] = question_id
    # common exit: every break above lands here with ret populated
    LOG.info('PARAMETER OUT[%s]' % ret)
    LOG.info('API OUT[%s]' % (self.__class__.__name__))
    self.write(json.dumps(ret))
    self.finish()
def __init__(self, url=None):
    """Open the Mongo database handle for the given url and keep it."""
    db_handle = Mongo.get_db(url)
    self._db = db_handle