def add_to_index(index, model):
    """Index a model instance's searchable fields into Elasticsearch.

    No-op when the global ``elasticsearch`` client is not configured.
    """
    if not elasticsearch:
        return
    document = {name: getattr(model, name) for name in model.__searchable__}
    elasticsearch.index(index=index, doc_type=index, id=model.id, body=document)
def add_to_index(index, model):
    """Add *model*'s searchable fields to the Elasticsearch index *index*.

    Silently does nothing when no ``es`` client is available.
    """
    if not es:
        return
    document = {name: getattr(model, name) for name in model.__searchable__}
    es.index(index=index, id=model.id, body=document)
def add_word():
    """Create or update a dictionary word document in Elasticsearch.

    The request JSON decides the operation: a payload without an ``id`` is a
    new word (a random UUID is generated); otherwise the existing document is
    partially updated and its ``date_modified`` refreshed.

    Returns JSON with ``response``/``id`` on success, ``response``/``error``
    on failure.
    """
    request_object = request.get_json()
    try:
        if "id" not in request_object:
            # No id from the front end -> brand new entry.
            request_object['date_added'] = getCurrentTime()
            request_object['date_modified'] = getCurrentTime()
            idn = str(uuid.uuid4())  # fresh random document id
            es.index(index="dict_word", doc_type="dictionary", id=idn,
                     body=request_object)
            return jsonify({"response": "success", "id": idn})
        else:
            # id present -> partial update of the existing document.
            request_object['date_modified'] = getCurrentTime()
            es.update(index="dict_word", doc_type="dictionary",
                      id=request_object["id"], body={"doc": request_object})
            return jsonify({"response": "success", "id": request_object["id"]})
    except Exception as e:
        # Fix: `print str(e)` was Python 2-only syntax (SyntaxError on py3).
        print(str(e))
        return jsonify({"response": "failure", "error": str(e)})
def add_to_index(index, model):
    """Insert *model*'s searchable fields into Elasticsearch under *index*.

    Because an explicit document id is supplied, ``es.index`` creates the
    document when it is absent and overwrites it when it exists.
    No-op when no ``es`` client is configured.
    """
    if not es:
        return
    fields = {}
    for attr in model.__searchable__:
        fields[attr] = getattr(model, attr)
    es.index(index=index, id=model.id, body=fields)
def update_search_index(self):
    """Push this post's title and content into the Elasticsearch index."""
    index_name = app.config.get("ES_INDEX_NAME")
    type_name = app.config.get("ES_TYPE_NAME")
    document = {"title": self.title, "content": self.content}
    es.index(index=index_name, doc_type=type_name, id=self.id, body=document)
    app.logger.info("[ES] Index post {}: {}".format(self.id, self.title))
def add_word():
    """Store a new word document with a fresh UUID and an initial hit score."""
    payload = request.get_json()
    new_id = str(uuid.uuid4())
    payload['id'] = new_id
    payload['hit_points'] = 1.0
    es.index(index="dictionary", doc_type="words", id=new_id, body=payload)
    return jsonify({"response": "success", "id": new_id})
def upload_endpoint():
    """Index every uploaded file as a base64-encoded ES attachment document."""
    import base64  # local import keeps this fix self-contained

    files = request.files.getlist("file[]")
    for f in files:
        sf = secure_filename(f.filename)
        # Fix: bytes.encode("base64") is Python 2 only; base64.b64encode
        # produces the equivalent payload on both versions.
        es_dict = {
            "file": base64.b64encode(f.read()).decode("ascii"),
            "title": sf,
            "owner": current_owner.organization.organization,
        }
        es.index(index=DEFAULT_INDEX, doc_type="attachment", body=es_dict)
        f.close()
    return redirect(url_for(".root"))
def es_rebuild():
    """Re-index every Entry row into Elasticsearch and report success."""
    index_name = app.config.get('ES_INDEX_NAME')
    type_name = app.config.get('ES_TYPE_NAME')
    for entry in Entry.select():
        es.index(
            index=index_name,
            doc_type=type_name,
            id=entry.id,
            body={'title': entry.title, 'content': entry.content},
        )
        app.logger.info('[ES] Index post {}: {}'.format(entry.id, entry.title))
    return jsonify({'status': 'success'})
def add_businfo():
    """Create a new bus-info document from the request JSON.

    ``start_Time`` is parsed into a ``datetime`` so later code can compare
    departure times. Responds with the generated document id on success.
    """
    request_object = request.get_json()
    try:
        # Parse "HH:MM:SS" into a datetime for later comparison.
        request_object['start_Time'] = datetime.strptime(
            request_object['start_Time'], '%H:%M:%S')
        idn = str(uuid.uuid4())  # fresh random document id
        # Create a new entry: index = "busdb" (db), doc_type = "businfo" (table).
        es.index(index="busdb", doc_type="businfo", id=idn, body=request_object)
        return jsonify({"response": "success", "id": idn})
    except Exception as e:
        # Fix: dropped the stray `print "dollz"` debug line and replaced the
        # Python 2-only `print str(e)` statement with the print() function.
        print(str(e))
        return jsonify({"response": "failure", "error": str(e)})
def upload_endpoint():
    """Index every uploaded file as a base64-encoded ES attachment document."""
    import base64  # local import keeps this fix self-contained

    files = request.files.getlist('file[]')
    for f in files:
        sf = secure_filename(f.filename)
        # Fix: bytes.encode('base64') is Python 2 only; base64.b64encode
        # produces the equivalent payload on both versions.
        es_dict = {
            'file': base64.b64encode(f.read()).decode('ascii'),
            'title': sf,
            'owner': current_owner.organization.organization,
        }
        es.index(index=DEFAULT_INDEX, doc_type='attachment', body=es_dict)
        f.close()
    return redirect(url_for('.root'))
def upload_endpoint():
    """Take uploaded documents and store them in the Elasticsearch index."""
    import base64  # local import keeps this fix self-contained

    files = request.files.getlist('file[]')
    for f in files:
        sf = secure_filename(f.filename)
        # Fix: bytes.encode('base64') is Python 2 only; base64.b64encode
        # produces the equivalent payload on both versions.
        es_dict = {
            'file': base64.b64encode(f.read()).decode('ascii'),
            'title': sf,
            'owner': 'blank'  # current_owner.organization.organization
        }
        es.index(index=es_index, doc_type='attachment', body=es_dict)
        f.close()
    return redirect(url_for('.root'))
def add_bookinfo():
    """Book seats on a bus: decrement availability and record the booking.

    Expects JSON with ``bus_id``, ``date`` and ``booked_seats``. Fails when
    the bus does not exist or not enough seats remain on the given date.
    """
    request_object = request.get_json()
    try:
        idn = str(uuid.uuid4())  # id for the new booking document
        result = es.get(index="busdb", doc_type="businfo",
                        id=request_object['bus_id'])
        # Fix: ES returns a boolean in "found"; comparing against the string
        # "false" could never be true, so missing buses were never reported.
        if not result["found"]:
            return jsonify({"response": "failure",
                            "error": "No bus with given id"})
        bus = result.get('_source')
        for day in bus['seatsinfo']:
            if day['date'] == request_object['date']:
                day['seats_left'] = day['seats_left'] - request_object['booked_seats']
                if day['seats_left'] < 0:
                    # Abort before persisting a negative seat count.
                    return jsonify({"response": "failure", "error": "Seats Full"})
        es.update(index="busdb", doc_type="businfo",
                  id=request_object['bus_id'], body={"doc": bus})
        # Record the booking itself as a new entry.
        es.index(index="busdb", doc_type="bookinfo", id=idn, body=request_object)
        return jsonify({"response": "success", "id": idn})
    except Exception as e:
        # Fix: `print str(e)` was Python 2-only syntax.
        print(str(e))
        return jsonify({"response": "failure", "error": str(e)})
def add_to_es(model_instance):
    """Index an item into the elasticsearch cluster.

    :param model_instance: The model object to be added.
    :return: True on success, False on failure.
    """
    try:
        # Renamed from `json`, which shadowed the stdlib module name.
        json_body = model_instance.jsonify()
        model_name = model_instance.__class__.__name__.lower()
        result = es.index(index=model_name, doc_type=model_name, body=json_body)
        logger.info('Added to elasticsearch: %s', result)
        return True
    except Exception as exc:
        # Fix: the old bare `except:` logged `result`, which is unbound when
        # es.index itself raised (NameError inside the handler). Log the
        # exception and report failure explicitly.
        logger.error("Couldn't add to elasticsearch: %s", exc)
        return False
def post():
    """Submit a new post: persist it, mirror it into ES, then redirect home."""
    form = PostForm()
    if form.validate_on_submit():
        # Save the post to the database first.
        new_post = Post(body=form.post.data, timestamp=datetime.utcnow(),
                        author=g.user)
        db.session.add(new_post)
        db.session.commit()
        flash('Your post is live!')
        # Mirror the post into the search index.
        doc = dict(id=new_post.id, body=new_post.body,
                   user_id=new_post.user_id, timestamp=new_post.timestamp)
        res = es.index(index="microblog", doc_type='post', body=doc)
        app.logger.debug(res['created'])
    return redirect(url_for('.index'))
app.config['links'] = links
app.config['title'] = title
if not os.path.exists(app.config['STATIC_DIR'] + '/reports'):
    # First run: download and index every PDF report, so startup is slow.
    # Also make sure the 'reports' index exists in Elasticsearch.
    os.mkdir(app.config['STATIC_DIR'] + '/reports')
    if not es.indices.exists(index='reports'):
        if not create_index(es):
            os._exit(1)
    for year, link in links.items():
        # NOTE(review): relies on the iteration order of `links`; years
        # before 2000 terminate the loop entirely, not just this item.
        if int(year) < 2000:
            break
        path = f'{app.config["STATIC_DIR"]}/reports/report{year}.pdf'
        if os.path.exists(path):
            with open(path, "rb") as fd:
                document = fd.read()
        else:
            with open(path, 'wb+') as fd:
                # Fix: the report used to be downloaded twice (once into the
                # variable, once again for the write); fetch it only once.
                document = download(link)
                fd.write(document)
        es.index(index='reports', doc_type="pdf", id=year,
                 pipeline='attachment',
                 body={"data": (b64(document)).decode('ascii')})
app.run(host="127.0.0.1", port=8080, debug=True)
def create_sample():
    """Populate sample data: project groups read from an Excel sheet (with
    randomized metadata) indexed into Elasticsearch and Mongo, plus fixed
    registration dates and a teacher-name list inserted into Mongo.

    NOTE(review): Python 2 code (print statements, leading-zero int
    literals); it will not parse under Python 3.
    """
    try:
        path="Project 2014-15.xlsx"
        book = xlrd.open_workbook(path)
        sheet = book.sheet_by_index(0)
        print "Reading from excel file"
        # One project group per spreadsheet row; columns 3/7/8 hold member
        # enrollment numbers, 5 the mentor, 9/10 description/title —
        # presumably; confirm against the workbook layout.
        for row in range (5,20):
            jsonobj={}
            members=[]
            member={}
            mentor=[]
            membersid=[]
            # Randomized sample metadata for the project document.
            jsonobj['projecttype']=random.choice(["major","minor"])
            jsonobj['lang']=random.choice(["C++","Perl","Ruby","Python","Java",".Net","ASP","PHP","C"])
            jsonobj['approved']=random.choice([True,False])
            jsonobj['evaluated']=random.choice([True,False])
            jsonobj['source_code']="http://random.org"
            jsonobj['synopsis']="http://random.org"
            jsonobj['remarks']=random.choice(["Good work","Excellent","Bad","Could have been better"])
            jsonobj['rating']=decimal.Decimal(random.randrange(1000))/100
            data=sheet.cell_value(row,10)
            jsonobj["title"]=str(data)
            data=sheet.cell_value(row,9)
            jsonobj["description"]=data
            # Member 1: enrollment number with the trailing ".0" stripped
            # (cell values come back as floats), plus a random email.
            data=sheet.cell_value(row,8)
            member['eno']=str(data)[:-2]
            member['name']=""
            member['email']=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(7)) + "@gmail.com"
            members.append(member.copy())
            membersid.append(str(data)[:-2])
            # Member 2.
            data=sheet.cell_value(row,7)
            member['eno']=str(data)[:-2]
            member['name']=""
            member['email']=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(7)) + "@gmail.com"
            members.append(member.copy())
            membersid.append(str(data)[:-2])
            # Member 3.
            data=sheet.cell_value(row,3)
            member['eno']=str(data)[:-2]
            member['name']=""
            member['email']=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(7)) + "@gmail.com"
            members.append(member.copy())
            membersid.append(str(data)[:-2])
            data=sheet.cell_value(row,5)
            mentor.append(data.lower())
            jsonobj['members']=members
            jsonobj['mentor']=mentor
            print jsonobj
            # Mongo stores the group skeleton; the Mongo _id becomes the ES
            # document id for the full project document.
            res=mongo.db.groups.insert({"projecttype":jsonobj['projecttype'],"members":membersid})
            es.index(index='probase_repos',id=str(res), doc_type='projects', body=jsonobj)
            members=[]
        print("Inserting Dates")
        # Fixed registration deadlines (one with a pinned _id).
        doc={ "_id" : ObjectId("565dbe993fb58bb4129af671"), "projecttype" : "minor", "action" : "registration", "date" : datetime(2016,10,01,0,0) }
        res=mongo.db.dates.insert(doc)
        doc={ "projecttype" : "major", "action" : "registration", "date" : datetime(2016,12,01,0,0) }
        res=mongo.db.dates.insert(doc)
        print("inserting teachers")
        # Static teacher-name roster, inserted with a pinned _id.
        doc={ "_id" : ObjectId("560d604a080ffddcba75178d"), "name" : [
            "sanjeevsharma", "sanjeevksharma", "sanjaygupta", "Sanjay Goel", "sangeetamittal",
            "sangeeta", "sandeepsingh", "sanchikagupta", "samiyakhan", "samirdevgupta",
            "sakshiagarwal", "rubybeniwal", "rohitpalsingh", "rkdwivedi", "riteshsharma",
            "richagupta", "reemagabrani", "reemabudhiraja", "rcjain", "ravishanker",
            "ravindrakumar", "rakhibansal", "rajnishmisra", "Rajkumartiwari", "rajeshdubey",
            "rajalakshmi", "RAHULSHARMA", "rahulkaushik", "radhikakhanna", "rachana",
            "purteekohli", "puneetrana", "puneetpannu", "priyankaarora", "priyadarshini",
            "pratibhayadav", "prashantkaushik", "prakashkumar", "pawanupadhyay", "patokumari",
            "parulpuri", "parulagarwal", "parmeetkaur", "papiachowdhury", "pankajyadav",
            "pankajpachauri", "pammigauba", "padamkumar", "niyatiaggrawal", "nitinchanderwal",
            "nidhisinha", "nidhigupta", "nfaruqui", "nehasrivastava", "neetusingh",
            "neetusardana", "neerjapande", "neerajwadhwa", "navneetsharma", "navendugoswami",
            "naseemabidi", "namitasinghal", "muktamani", "muktagoyal", "mukeshkumar",
            "mudayabanu", "mstyagi", "mrbehera", "moonisshakeel", "monikajiit",
            "monicachaudhary", "minakshigujral", "megharathi", "mc srivastava", "masanjeev",
            "manojsahni", "manojchauhan", "manishthakur", "MANISHkumar", "anirban pathak",
            "anilkumargupta", "amrishaggarwal", "amrinakausar", "amitverma", "amanpreetkaur",
            "alokchauhan", "alkasharma", "Alka Choubey", "akvadehra", "adwitiyasinha",
            "aditisharma", "aditijain", "adarshkumar", "abhinavgupta", "abbhattacharyya",
            "aayusheegupta", "ankitagupta", "ankitawadhwa", "ankurbhardwaj", "anshubanwari",
            "anuja", "anujbhardwaj", "anujgupta", "aradhanagoyal", "archanapurwar",
            "arpitajadhav", "ashishgoel", "ashokwahi", "ashwanimathur", "asitbandyopadhayay",
            "atulsrivastava", "ayushi gupta", "badribajaj", "bani singh", "BhagwatiPrasad",
            "bharatgupta", "bhawnagupta", "bhubeshjoshi", "chakreshjain", "chetnadabas",
            "chetnagupta", "debdeepde", "deepaksharma", "deepaliverma", "deependerdabas",
            "dhanalekshmig", "dharmveerrajpoot", "divakaryadav", "gagandeepkaur",
            "Gagandeepsingh", "garimakapur", "garimamathur", "gauravverma", "gkagarwal",
            "gssrivastava", "heman", "hemantmeena", "himagupta", "hsdagar",
            "indirasarethy", "induchawla", "ipsitanandi", "jasminesaini", "jhumursengupta",
            "jitendramishra", "jpgupta", "juhi", "kamalrawal", "kanishksingh",
            "kanupriyamisra", "kashavajmera", "kavitapandey", "kc mathur", "kenandini",
            "kirmendersingh", "kishorekumar", "kishorethapliyal", "krishna asawa",
            "krishnagopal", "krishnasundari", "kuldeepsingh", "lokendrakumar", "madhujain",
            "mahendragurve", "maneeshakarn", "manishasingh", "santosh dev", "santoshisen",
            "satishchandra", "sbhattacharya", "scsaxena", "shalinimani", "shamimakhter",
            "shardhaporwal", "sharmistha", "shikhajain", "shikhamehta", "shirinalavi",
            "shradhasaxena", "shrirampurankar", "shrutisabharwal", "shubhanginirathore",
            "shwetadang", "skkhanna", "skraina", "smritibhatnagar", "smritigaur",
            "somyajain", "sppurohit", "sreejithr", "ssuresh", "sudhasrivastava",
            "sujatakapoor", "sujatamohanty", "sumadawn", "sumeghayadav", "supratimdas",
            "supreetkaurbakshi", "sushantsadotra", "sushilkumar", "swatirawal", "swatisharma",
            "tajalam", "tanujchauhan", "tribhuwantewari", "tushitashukla", "vandanaahuja",
            "vibhagupta", "vibharani", "vijaykhare", "vikaspandey", "vikassaxena",
            "vikramkarwal", "vimalkumar", "vineetkhandelwal", "Vinkysharma", "vishalsaxena",
            "VivekDwivedi", "vivekmishra", "viveksajal", "yajmedury", "yashikarustagi",
            "yogeshgupta", "yogeshsingh" ] }
        res=mongo.db.teachers.insert(doc)
    except Exception as e:
        raise e
def add_to_index(index, model):
    """Index *model*'s searchable fields into the Elasticsearch index *index*.

    No-op when no ``es`` client is configured.
    """
    if not es:
        return
    payload = {}
    # Fix: the payload was left empty, so every document was indexed with no
    # fields; copy each attribute named in __searchable__ as the sibling
    # add_to_index helpers in this file do.
    for field in model.__searchable__:
        payload[field] = getattr(model, field)
    es.index(index=index, doc_type=index, id=model.id, body=payload)
def add_to_index(model):
    """Index the searchable fields of *model* into the "datasets" index."""
    body = {attr: getattr(model, attr) for attr in model.__searchable__}
    es.index(index="datasets", doc_type='_doc', id=model.id, body=body)
def index(path):
    """Index all logs to ElasticSearch.

    Walks *path* for daily HTML chat logs, parses sender/timestamp/message
    runs out of each page's <tt> element, and indexes every message newer
    than the last recorded run. The newest timestamp seen is persisted to
    ``latest.txt`` so subsequent runs are incremental.

    NOTE(review): assumes a directory layout of .../<year>/<month>/<day>.html
    (path components [2] and [3] after splitting on '/') — confirm against
    the log folder. The final `print int(max_ts)` is Python 2-only syntax.
    """
    print("Using folder %s" % path)
    print("Check latest record...")
    # Load the timestamp of the last indexed message; 0 means first run.
    try:
        f = open('latest.txt', 'r')
        last_ts = int(f.read())
        f.close()
    except IOError:
        print("File not found, this is first run")
        last_ts = 0
    print("Latest record time: %s" % datetime.fromtimestamp(int(last_ts)))
    print("Start reading logs:")
    max_ts = 0
    for root, dirs, files in os.walk(path):
        # NOTE(review): rebinds the `path` parameter to the split directory
        # components; os.walk keeps iterating from its own state.
        path = root.split('/')
        for f in files:
            # Only plain daily logs: skip *alt.html variants and index.html.
            if f.endswith('html') and f.endswith('alt.html') is False and f != 'index.html':
                file_day = int(f.split('.')[0])
                file_month = int(path[3])
                file_year = int(path[2])
                print("Logs of the %s-%s-%s day" % (file_year, file_month, file_day))
                fpath = os.path.join(root, f)
                soup = BeautifulSoup(open(fpath), 'lxml')
                # State machine over the children of the <tt> element:
                # a 'timestamp' span sets the time, a 'self'/'selfmoder'
                # span sets the sender and opens text collection, then
                # NavigableStrings accumulate into `message` until the
                # next <span> closes it.
                find_text = False
                timestamp = ''
                sender = ''
                message = ''
                for tag in soup.find('tt'):
                    if isinstance(tag, element.Tag):
                        classes = tag.get('class')
                        if classes is not None:
                            if classes[0] == 'timestamp':
                                # Strip the surrounding bracket characters.
                                timestamp = tag.find('a').text[1:-1]
                            elif classes[0] == 'self' or classes[0] == 'selfmoder':
                                sender = tag.text[1:-1]
                                find_text = True
                    elif find_text and isinstance(tag, element.NavigableString):
                        message += tag
                        next_tag = tag.next_sibling
                        if next_tag is not None:
                            if next_tag.name == 'br':
                                message += '\n'
                            if next_tag.name == 'span':
                                # Next span = next message; stop collecting.
                                find_text = False
                    # A complete message has been collected: index it.
                    if find_text is not True and timestamp != '' and sender != '' and message != '':
                        t = timestamp.split(":")
                        dt = datetime(file_year, file_month, file_day, int(t[0]), int(t[1]), int(t[2]))
                        ts = time.mktime(dt.timetuple())
                        # Skip messages already indexed on a previous run.
                        if ts <= last_ts:
                            continue
                        es.index(index=application.config['ELASTICSEARCH_KEY'],
                                 doc_type=application.config['ELASTICSEARCH_TYPE'],
                                 body={'sender': sender, 'message': message.strip(), 'date': dt})
                        if application.config['LOG_DEBUG']:
                            print("[%s] <%s>: %s" % (dt, sender, message.strip()))
                        if ts > max_ts:
                            max_ts = ts
                        # Reset state for the next message.
                        timestamp = ''
                        sender = ''
                        message = ''
    # Persist the newest timestamp seen so the next run is incremental.
    try:
        if max_ts > last_ts:
            f = open('latest.txt', 'w')
            f.write(str(int(max_ts)))
            f.close()
    except IOError:
        print("Can't write latest timestamp!")
    print int(max_ts)
def add_order():
    """Fulfil a shop order by allocating goods from the nearest stocks.

    For every item in the request, walks the stocks connected to the shop
    (sorted by their second tuple element — presumably distance; confirm
    against Neo.get_all), decrements inventory in Postgres, groups the
    allocations per stock into orders, persists them in Mongo, and indexes
    a date/order-id record per order into Elasticsearch.
    """
    orders = request.get_json()
    # Stocks reachable from this shop, as (id_stock, weight) pairs.
    shopToStocks = Neo.get_all(orders['id_shop'])
    new_order = []
    shopToStocks.sort(key=lambda x: x[1])
    for goods in orders['goods']:
        # Redis caches which stocks hold this product; refresh on a miss.
        redisdata = redis.hgetall(goods)
        if redisdata == {}:
            update_redis()
        all_stocks = {k: int(v) for k, v in redisdata.items()}
        for i in range(len(shopToStocks)):
            id_stock = shopToStocks[i][0]
            if id_stock in all_stocks.values():
                # Reuse the order already started for this stock, if any.
                try:
                    cur_order = next(item for item in new_order if item['id_stock'] == id_stock)
                except StopIteration:
                    cur_order = {}
                cur_quantity = Postgres.return_quantity(id_stock, goods)
                new_quantity = cur_quantity[0] - orders['goods'][goods]
                if new_quantity >= 0:
                    if cur_order == {}:
                        # First allocation from this stock: start a new order.
                        cur_order.update({'id_shop': orders['id_shop']})
                        cur_order.update({'id_stock': id_stock})
                        cur_order.update({'goods': {}})
                        cur_order['goods'].update({goods: orders['goods'][goods]})
                        new_order.append(cur_order)
                    else:
                        # Re-insert the existing order with this item added.
                        new_order.remove(cur_order)
                        cur_order['goods'].update({goods: orders['goods'][goods]})
                        new_order.append(cur_order)
                    if new_quantity == 0:
                        # Stock exhausted: drop the row and rebuild the
                        # product's stock cache in Redis.
                        Postgres.delete_state_by_id(id_stock, goods)
                        redis.delete(goods)
                        address_id = Postgres.return_address_and_id(goods)
                        redis.hmset(goods, address_id)
                    else:
                        Postgres.update_quantity(goods, id_stock, new_quantity)
                    # Item fulfilled from this stock; next item.
                    break
            else:
                continue
    for order in new_order:
        # Persist the order and index a lightweight record for search.
        id_order = Mongo.insert('orders', order, returned=True)
        body = {
            "date": f"{datetime.now():%Y-%m-%d}",
            "id_order": str(id_order)
        }
        es.index(index='orders', body=body)
    return '.'
def add(index, model):
    """Index the searchable fields of *model* under *index*.

    Does nothing when no ``es`` client is configured.
    """
    if not es:
        return
    payload = {name: getattr(model, name) for name in model.__searchable__}
    es.index(index=index, id=model.id, body=payload)