def commit(self):
    """Close the staging file and drop this run's timestamped staging objects."""
    self.f.close()
    stamp = self.Timestamp
    cleanup = ["drop view new_nodes_" + stamp,
               "drop table new_relation_fathers_" + stamp,
               "drop view new_relation_members_" + stamp,
               "drop view new_way_nodes_" + stamp]
    helper = DBHelper()
    helper.batchExecute(cleanup)
def registerVoice(): global pcode f = open("code.txt", "w") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/greetings.mp3") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/getcode.mp3") statePCode() print pcode f.write(pcode) f.close() file = readNewDataCode() #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/instruction.mp3") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/again.mp3") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/instruction.mp3") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/instruction_2.mp3") for i in range(0, 4): if i + 1 == 1: print ("Command for turning the lights on") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/lightsOn.mp3") elif i + 1 == 2: print ("Command for turning the lights off") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/lightsOff.mp3") elif i + 1 == 3: print ("Command for closing the door") # playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/closeDoor.mp3") elif i + 1 == 4: print ("Command for opening the door") # playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/openDoor.mp3") recordAudio(i + 1) #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/tnx.mp3") #playsound("C:/Users/Owner/PycharmProjects/final_capstone/response/bye.mp3") db.upStatusCode(pcode) voice.convertToTextFile() return file
def doit(pathToExcel, tablename):
    """Read one Excel sheet and upsert its rows into the DB table of the same name."""
    table_rows = readXls(pathToExcel, tablename)
    db_columns = ID_DBNAME_MAP.values()
    key_columns = ID_DBNAME_MAP.values()
    DBHelper.updateDB(tablename, db_columns, table_rows, key_columns)
def settingSave(value):
    """Collect the current form entries and persist them, then close the dialog."""
    widgets = (txt1, txt2, txt3, txt4, txt5, txt6, txt7,
               txt8, txt9, txt10, txt11, txt12, txt13)
    setlist = [w.get() for w in widgets]
    setlist.append('esc')       # slot 14 is the fixed hotkey
    setlist.append(value / 50)  # slider value scaled to stored units
    setlist.append(txt15.get())
    setlist.append(txt16.get())
    DBHelper.insert(*setlist)   # 17 positional values, same order as before
    processoExit()
def __init__(self):
    # Pool of desktop-browser User-Agent strings; index 0 seeds the default
    # request headers below (presumably others are used for rotation —
    # TODO confirm against the request code).
    self.user_agent = [
        "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
        "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
        "Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
        "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
        "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
        "Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
        "Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
        "Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
        "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
        "Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
        "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
        "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
        "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
        "Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
    ]
    # Dead configuration block left behind as a bare string literal (never
    # executed); kept verbatim.
    ''' MONGODB_SERVER = '192.168.7.115' MONGODB_PORT = 27017 MONGODB_DB = 'movies' MONGODB_INFO_COLLECTION = 'information' '''
    # Default headers use the first UA entry.
    self.header = {"User-Agent": self.user_agent[0]}
    self.db_name = 'movies'
    self.collection_name = 'ip_pool'
    # Hard-coded MongoDB endpoint — NOTE(review): consider moving to config.
    self.client = MongoClient('192.168.7.115', 27017)
    self.db = self.client[self.db_name]
    # Publish the client and db handles for other modules via GlobalVar.
    GlobalVar.set_mq_client(self.client)
    GlobalVar.set_db_handle(self.db)
    # Liveness flag read by the checker loop (0 = not running).
    self.alived = 0
def appoint(feast_id):
    """Join appointment *feast_id* for the logged-in user, then return to the group page."""
    def send_email():
        # Callback invoked by new_appoint when the party is full.
        # Actual mailing is disabled; only a marker is printed.
        print("【发送邮件】")

    DBHelper.new_appoint(feast_id, session.get('user_id'), send_email)
    return redirect("/group")
def getCymonPage( day, tag, adrtype, limit, offset): url = 'https://cymon.io/api/nexus/v1/blacklist/' url += adrtype + '/' + tag url += '/?days=' + day url += '&limit=' + limit url += '&offset=' + offset print 'URL: ', url headers = {'accept':'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8', 'accept-encoding':'gzip, deflate, sdch', 'accept-language':'vi,en;q=0.8', 'cache-control':'max-age=0', 'cookie':'__utmt=1; VT_PREFERRED_LANGUAGE=en; __utma=194538546.508165789.1453801379.1454133997.1454134465.7; __utmb=194538546.18.10.1454134465; __utmc=194538546; __utmz=194538546.1454134465.7.2.utmcsr=google|utmccn=(organic)|utmcmd=organic|utmctr=(not%20provided)', 'upgrade-insecure-requests':'1', 'user-agent':'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.97 Safari/537.36}'} r = requests.get(url, headers) raw = json.loads(r.text) result = raw['results'] if len(result) == 0: print '\t\b' for item in result: ShowProgress.show(result.index(item) + 1, len(result)) if not ('addr' in item.keys()): item['addr'] = item['name'] DBHelper.updateAddress({'address': item['addr'], 'address_type': adrtype}) return raw['count']
def upload_thief_face():
    # Determines thief ID before adding the data to the database.
    # Thief ID is determined from number of user dataset in database.
    # Later on it starts face recognition and uploads it into the database
    # based on the given thief ID.
    thieves = DBHelper.db.child("Thieves").get()
    try:
        # Next free id = existing thief count + 1 (count starts at 1).
        count = 1
        for thief in thieves.each():
            count += 1
        Facial_Recognition_Thief_Registration.register_your_face("Thief_" + str(count))
        # Upload the 10 captured photos in parallel, one job per photo.
        Parallel(n_jobs=multiprocessing.cpu_count())(
            delayed(upload_parallel_thief_photos)(i, count) for i in range(10))
        date = datetime.now().strftime("%d/%m/%Y")
        time = datetime.now().strftime("%H:%M:%S")
        DBHelper.upload_thief_data("Thief_" + str(count), date, time)
        print("An intruder is recorded.")
    except:
        # NOTE(review): bare except — presumably meant to catch .each()
        # failing when no thief exists yet, but it also hides real errors
        # from the registration/upload calls above.
        # NOTE(review): this branch calls upload_parallel_thief_photo
        # (singular) while the branch above calls ..._photos — confirm both
        # helpers actually exist.
        Facial_Recognition_Thief_Registration.register_your_face("Thief_1")
        Parallel(n_jobs=multiprocessing.cpu_count())(
            delayed(upload_parallel_thief_photo)(i) for i in range(10))
        date = datetime.now().strftime("%d/%m/%Y")
        time = datetime.now().strftime("%H:%M:%S")
        DBHelper.upload_thief_data("Thief_1", date, time)
        print("An intruder is recorded.")
def new_suggestions(msg, food_id):
    """Apply feedback method *msg* for *food_id* to the session recommender, re-render picks."""
    user_id = session.get('user_id')
    num = 3
    recommender = recs[session.get("rec")]
    # Dispatch the feedback method (named by *msg*) on the recommender.
    operator.methodcaller(msg, food_id)(recommender)
    recommend_list = []
    for record in recommender.get_recommend(num):
        rec_id = record[0]
        fitness = record[1]
        print(rec_id)
        food = DBHelper.get_food(rec_id)
        score = DBHelper.get_avg_score(rec_id)
        image = DBHelper.get_img(rec_id)
        print(image)
        recommend_list.append(dict(food_id=rec_id, food_name=food[0],
                                   restaurant=food[1], image=image,
                                   score=score))
    print(recommend_list)
    return render_template('02-suggestions.html', recommendation=recommend_list)
def suggestions():
    """Build a recommender for the current user, remember it in the session, render top picks."""
    user_id = session.get('user_id')
    # food_id = session.get('food_id')  # disabled; fixed seed below
    food_id = 1
    num = 3
    recommender = FoodRecommender.FoodRecommender(user_id)
    recs.append(recommender)
    # Remember which recommender belongs to this session.
    session["rec"] = len(recs) - 1
    recommend_list = []
    for record in recommender.get_recommend(num):
        food_id = record[0]
        fitness = record[1]
        print(food_id)
        food = DBHelper.get_food(food_id)
        score = DBHelper.get_avg_score(food_id)
        image = DBHelper.get_img(food_id)
        print(image)
        recommend_list.append(dict(food_id=food_id, food_name=food[0],
                                   restaurant=food[1], image=image,
                                   score=score))
    print(recommend_list)
    return render_template('02-suggestions.html', recommendation=recommend_list)
def signup():
    """Create an account from the signup form; GET just renders the form."""
    if request.method != 'POST':
        return render_template('signup.html')
    data = request.form
    field_names = ('fname', 'lname', 'email', 'pwd', 'age', 'sex', 'medu',
                   'mjob', 'fjob', 'reason', 'studytime', 'schoolsup',
                   'goout', 'higher', 'dalc', 'walc', 'famsup')
    (fname, lname, email, pwd, age, sex, medu, mjob, fjob, reason,
     studytime, schoolsup, goout, higher, dalc, walc, famsup) = (
        data[name] for name in field_names)
    isLogged = '1'
    # '@' is not allowed in the DB key, so it is swapped for '_'.
    email_ = email.replace("@", "_")
    dbh.enteruser(fname, lname, email_, pwd, age, sex, medu, mjob, fjob,
                  reason, studytime, schoolsup, goout, higher, dalc, walc,
                  famsup, isLogged)
    applicationdata.append(email_)
    applicationdata.append(isLogged)
    print(applicationdata)
    return redirect(url_for('check'))
def update_your_face(firstname, lastname, email, phone):
    # Determines user ID before adding the data to the database.
    # User ID is determined from number of user dataset in database.
    # Later on it starts face recognition and uploads it into the database
    # based on the given user ID.
    users = DBHelper.db.child("Users").get()
    print("Updating the User information...")
    try:
        # Walk users until the stored email matches; `count` then holds the
        # matching user's id suffix.
        count = 0
        for user in users.each():
            count += 1
            if DBHelper.get_email("User_" + str(count)) == email:
                break
        # NOTE(review): if no email matches, count ends at the LAST user and
        # that user's face/data get overwritten — confirm this is intended.
        print("Face registration start...")
        Facial_Recognition_Registration.register_your_face("User_" + str(count))
        # Upload the 10 captured photos in parallel.
        Parallel(n_jobs=multiprocessing.cpu_count())(
            delayed(update_parallel_user_photos)(i, count) for i in range(10))
        DBHelper.upload_data("User_" + str(count), firstname, lastname, email, phone)
        print("Data saved! Starting enrollment...")
        Facial_Recognition_Enrollment.enroll_face_dataset()
        print("Face registration completed!")
        print("Success.")
    except:
        # NOTE(review): bare except — swallows every error, not only the
        # "no users yet" case the message suggests.
        print("It seems there is no user registered.")
def updateprofile():
    """Overwrite the stored profile of the current user (key: applicationdata[0])."""
    email_param = applicationdata[0]
    try:
        if request.method == 'POST':
            data = request.form
            field_names = ('fname', 'lname', 'email', 'pwd', 'age', 'sex',
                           'medu', 'mjob', 'fjob', 'reason', 'studytime',
                           'schoolsup', 'goout', 'higher', 'dalc', 'walc',
                           'famsup')
            (fname, lname, email, pwd, age, sex, medu, mjob, fjob, reason,
             studytime, schoolsup, goout, higher, dalc, walc, famsup) = (
                data[name] for name in field_names)
            email_ = email.replace("@", "_")
            dbh.updateprofile(email_param, fname, lname, email_, pwd, age,
                              sex, medu, mjob, fjob, reason, studytime,
                              schoolsup, goout, higher, dalc, walc, famsup)
            return 'updated'
        # NOTE(review): a non-POST request falls through and returns None.
    except Exception as e:
        print(e)
        return 'not updated'
def doit(pathToExcel, tablename):
    """Import one Excel table into the matching DB table."""
    rows = readXls(pathToExcel, tablename)
    # Both the column list and the key columns come from the id->name map.
    header = ID_DBNAME_MAP.values()
    keyColNames = ID_DBNAME_MAP.values()
    DBHelper.updateDB(tablename, header, rows, keyColNames)
def history():
    """Render the current user's order history with score and image per entry."""
    user_id = session.get('user_id')
    user_history = DBHelper.get_history(user_id)
    history_list = []
    # Each record: (food_name, unix timestamp, restaurant, food_id).
    for record in user_history:
        food_name = record[0]
        t = record[1]
        restaurant = record[2]
        food_id = record[3]
        print(food_id)
        score = DBHelper.get_avg_score(food_id)
        image = DBHelper.get_img(food_id)
        print(image)
        stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(t))
        history_list.append(dict(food_name=food_name, time_format=stamp,
                                 restaurant=restaurant, image=image,
                                 score=score))
    print(history_list)
    return render_template('01-history.html', history=history_list)
def _parse_articles(self, info, msg_id, post_time, msg_type):
    """Parse one nested article record, crawl its body, and persist it to tb_article."""
    title = info.get('title')              # article title
    cover = info.get('cover')              # cover image URL
    author = info.get('author')            # author name
    digest = info.get('digest')            # summary / keywords
    source_url = info.get('source_url')    # original source link
    content_url = info.get('content_url')  # WeChat article link
    # Normalize the WeChat URL: strip entity remnants and the redirect
    # anchor, force https.
    # NOTE(review): .replace('http', 'https') turns an already-https URL
    # into 'httpss' — confirm content_url is always plain http.
    content_url = content_url.replace('amp;', '').replace(
        '#wechat_redirect', '').replace('http', 'https')
    content = self.crawl_article_content(content_url)
    db = DBHelper()
    try:
        table = 'tb_article'
        model = {
            'biz': self.biz,
            'msg_id': msg_id,
            'title': title,
            'author': author,
            'cover': cover,
            'digest': digest,
            'source_url': source_url,
            'content_url': content_url,
            'post_time': post_time,
            'mps_id': self.mps_id,
            'content': content,
            'msg_type': msg_type
        }
        article_id = db.insert(table, model, True)
    finally:
        # BUG FIX: close the connection even when insert raises — the
        # original leaked it on failure.
        db.close()
def food_detail(food):
    """Render the detail page for *food* (looked up by name)."""
    food_id = DBHelper.get_food_id(food, '')
    img = DBHelper.get_img(food_id)
    score = DBHelper.get_avg_score(food_id)
    # Taste vector order: hot, salty, sweet, sour, oily.
    taste = DBHelper.get_avg_taste(food_id)
    food_dict = dict(food_name=food, img=img, score=score,
                     sour=taste[3], sweet=taste[2], hot=taste[0],
                     salty=taste[1], oily=taste[4])
    comment_list = [c[0] for c in DBHelper.get_comment(food_id)
                    if c[0] is not None]
    print(comment_list)
    return render_template('food-detail.html', food=food,
                           food_dict=food_dict, comments=comment_list)
class Note:
    """A single note row in the `notes` table, persisted via DBHelper."""
    # Class-level defaults; instance values are assigned by the methods below.
    _note_id = ''
    _notepad_id = ''
    _note_creator = ''
    _note_content = ''
    _note_title = ''
    _note_db_query = ''
    _note_db_writer = DBHelper.DBWriter()
    _note_db_reader = DBHelper.DBReader()

    def __init__(self):
        # This will eventually sync with the ID in the DB.
        self._note_id = 0

    def create_note(self, content_to_add, creator):
        """Insert a new note row and record the DB-assigned row id."""
        self._note_creator = creator
        self._note_content = content_to_add
        # Include a '\n' in case the content is a one-liner.
        self._note_title = self._determine_title(content_to_add + '\n')
        # SECURITY(review): queries are built by string concatenation and are
        # open to SQL injection; DBWriter should grow a parameterized API.
        self._note_db_query = "INSERT INTO notes (note_content, note_title, note_creator, note_date_updated, notepad_id_key)"
        self._note_db_query += "VALUES ('" + self._note_content + "','" + self._note_title + "','" + self._note_creator + "','"
        self._note_db_query += str(datetime.datetime.now()) + "'," + str(self._notepad_id) + ");"
        self._note_db_writer.db_update(self._note_db_query)
        # Read back the id the DB assigned to the new row.
        self._note_id = self._note_db_reader.db_get_last_row_id()

    def update_note(self, content_to_update):
        """Overwrite this note's content/title and refresh its updated date."""
        self._note_content = str(content_to_update)
        self._note_title = self._determine_title(content_to_update + '\n')
        self._note_db_query = "UPDATE notes SET note_content='" + self._note_content + "',"
        self._note_db_query += "note_title='" + self._note_title + "',"
        self._note_db_query += "note_date_updated='" + str(datetime.datetime.now()) + "' "
        # BUG FIX: _note_id is an int (0 or the DB row id), so concatenating
        # it directly to a str raised TypeError; wrap it in str().
        self._note_db_query += "WHERE note_id = " + str(self._note_id) + ";"
        self._note_db_writer.db_update(self._note_db_query)

    def assign_notepad(self, notepad_id):
        """Attach this note to a notepad id (used as the insert FK)."""
        self._notepad_id = notepad_id

    def _determine_title(self, title_to_cut):
        """The title is everything before the first newline."""
        return title_to_cut.split('\n', 1)[0]
def __init__(self, dbFileName):
    # Scraper state for the Asan city bus route service, backed by a local DB.
    self.db = DBHelper(dbFileName)
    # Endpoint that returns the routes serving a given bus stop.
    self.srcURL = "http://bus.asan.go.kr/mobile/traffic/searchBusStopRoute"
    # Browser-like headers; the Referer mimics the arrival-info popup —
    # presumably the server checks it, TODO confirm.
    self.fake_header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36',
        'Referer': 'http://bus.asan.go.kr/web/bus_arrInfo_pop?busStopId=288000863'
    }
def GetNameById(cls, wid):
    """Return the first name-like tag value for way *wid*, or '' when none exists."""
    dbh = DBHelper()
    raw = dbh.executeAndFetchAll(
        "select v from current_way_tags where way_id=%s and k like %s limit 1",
        params=(wid, "name%"))
    # Truthiness covers both the None and the empty-result cases that the
    # old `!= None and len(raw) > 0` test spelled out.
    if raw:
        return raw[0][0]
    return ""
def updateBudget(filename, fy): print "loading budget table..." headerdummy, rows = CSVHelper.readCsv(filename) newheader, newrows = convertKeyToDB(rows, fy) #setYear(fy) #addAdditionalColumn(newrows, BUDGET_ADDITIONAL_COLUMN) DBHelper.updateDB(BUDGET_TABLENAME, newheader, newrows, BUDGET_KEY_COLUMN)
def do_check_ips_thread(selfs):
    # Background keep-alive loop: while the pool is flagged alive, probe the
    # stalest proxy record and either refresh its timestamp or evict it.
    while(selfs.alived):
        # Oldest-checked record first (ascending datetime).
        res = GlobalVar.get_db_handle()[selfs.collection_name].find({}).sort('datetime', 1).limit(1)
        if res.count()>0:
            # Dead proxy -> remove it and immediately examine the next one.
            if selfs.is_alive_ip_port([res[0]['ip'], res[0]['port']]) == 0:
                GlobalVar.get_db_handle()[selfs.collection_name].remove(res[0])
                continue
            # Alive -> stamp it with the current check time (upsert=True).
            # NOTE(review): cursor.count()/remove()/update() are legacy
            # pymongo APIs removed in pymongo 4 — confirm the pinned version.
            GlobalVar.get_db_handle()[selfs.collection_name].update({'ip': res[0]['ip']},{'$set': {'datetime': datetime.datetime.now()}},True)
        else :
            # Empty pool: back off for ten minutes before polling again.
            time.sleep(600)
def vote_video(request):
    """Apply the URL's vote to the video for the current user; report the score delta."""
    user_id = request.authenticated_userid
    vote = request.matchdict['vote']
    video_id = int(request.matchdict['video_id'])
    topic_id = DBHelper.get_video(video_id).topic_id
    change = DBHelper.vote_video(user_id, video_id, vote)
    # Keep the cached feed score in sync with the recorded vote.
    Feed().update_video_score(video_id, topic_id, change)
    return {'change': change}
def validateAddress(address): print 'Current address : ', address['address'], ' Type : ', address[ 'address_type'] print 'Crawling from virustotal.com : ', detections = __getDetections( __getDetectedSoups(__getSoupFromAddresses(address))) print 'OK' address.update({'detections': detections}) DBHelper.updateAddress(address)
def frontpage(request):
    """Build the front-page feed across every topic for the (possibly anonymous) user."""
    add_video_url = request.route_url('add_video')  # kept for parity (unused)
    user_id = request.authenticated_userid
    user = DBHelper.get_user_from_id(user_id)
    topics = DBHelper.get_all_topics()
    topic_ids = [topic.id for topic in topics]
    all_videos = Feed().build_feed(user_id, topic_ids)
    return {'videos': all_videos, 'logged_in': user, 'topics': topics}
def GetNid2Coord(rebuild=False):
    """Return the cached node-id -> [lat, lon] map, building it on first use.

    Pass rebuild=True to force a fresh query even when the cache exists.
    """
    # `is None` is the idiomatic None test (the original used `== None`).
    if OtherUtils.Nid_Coord is None or rebuild:
        dbh = DBHelper()
        raw = dbh.executeAndFetchAll(
            "select id, latitude, longitude from current_nodes where visible=1"
        )
        # One pass instead of building two parallel lists and zipping them.
        OtherUtils.Nid_Coord = {nid: [lat, lon] for nid, lat, lon in raw}
    return OtherUtils.Nid_Coord
def validateAddress(address): print 'Current address : ', address['address'], ' Type : ', address['address_type'] print 'Crawling from virustotal.com : ', detections = __getDetections( __getDetectedSoups( __getSoupFromAddresses(address))) print 'OK' address.update({'detections' : detections}) DBHelper.updateAddress(address);
def get_one_alive_ip(self):
    # Return the most recently verified proxy record, evicting dead entries
    # along the way; returns 0 when the pool is empty.
    while(1):
        # Newest-checked record first (descending datetime).
        res = GlobalVar.get_db_handle()[self.collection_name].find({}).sort('datetime', -1).limit(1)
        if res.count()>0:
            # Dead proxy: drop it and try the next candidate.
            if self.is_alive_ip_port([res[0]['ip'], res[0]['port']]) == 0:
                GlobalVar.get_db_handle()[self.collection_name].remove(res[0])
                continue
            # Alive: bump its check timestamp (upsert=True) and hand it back.
            GlobalVar.get_db_handle()[self.collection_name].update({'ip': res[0]['ip']},{'$set': {'datetime': datetime.datetime.now()}},True)
            return res[0]
        # NOTE(review): empty pool returns 0 immediately with no backoff,
        # unlike do_check_ips_thread which sleeps — confirm intended.
        return 0
def clear(self): self.__root_points = [] #root_points[i] = (lat1, lon1) self.__target_points = [] #target_points[i] = (lat1, lon1) self.__target_lines = [ ] # target_lines[i] = [(lat1,lon1)...(latn,lonn)] self.__background_lines = [ ] # background_lines[i] = [(lat1,lon1)...(latn,lonn)] self.__target_bounder = tuple() #(minLat, minLon, maxLat, maxLon) self.__target_range = tuple() # ((lat,lon),radius) self.__dbh = DBHelper() self.__url = ""
def build_feed(self, user_id, topics, start=0, end=25):
    """Return the [start:end] slice of the hottest videos as (video, score, user's vote) triples."""
    # Scores are sorted by unnormalized vote count.
    hottest = self.hottest_videos(topics)
    videos = []
    for entry in hottest[start:end]:
        videos.append((DBHelper.get_video(entry[0]),
                       entry[1],
                       DBHelper.vote_by_user(entry[0], user_id)))
    return videos
def doit(filename, option): header, rows = CSVHelper.readCsv(filename) newheader, newrows = analyzeUserTree(option, rows) print "loading workflow to DB...." print " flows to import: ", newrows if option == OPTION_USER: key_columns = ROUTES_PERSON_KEY_COLUMNS else: key_columns = ROUTES_KEY_COLUMNS DBHelper.updateDB(ROUTES_TABLENAME, newheader, newrows, key_columns)
def __init__(self, id_to_specify): _note_db_writer = DBHelper.DBWriter() if _note_db_writer.db_check_notepad_id(id_to_specify) == 0: self._id = id_to_specify else: print "Error: This ID already exists."
def process(dataFilePath,logfilename = 'sqliteDataProcess.log'):
    # Coroutine sink: prime it, then .send((role, index)) tuples to persist
    # each account's data; sending any string terminates it.  The sqlite
    # helper is stored in a module global so other code reuses the handle.
    import log
    DBlog = log.set_logger(filename = logfilename, isOnlyFile = False)
    DBlog.debug('sqlite start!')
    global sqlite3Obj
    sqlite3Obj = DBHelper.Sqlite3Helper(dataFilePath)
    # check_same_thread=False: the connection is shared across threads.
    sqlite3Obj.open(check_same_thread=False)
    while True:
        role,index = yield
        if isinstance(role, str):
            # String sentinel ends the coroutine.
            return
        try:
            # Original comment said "skip accounts whose login failed".
            # NOTE(review): the guard *processes* when ERRORList[0] != 0 —
            # confirm the error-code convention (0 may mean failure here).
            if role.ERRORList[0] != 0:
                # First-time insert of this account's data.
                firstEntering(role, sqlite3Obj)
                # Disabled steps: refresh current-term scores / student info.
                # currentEntering(role, sqlite3Obj)
                # updateStudentInfo(role, sqlite3Obj)
                DBlog.info('%d : %s firstEntering Success!' % (index, role.userId))
        except Exception as e:
            DBlog.error('%d : %s firstEntering Exception!\n -- %s' % (index,role.userId, traceback.format_exc()))
        else:
            pass
        finally:
            pass
class Notepad: #Private Variables _id = 0 _notes = [] _note_db_deleter = DBHelper.DBDeleter() #Constructor method def __init__(self, id_to_specify): _note_db_writer = DBHelper.DBWriter() if _note_db_writer.db_check_notepad_id(id_to_specify) == 0: self._id = id_to_specify else: print "Error: This ID already exists." #Add note to the list def add_note(self, content): new_note = Note() new_note.assign_notepad(self._id) new_note.create_note(content) self._notes.append(new_note) #Remove note at id def remove_note(self, note_id): self._note_db_deleter.db_delete(note_id) del self._notes[note_id]
def login(request): login_url = request.route_url('login') referrer = request.url if referrer == login_url: # never use the login form itself as came_from referrer = request.route_url('home') came_from = request.params.get('came_from', referrer) message = '' handler = '' password = '' if 'form.submitted' in request.params: handler = request.params['handler'] password = request.params['password'] user = DBHelper.get_user_from_handler(handler) print user.id, user.handler if user and user.authenticate(password): headers = remember(request, user.id) return HTTPFound(location = came_from, headers = headers) message = 'Failed login' return dict( message = message, url = request.application_url + '/login', came_from = came_from, handler = handler, password = password, )
def Build(cls, rebuild=False):
    # Build (lazily, cached on the class) a KD-tree over every visible node
    # so nearest-node queries are fast; rebuild=True forces a refresh.
    if DistanceUtils.kd_tuple != None and not rebuild:
        return
    print ">>>>> Initalize DistanceUtils ..."
    dbh = DBHelper()
    all_nodes = dbh.executeAndFetchAll(
        "select id,latitude,longitude from current_nodes where visible = 1"
    )
    # build idx->node_id map: KD-tree result indices back to node ids
    idx_map = {}
    for i in xrange(len(all_nodes)):
        idx_map[i] = all_nodes[i][0]
    # build kdtree over (latitude, longitude) pairs
    # NOTE(review): Python 2 code (xrange, print statements); under
    # Python 3 zip() returns an iterator and would need list() here.
    x = [item[1] for item in all_nodes]
    y = [item[2] for item in all_nodes]
    tree = spatial.KDTree(zip(x, y))
    DistanceUtils.kd_tuple = (idx_map, tree)
    print ">>>>> Done Initalization"
def doit(filename, option): header,rows = CSVHelper.readCsv(filename) newheader,newrows = analyzeUserTree(option,rows) print "loading workflow to DB...." print " flows to import: ", newrows if option == OPTION_USER: key_columns = ROUTES_PERSON_KEY_COLUMNS else: key_columns = ROUTES_KEY_COLUMNS DBHelper.updateDB( ROUTES_TABLENAME, newheader, newrows, key_columns )
def Build(cls, rebuild=False):
    # Build (or reload from disk) the way-name -> way-id map plus the
    # BK-tree used for fuzzy name lookups; both are cached on the class and
    # persisted with joblib under WORK_DIR/data.
    if len(WayNameUtils.Name2id_way ) != 0 and WayNameUtils.BKTree_way != None and not rebuild:
        return
    print ">>>>> Initalize WayNameUtils ..."
    if (os.path.exists(WORK_DIR + "data/Name2id_way.dat")) and not rebuild:
        print ">>>>> Loading ..."
        WayNameUtils.Name2id_way = joblib.load(WORK_DIR + "data/Name2id_way.dat")
    else:
        print ">>>>> Generating ..."
        dbh = DBHelper()
        # Both plain and Chinese name tags feed the map.
        data = dbh.executeAndFetchAll(
            "select way_id,v from current_way_tags where k='name' or k='name:zh'"
        )
        # Group way ids by their cleaned name (Python 2: dict.has_key).
        for pair in data:
            cname = WayNameUtils.cleanName(pair[1])
            if WayNameUtils.Name2id_way.has_key(cname):
                WayNameUtils.Name2id_way[cname].append(pair[0])
            else:
                WayNameUtils.Name2id_way[cname] = [pair[0]]
        joblib.dump(WayNameUtils.Name2id_way,
                    WORK_DIR + "data/Name2id_way.dat",
                    compress=3)
    if (os.path.exists(WORK_DIR + "data/BKTree_way.dat")) and not rebuild:
        print ">>>>> Load BKTree ..."
        WayNameUtils.BKTree_way = joblib.load(WORK_DIR + "data/BKTree_way.dat")
    else:
        if rebuild:
            # Rebuild path: mutate the existing tree with the fresh name set,
            # loading it from disk first if it is not in memory yet.
            if WayNameUtils.BKTree_way == None:
                WayNameUtils.BKTree_way = joblib.load(
                    WORK_DIR + "data/BKTree_way.dat")
            print ">>>>> Rebuild BKTree ..."
            WayNameUtils.BKTree_way.alter(
                new_words=WayNameUtils.Name2id_way.keys())
        else:
            print ">>>>> Build BKTree ..."
            WayNameUtils.BKTree_way = BKTree(
                words=WayNameUtils.Name2id_way.keys())
        joblib.dump(WayNameUtils.BKTree_way,
                    WORK_DIR + "data/BKTree_way.dat",
                    compress=3)
    print ">>>>> Done Initalization"
def crawl():
    # Pull today's modified pulses from AlienVault OTX (100 per page) and
    # store every IPv4/IPv6/domain indicator they contain.  Stops as soon as
    # a pulse older than today appears — presumably the feed is sorted
    # newest-first (TODO confirm).
    today = datetime.datetime.today().strftime("%Y-%m-%d")
    print "Start crawling: "
    pulse_num = 0
    # API request url of alienvault
    url = 'https://otx.alienvault.com/otxapi/search/?q=&sort=null&limit=100&page=1'
    while True:
        r = requests.get(url)
        for pulse in r.json()["results"]:
            # check if this pulse modified today
            checkday = pulse["modified"].split('T')[0]
            if checkday != today:
                print "Done"
                return
            pulse_num +=1
            print "Crawling from pulse " + str(pulse_num)
            # parse pulse's data to get all addresses, then add them to the db
            pulse_url = 'https://otx.alienvault.com/otxapi/pulses/' + pulse["id"] + '/indicators/?limit=9000&page=1'
            p_response = requests.get(pulse_url)
            data = p_response.json()["results"]
            ip_num = 0
            domain_num = 0
            for indicator in data:
                if indicator["type"] in ["IPv4", "IPv6"]:
                    ip_num +=1
                    item = {"address": indicator["indicator"], "address_type": "ip"}
                    DBHelper.updateAddress(item);
                if indicator["type"] == "domain":
                    domain_num +=1
                    item = {"address": indicator["indicator"], "address_type": "domain"}
                    DBHelper.updateAddress(item);
            print "Crawled " + str(ip_num) + " ip addresses and " + str(domain_num) + " domains" + "\n"
        # Follow the API's pagination cursor to the next page of pulses.
        url = r.json()["next"]
def add_video(request):
    """Pyramid view: show the add-video form and handle its submission."""
    save_url = request.route_url('add_video')
    topics = DBHelper.get_all_topics()
    # Empty video backs the blank form on GET / failed submit.
    video = Video(title='', description='', url='', topic_id=0)
    message = None
    if 'form.submitted' in request.params:
        title = request.params['title']
        description = request.params['description']
        url = request.params['url']
        topic_id = request.params['topic']
        user_id = request.authenticated_userid
        video = Video(title=title, description=description, url=url,
                      owner_id=user_id, topic_id=topic_id)
        if DBHelper.add_video(video):
            # BUG FIX: the original referenced an undefined `video_id`
            # (NameError on every successful submit); use the id of the
            # video that was just persisted.
            feed = Feed()
            feed.update_video_score(video.id, topic_id, 0)
            return HTTPFound(location=request.route_url('home'))
        else:
            message = "Error while adding video"
    return {'video': video, 'save_url': save_url, 'topics': topics,
            'message': message}
def register(request):
    """Create a new account and log it in; re-show the form when the name is taken."""
    handler = ''
    password = ''
    message = ''
    if 'form.submitted' in request.params:
        handler = request.params['handler']
        password = request.params['password']
        user = DBHelper.add_user(handler, password)
        if user:
            # Successful signup doubles as login.
            headers = remember(request, user.id)
            return HTTPFound(location=request.route_url('home'),
                             headers=headers)
        message = "Username taken"
    return dict(message=message,
                url=request.application_url + '/register',
                handler=handler,
                password=password)
def doit(tablename, columns):
    """Dump the requested columns of *tablename* via the CSV printer."""
    fetched = DBHelper.queryTable(tablename, columns)
    CSVHelper.printRows(columns, fetched)
def unsubscribe_topic(request):
    """Drop the current user's subscription to the topic named in the URL."""
    topic_id = request.matchdict['topic_id']
    DBHelper.unsubscribe_topic(request.authenticated_userid, topic_id)
    return last_location_or_home(request)
def delete_video(request):
    """Delete the video named in the route and bounce to the front page."""
    video = DBHelper.get_video(request.matchdict['video_id'])
    DBSession.delete(video)
    return HTTPFound(location=request.route_url('home'))
def __getAddressesFromDB():
    """Return every stored address record from the database."""
    # The trailing `pass` after the return was unreachable dead code; removed.
    return DBHelper.getAddresses()