async def cast(self, ctx: Context, votename: str):
    """Interactively collect a ranked ballot for *votename* from the invoking user.

    Prompts with the numbered options, waits up to 60 s for a space-separated
    list of option numbers in preference order, then stores the ballot.
    """
    if not dm.vote_exists(votename):
        await ctx.send("Vote doesn't exist!")
        return
    self._ensure_user_exists(ctx.author)
    vote = dm.lookup_vote_by_votename(votename)
    msg = f"Please choose the order in which you want to vote for {vote.name}:\n\n"
    for i, option in enumerate(vote, start=1):
        msg += f"{i}.- {option}\n"
    await ctx.send(msg)

    def check(m):
        # Only accept space-separated numbers from the same user and channel.
        if not (m.content.replace(" ", "").isnumeric()
                and m.channel == ctx.channel
                and m.author == ctx.author):
            return False
        numlist = list(map(int, m.content.split()))
        unique = len(numlist) == len(set(numlist))
        # BUGFIX: also reject 0 — options are numbered starting at 1, so a
        # ballot containing 0 referenced no option but was accepted before.
        return (unique
                and len(numlist) <= len(vote)
                and min(numlist) >= 1
                and max(numlist) <= len(vote))

    try:
        ranks = await self.bot.wait_for("message", timeout=60, check=check)
    except asyncio.TimeoutError:
        await ctx.send("Timed out")
        return
    ranklist = list(map(int, ranks.content.split()))
    dm.cast_vote(Ballot(str(ctx.author.id), votename, ranklist))
    # BUGFIX: typo "sucessfully" in the confirmation message.
    await ctx.send("Voted successfully!")
def start():
    """Scrape the history index page (Python 2 / urllib2).

    Fetches every `.list li a` item, fills in its title, url and content,
    persists the whole list, and returns it serialised as JSON.
    """
    page_request = urllib2.Request(base_url, headers=header)
    page = urllib2.urlopen(page_request)
    soup = BeautifulSoup(page.read(), "html.parser")

    history_list = []
    for anchor in soup.select('.list li a'):
        entry = {
            'title': anchor.get('title'),
            'url': anchor.get('href'),
        }
        # Content is fetched lazily per item from its detail page.
        entry['content'] = startContent(entry['url'])
        history_list.append(entry)

    print('读取完成')
    print('-----------------')
    DatabaseManager.insertHistoryList(history_list)
    return json.dumps(history_list, ensure_ascii=False)
def test_delete_db():
    """Removing the rare stock's yearly data should not raise."""
    isDelete = True
    try:
        DM.removeData(RARE_STOCK_NAME, "Y")
    except Exception:
        # BUGFIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        isDelete = False
    assert isDelete
async def represent(self, ctx: Context, target: discord.Member = None):
    """Set (or clear) the member who represents the invoking user.

    Calling with no target — or with yourself — clears any representation.
    """
    self._ensure_user_exists(ctx.author)
    if target is None or target == ctx.author:
        dm.represent(ctx.author.id, "")
        await ctx.send("No one is representing you now!")
        return
    self._ensure_user_exists(target)
    dm.represent(ctx.author.id, target.id)
    # BUGFIX: `target.nick` is None for members without a server nickname,
    # which rendered as "None now represents you!"; display_name falls back
    # to the username.
    await ctx.send(f"{target.display_name} now represents you!")
def dbConnect(self, db_name):
    """Connect to *db_name* using the credentials from the gv settings module.

    The manager is stored in the module-global ``databaseObject``.
    """
    global databaseObject
    databaseObject = DatabaseManager(
        gv.db_username,
        gv.db_password,
        gv.db_host,
        gv.db_port,
        db_name,
    )
    databaseObject.Connect()
def dbConnect(self):
    """Connect to the database described entirely by the gv settings module.

    The manager is stored in the module-global ``databaseObject``.
    """
    global databaseObject
    databaseObject = DatabaseManager(
        gv.dbConnector,
        gv.userName,
        gv.password,
        gv.dbHost,
        gv.dbPort,
        gv.databaseName,
    )
    databaseObject.Connect()
def dbConnect(self):
    """Connect to the local MySQL database named by gv.databaseName.

    NOTE(review): host, port and the masked credentials are hard-coded here
    rather than read from gv like the database name — consider unifying.
    """
    global databaseObject
    databaseObject = DatabaseManager(
        '******',      # username (masked placeholder)
        '******',      # password (masked placeholder)
        '127.0.0.1',   # local MySQL server
        '3306',
        gv.databaseName,
    )
    databaseObject.Connect()
async def countup(self, ctx: Context, votename: str, method: str = "fptp"):
    """Tally *votename* with the chosen counting method and post the result.

    Each ballot is weighted by 1 plus the number of users its caster
    transitively represents for this vote.
    """
    weighted_ballots = []
    for ballot in dm.lookup_ballots_by_vote(votename):
        weight = 1 + len(self._get_representing_users(ballot.userid, votename))
        weighted_ballots.append(CountedBallot(ballot=ballot, count=weight))
    vote = dm.lookup_vote_by_votename(votename)
    result = election.election[method](weighted_ballots, vote)
    await ctx.send(result)
def start(self): for index in range(self.startIndex,self.endIndex): word = self.searchWordsAsSeries[index] try: DatabaseManager.set_query_count('+'.join(word.split()),self.__search(word)) except: print "stuck index: %d, %s "%index, word self.__ifError(word) if index % 100 ==0 and index!=0: print "pass 100 : %d"%index print "doneait"
def test_store_db():
    """Storing a yearly, high-quality candle for the rare stock should not raise."""
    candle = SM.getCandle(RARE_STOCK_NAME)
    candle["timeframe"] = "Y"
    candle["quality"] = "high"
    isStore = True
    try:
        DM.storeData(RARE_STOCK_NAME, candle)
    except Exception:
        # BUGFIX: narrowed from a bare `except:`.
        isStore = False
    assert isStore
def run(self): self.__database_manager = DatabaseManager.DatabaseManager() self.__scanner_data_provider = ScannerDataProvider.ScannerDataProvider( ) self.__scanner_data_provider.start() self.__running_lock.acquire() self.__is_running = True self.__running_lock.release() self.__thread_timer = time.time() self.__data_timer = time.time() while True: if time.time() - self.__thread_timer > 1000.0 / 1000.0: self.__running_lock.acquire() condition = self.__is_running self.__running_lock.release() if bool(condition) is False: break self.__thread_timer = time.time() if time.time() - self.__data_timer > 100.0 / 1000.0: dict_scanner_data = self.__scanner_data_provider.get_scanner_data( ) self.__store_in_db(dict_scanner_data) self.__store_in_queue(dict_scanner_data) self.__data_timer = time.time() # Wait for the threads to stop self.__scanner_data_provider.stop() self.__scanner_data_provider.join() print '[Data Manager] scanner_data_provider stopped '
def getCompleteMovies(movies):
    """Fill in movie info via IMDB and return the completed list.

    Step 2 (database lookup) is mostly disabled: movies that already have a
    saved record are currently dropped from the result rather than reloaded.
    Step 3 queries IMDB for each remaining movie.
    """
    # Step 2: only movies with no saved record go on to the external query.
    # TODO: re-enable loading from the database after a while.
    movies_incomplete = [m for m in movies if DatabaseManager.getMovie(m) is None]

    # Step 3: query IMDB to fill the incomplete movies.
    movies_complete = []
    for movie in movies_incomplete:
        # The Douban Movie API is not available anymore, so only IMDB is used.
        IMDB.fillMovieInfo(movie)
        # TODO: re-enable writing to the database after a while
        # DatabaseManager.addMovie(movie)
        movies_complete.append(movie)
    return movies_complete
def __init__(self, cogs, output_path, db_args, **args):
    """Create the bot, wire up its database manager, and load every cog."""
    super().__init__(**args)
    self.db_manager = DatabaseManager.DatabaseManager(db_args, output_path)
    self.connected = False
    self.output_path = output_path
    for extension in cogs:
        self.load_extension(extension)
def turkiyeVeri(self):
    """Populate the Turkey tab of the UI from the newest Turkiye_Data row."""
    rows = DatabaseManager.sqliteData("Turkiye_Data").getData()
    latest = rows[-1]
    print(latest)
    trNufus = 83614362  # population figure used for the case percentage

    self.ui.TToplamSayiLbl.setText(latest[1])
    self.ui.TVakaArtisLbl.setText("Dün bildirilen Vaka sayısı : " + latest[2])
    self.ui.TVefatSayiLbl.setText(latest[3])
    self.ui.TVefatArtisLbl.setText("Dün bildirilen vefat sayısı : " + latest[4])
    self.ui.TDozSayiLbl.setText(latest[5])
    self.ui.TDozArtisLbl.setText("Dün bildirilen doz sayısı : " + latest[6])
    self.ui.TAsiOlanSayiLbl.setText(latest[7])
    self.ui.TAsiOlanYuzdeLbl.setText("Nüfus yüzdesi : " + latest[8])
    self.ui.TVakaPrgLbl.setText("Toplam Vaka Sayısı : " + latest[1])
    self.ui.TAsiOlanSayiPrgLbl.setText("Toplam Aşı Olan Sayısı : " + latest[7])

    # Case percentage: strip the thousands separators before converting.
    vakaSayisi = latest[1].replace(".", "")
    yuzde = (int(vakaSayisi) * 100) / trNufus
    self.progressBarValue(yuzde, self.ui.VakaPrg)
    self.ui.TVakaYuzdeLbl.setText(str(int(yuzde)) + " %")

    # Vaccination share arrives like "%62,5" — keep only the integer part.
    asi_yuzde = latest[8].lstrip("%").split(",")[0]
    self.progressBarValue(int(asi_yuzde), self.ui.asiPrg)
    self.ui.TAsiYuzdeLbl.setText(asi_yuzde + " %")
def dunyaVeri(self):
    """Populate the world tab of the UI from the newest Global_Data row."""
    rows = DatabaseManager.sqliteData("Global_Data").getData()
    latest = rows[-1]
    print(latest)
    dNufus = 7859092000  # world population used for the case percentage

    self.ui.DToplamSayiLbl.setText(latest[1])
    self.ui.DVakaArtisLbl.setText("Dün bildirilen Vaka sayısı : " + latest[2])
    self.ui.DVefatSayiLbl.setText(latest[3])
    self.ui.DVefatArtisLbl.setText("Dün bildirilen vefat sayısı : " + latest[4])
    self.ui.DDozSayiLbl.setText(latest[5])
    self.ui.DDozArtisLbl.setText("Dün bildirilen doz sayısı : " + latest[6])
    self.ui.DAsiOlanSayiLbl.setText(latest[7])
    self.ui.DAsiOlanYuzdeLbl.setText("Nüfus yüzdesi : " + latest[8])
    self.ui.DVakaPrgLbl.setText("Toplam Vaka Sayısı : " + latest[1])
    self.ui.DAsiOlanSayiPrgLbl.setText("Toplam Aşı Olan Sayısı : " + latest[7])

    # Case percentage: strip the thousands separators before converting.
    vakaSayisi = latest[1].replace(".", "")
    yuzde = (int(vakaSayisi) * 100) / dNufus
    self.progressBarValue(yuzde, self.ui.VakaPrg_6)
    self.ui.DVakaYuzdeLbl.setText(str(int(yuzde)) + " %")

    # Vaccination share arrives like "%62,5" — keep only the integer part.
    asi_yuzde = latest[8].lstrip("%").split(",")[0]
    self.progressBarValue(int(asi_yuzde), self.ui.asiPrg_6)
    self.ui.DAsiYuzdeLbl.setText(asi_yuzde + " %")
def __init__(self, config_path, cla_dict):
    """Build the fetcher (Python 2 module).

    Loads the Trulia configuration and the database manager, then — driven
    by the command-line argument dict — optionally creates a key-value
    store manager ('h' = HBase, 'r' = Redis, '' = none) and optionally
    enables FluentD forwarding ('f' = local filesystem, 'h' = HDFS,
    '' = disabled).
    """
    trulia_conf = TruliaConfLoader.TruliaConfLoader(config_path)
    self.load_trulia_params(trulia_conf)
    self.db_mgr = DatabaseManager.DatabaseManager(config_path)

    # Optional key-value store backend, imported lazily so only the chosen
    # client library needs to be installed.
    kv_store = cla_dict['kv_store']
    if kv_store == '':
        self.kv_mgr = None
    elif kv_store == 'h':
        print "loading HBase manager",
        import HBaseManager
        self.kv_mgr = HBaseManager.HBaseManager()
        print "completed"
    elif kv_store == 'r':
        print "loading Redis manager",
        import RedisManager
        self.kv_mgr = RedisManager.RedisManager()
        print "completed"

    # Optional FluentD log forwarding.
    fluentd_rx = cla_dict['fluentd_rx']
    if fluentd_rx == '':
        self.fluentd_enabled = False
        print "FluentD not enabled"
    elif fluentd_rx == 'f':
        print "loading fluentd for local fs"
        sender.setup('fs')
        self.fluentd_enabled = True
        print "FluentD enabled for local filesystem"
    elif fluentd_rx == 'h':
        print "loading fluentd for hdfs"
        sender.setup('hdfs')
        self.fluentd_enabled = True
        print "FluentD enabled for HDFS"
def adminHome():
    """Render the admin home page.

    Handles the search-bar POST by redirecting to the search view; otherwise
    shows the six most recent posts annotated with the current user's vote
    and the total score, with content truncated to 200 characters.
    """
    searchBarForm = Forms.SearchBarForm(request.form)
    searchBarForm.topic.choices = get_all_topics('all')
    if request.method == 'POST' and searchBarForm.validate():
        return redirect(url_for('searchPosts',
                                searchQuery=searchBarForm.searchQuery.data,
                                topic=searchBarForm.topic.data))

    sql = ("SELECT post.PostID, post.Title, post.Content, post.Upvotes,"
           " post.Downvotes, post.DatetimePosted, user.Username,topic.TopicID,"
           " topic.Content AS Topic FROM post"
           " INNER JOIN user ON post.UserID=user.UserID"
           " INNER JOIN topic ON post.TopicID=topic.TopicID"
           " ORDER BY post.PostID DESC LIMIT 6")
    dictCursor.execute(sql)
    recentPosts = dictCursor.fetchall()
    for post in recentPosts:
        if sessionInfo['login']:
            currentVote = DatabaseManager.get_user_post_vote(
                str(sessionInfo['currentUserID']), str(post['PostID']))
            # BUGFIX: compare to None with `is`, not `==`.
            post['UserVote'] = 0 if currentVote is None else currentVote['Vote']
        else:
            post['UserVote'] = 0
        post['TotalVotes'] = post['Upvotes'] - post['Downvotes']
        # Only a preview of the post body is shown on the home page.
        post['Content'] = post['Content'][:200]
    return render_template('adminHome.html', currentPage='adminHome',
                           **sessionInfo, searchBarForm=searchBarForm,
                           recentPosts=recentPosts)
def _get_representing_users(self, userid: str, vote: str = None) -> Set[int]:
    """Return the transitive set of users represented by *userid*.

    Breadth-first walk over the representation graph. Users already
    participating in *vote* (when given) are excluded — representation does
    not flow to someone who votes for themselves — as are *userid* itself
    and anyone already collected (prevents cycles).
    """
    participating = set() if vote is None else dm.lookup_voting_by_votename(vote)
    participating.add(userid)

    collected = set()
    pending = Queue()
    pending.put_nowait(userid)
    while not pending.empty():
        current = pending.get_nowait()
        # Newly discovered constituents of `current`.
        fresh = set(dm.lookup_representing(current)) - participating - collected
        collected |= fresh
        for member in fresh:
            pending.put_nowait(member)
    return collected
def __init__(self, config_path):
    """Load the Trulia configuration, create the DB manager, and prepare
    the shared state used by the fetcher threads."""
    self.load_trulia_params(TruliaConfLoader.TruliaConfLoader(config_path))
    self.db_mgr = DatabaseManager.DatabaseManager(config_path)
    self.curr_key_idx = 0
    # Guards concurrent appends to val_strs from worker threads.
    self.lock = threading.Lock()
    self.val_strs = []
async def createvote(self, ctx: Context, votename: str):
    """Create a new vote named *votename*.

    Asks the invoking user for a line-separated list of options and waits
    up to 90 s for the reply.
    """
    if dm.vote_exists(votename):
        await ctx.send("Vote with that name already exists!")
        return

    def check(m):
        return m.author.id == ctx.author.id and m.channel == ctx.channel

    await ctx.send("Send line separated choices")
    options: str
    try:
        options = await self.bot.wait_for("message", check=check, timeout=90)
    except asyncio.TimeoutError:
        # BUGFIX: `ctx.send(...)` was missing `await`, so the coroutine was
        # never executed and the timeout message was never sent.
        await ctx.send("Timed out")
        return
    options = options.content.split("\n")
    dm.create_vote(Vote(votename, options))
    await ctx.send("Vote created successfully")
def get_common_words():
    """API endpoint: most common words per group (and optionally the
    enclosing region), keyed by group id and then by millisecond timestamp."""
    area_ids = request.args.getlist('ids')
    group = request.args.get('group')
    date = request.args.get('date')
    period = request.args.get('period')
    region = request.args.get('region')
    region_id = request.args.getlist('region_id')

    ids_dict = {'region': {}}
    try:
        rawData = dbMan.get_scotland_district_common_words(
            area_ids, group, date, period).fetchall()
        if region and len(region_id) > 0:
            # Region words come from the specific area ids supplied.
            for row in dbMan.get_scotland_district_common_words(
                    region_id, 'area', date, period).fetchall():
                words = filterstopwords(
                    row.word_arr
                    if row is not None and row.word_arr is not None else [])
                ids_dict['region']["%.0f" % (row.hour.timestamp() * 1000)] = words
        elif region:
            # No explicit region ids: fall back to the whole of Scotland.
            for row in dbMan.get_scotland_common_words(date, period).fetchall():
                words = filterstopwords(row.word_arr if row is not None else [])
                ids_dict['region']["%.0f" % (row.hour.timestamp() * 1000)] = words
    except Exception:
        raise InvalidUsage('Encountered an error fetching data from the database',
                           status_code=500)

    try:
        for row in rawData:
            bucket = ids_dict.setdefault(row.group_id, {})
            bucket["%.0f" % (row.hour.timestamp() * 1000)] = filterstopwords(
                row.word_arr if row.word_arr is not None else [])
    except Exception:
        raise InvalidUsage('Failed to format the database data', status_code=418)
    return jsonify(ids_dict)
def all_scotland_ward_data():
    """API endpoint: parsed tweet data per ward.

    When `region` is set, the final id in the list names the enclosing area,
    whose data is returned under its own key alongside the wards.
    """
    area_ids = request.args.getlist('ids')
    region = request.args.get('region')
    date = request.args.get('date')
    period = request.args.get('period')

    ids_dict = {}
    try:
        if region:
            # The last id in the list is the enclosing area id.
            region_id = area_ids.pop()
            region_rows = dbMan.get_scotland_district_tweets(
                [region_id], "area", date, period).fetchall()
            ids_dict[region_id] = parse_twitter_data(region_rows, date, period)
        raw_data = dbMan.get_scotland_district_tweets(
            area_ids, "ward", date, period).fetchall()
    except Exception:
        raise InvalidUsage('Error occurred when fetching data from the database',
                           status_code=500)

    try:
        # Group tweets by ward id, then parse each ward's batch.
        tweet_dict = {area_id: [] for area_id in area_ids}
        for tweet in raw_data:
            if tweet["ward_id"] is not None:
                tweet_dict[tweet["ward_id"]].append(tweet)
        for ward_id, tweets in tweet_dict.items():
            ids_dict[ward_id] = parse_twitter_data(tweets, date, period)
    except Exception:
        raise InvalidUsage('Somehow crashed when processing database results',
                           status_code=418)

    # Send those bad boys away
    return jsonify(ids_dict)
def __init__(self):
    """Initialise the system state and its managers, then boot and self-test."""
    self.__active_user_list = []
    # System date starts at day 1, month 1 (assumed ERPDate(day, month) —
    # TODO confirm argument order).
    self.__sys_data = ERPDate.ERPDate(1, 1)
    self.__raw_material_order_manager = (
        RawMaterialOrderManager.RawMaterialOrderManager())
    self.__repository_manager = RepositoryManager.RepositoryManager()
    self.__database_manager = DatabaseManager.DatabaseManager()
    self.__request_msg = []
    # NOTE(review): named like a dict but initialised as a list — verify.
    self.__fcn_dic = []
    self.__init_system()
    self.test_main()
def main():
    """Listen to the Scotland tweet stream indefinitely, saving each tweet.

    On any failure — including stream protocol errors — the main thread is
    interrupted so the surrounding process can notice and restart.
    """
    try:
        # Search for tweets in Scotland
        print("Getting twitter stream")
        tweet_stream = __twitter.get_scotland_twitter_stream()
        if tweet_stream is not None:
            print("Listening for tweets...")
            # Blocks for as long as the stream stays open.
            for tweet in tweet_stream:
                dbMan.save_scotland_tweet(tweet)
    except ProtocolError as e:
        print("Killing myself now as a PROTOCOL ERROR occurred")
        print(e)
        # Raises KeyboardInterrupt in the main thread so the process dies.
        _thread.interrupt_main()
    except Exception as e:
        print("Killing myself now as an error occurred")
        print(e)
        _thread.interrupt_main()
def start(self, strat, capital, commission):
    """Initialise the database and all pipeline/statistics/trading/book
    tables, then build the strategy and database manager used for trading.

    NOTE(review): `capital` and `commission` are unused here; the incoming
    `strat` argument is immediately overwritten; and `stratFuncs`,
    `stratName` and `auth` are not defined in this scope — confirm they are
    module-level globals, otherwise this raises NameError.
    """
    self.initDatabase()
    self.initPipelineTables()
    self.initStatisticsTables()
    self.initTradingTable()
    self.initBookTables()
    # Unpack the entry/exit condition callables for the strategy.
    entryConditions, exitConditions = stratFuncs
    strat = stra.Strategy(stratName, entryConditions, exitConditions)
    self.databaseManager = dbm.DatabaseManager(self.dbRef, self.conn, strat, auth)
    self.databaseManager.setTradingParameters(self.exchange, self.ticker)
def getDBData(symbol, timeframe='Y', quality=default_quality) -> dict:
    """Get a candle from the database.

    Arguments:
        symbol {str} -- stock symbol (eg: AAPL)

    Keyword Arguments:
        timeframe {str} -- the timeframe of the candle (default: {'Y'})
        quality {str} -- data-quality tier passed through to the store
            (default: {default_quality})

    Returns:
        dict -- candle
    """
    return DM.getData(symbol, timeframe, quality)
def CreateDatabaseManager(appID, result_p):
    """Create a DatabaseManager bound to the VOCALOID5 expression-DB directory.

    Returns None when the directory cannot be determined, the native call
    reports an error (result_p), or construction fails.
    """
    # BUGFIX: os.environ[...] raises KeyError when the variable is missing,
    # making the old `== None` / `== ""` checks on the concatenated path
    # unreachable; use .get() and test the raw value instead.
    common_files = os.environ.get("CommonProgramFiles")
    if not common_files:
        return None
    expDBDirPath = common_files + "\\VOCALOID5\\Explib"
    try:
        databaseManager = DatabaseManager.DatabaseManager(
            DatabaseManagerIF.VDM_createDatabaseManager(appID, expDBDirPath,
                                                        result_p))
        if result_p.contents.value != VDMError.VDMError.NotAny:
            return None
        return databaseManager
    except Exception:
        # Best-effort: any failure in the native layer yields None.
        return None
def run(self):
    """Scrape the Google News COVID dashboards (world / Turkey / USA) and
    append the extracted figures to the matching sqlite table."""
    links = [
        "https://news.google.com/covid19/map?hl=tr&gl=TR&ceid=TR%3Atr",
        "https://news.google.com/covid19/map?hl=tr&gl=TR&ceid=TR%3Atr&mid=%2Fm%2F01znc_",
        "https://news.google.com/covid19/map?hl=tr&gl=TR&ceid=TR%3Atr&mid=%2Fm%2F09c7w0",
    ]
    tables = ["Global_Data", "Turkiye_Data", "ABD_Data"]
    for table_name, link in zip(tables, links):
        page = requests.get(link).content
        soup = BeautifulSoup(page, 'html.parser')
        main_values = [tag.text for tag in soup.find_all("div", {"class": "UvMayb"})]
        detail_values = [tag.text for tag in soup.find_all("strong")]
        store = DatabaseManager.sqliteData(table_name)
        if len(detail_values) > 4:
            store.Add(main_values[0], detail_values[0],
                      main_values[1], detail_values[2],
                      main_values[2], detail_values[3],
                      main_values[3], detail_values[5])
        else:
            # Fallback layout when the page exposes fewer <strong> figures.
            store.Add(main_values[0], "None",
                      main_values[2], "None",
                      main_values[3], detail_values[0],
                      main_values[4], detail_values[2])
def get_districts_tweets():
    """API endpoint: all district tweets for the requested date, as JSON."""
    date = request.args.get('date')
    try:
        tweets = dbMan.get_districts_tweets(date).fetchall()
    except Exception:
        raise InvalidUsage("Encountered an error fetching the data from the database",
                           status_code=500)
    try:
        # Sanitise the HTML text of every tweet, then serialise row dicts.
        formatted = [dict(row) for row in map(format_html_text, tweets)]
        json_tweets = json.dumps(formatted, default=alchemyencoder)
    except Exception:
        raise InvalidUsage('Could not format twitter data', status_code=418)
    return json_tweets
def test_client(self):
    """End-to-end check: the REST query is translated to SQL, executed, and
    the first returned row matches the expected car record."""
    request = 'GET /AutomobileVanzare?Producator=Mercedes&Model=C180&Caroserie=hatchback'
    db = DatabaseManager('localhost')
    # BUGFIX: the REST string was bound to `request`, but the undefined name
    # `restQuery` was passed here, raising NameError.
    sqlQuery = DatabaseManager.getSQLqueryFromREST(request)
    # Get the result from the SQL server.
    carObjects = db.SelectFromDatabase(sqlQuery)
    # The record we expect to receive back.
    dictionary = {"Producator": "Mercedes", "Caroserie": "Sedan",
                  "Model": "C180", "AnProductie": 1999, "VolumMotor": 1800,
                  "Pret": 8000, "Carburant": "Benzina",
                  "Link": "http://www.google.ro"}
    # Renamed from `object`, which shadowed the builtin; kept to verify the
    # record constructs cleanly.
    expected = CarObject.carObject(dictionary)
    self.assertEqual(dictionary.values(), carObjects[0].values())
def ExtractData():
    """Scrape the FIFA men's world-ranking table and insert every row into
    the database, printing each insert's result."""
    http = urllib3.PoolManager()
    r = http.request(
        'GET',
        'https://www.fifa.com/fifa-world-ranking/ranking-table/men/index.html')
    soup = BeautifulSoup(r.data, "html5lib")

    # Map the movement icon's CSS class to a readable direction; unknown
    # classes pass through unchanged (same as the original if/elif chain).
    movement_names = {"rank-equal": "Equal", "rank-rise": "Up", "rank-fall": "Down"}

    for row in soup.find("table").tbody.find_all("tr"):
        ranking = row.find("td", {"class": "tbl-rank"}).span.text
        team_cell = row.find("td", {"class": "tbl-teamname"})
        countryName = team_cell.a.text
        flagLink = team_cell.span.img['src']
        abbreviation = team_cell.span.img['title']
        # Points render as "current (raw)".
        points_parts = row.find("td", {"class": "tbl-points"}).text.split('(')
        currentPoints = points_parts[0]
        rawPoints = points_parts[1].split(')')[0]
        previousPoints = row.find("td", {"class": "tbl-prevpoints"}).span.text
        positionDifference = row.find("td", {"class": "tbl-prevrank"}).span.text
        movement_class = row.find("td",
                                  {"class": "tbl-prevrank-icon"}).span['class'][0]
        positionMovement = movement_names.get(movement_class, movement_class)
        averagePoints = row.find("td", {"class": "tbl-points-avg"}).span.text
        confederation = row.find("td", {"class": "tbl-confederation"}).span.text
        print(DatabaseManager.InsertData(
            ranking, countryName, flagLink, abbreviation, currentPoints,
            rawPoints, previousPoints, positionDifference, positionMovement,
            averagePoints, confederation))
def __collect_data(self):
    """Refresh thresholds and feature toggles from the
    HOME_SCANNER_USER_SETTINGS table, connecting to the database on first use."""
    if not self.__db_connected:
        self.__database_manager = DatabaseManager.DatabaseManager()
        self.__db_connected = True
    rows = self.__database_manager.get_data_from_database(
        'HOME_SCANNER_USER_SETTINGS')
    # Expect a single settings row with at least 8 columns; anything less
    # leaves the current settings untouched.
    if len(rows) > 0 and len(rows[0]) > 7:
        settings = rows[0]
        self.__temperature_threshold = int(settings[1])
        self.__humidity_threshold = int(settings[2])
        self.__gas_threshold = int(settings[3])
        self.__video_enabled = int(settings[4])
        self.__face_detection_enabled = int(settings[5])
        self.__motion_detection_enabled = int(settings[6])
        self.__human_detection_enabled = int(settings[7])
class FirewallManager:
    """Translates MUD files into per-device iptables chains and persists the
    derived rules through DatabaseManager."""

    def __init__(self, dbName):
        self.dbManager = DatabaseManager(dbName)
        self.initIPTables()
        self.deviceDict = dict()
        self.loadRulesFromDB()

    def MUDtoFirewall(self, mac_addr, mud_file):
        """Derive in/out rules from *mud_file*, apply them, and persist them."""
        in_rules, out_rules = MUDtoRules(mud_file)
        try:
            self.implementDeviceRules(mac_addr, in_rules, out_rules)
        except Exception:
            # BUGFIX: narrowed from a bare except; fixed the "occured" typo.
            print("exception occurred")
        self.dbManager.addIoT(mac_addr, in_rules, out_rules)

    def isMACRegistered(self, mac_address):
        """Return whether *mac_address* is already known to the database."""
        return self.dbManager.isMACRegistered(mac_address)

    # A rule is of the form [protocol, port, domain] where all are strings.
    def implementDeviceRules(self, mac_addr, in_rules, out_rules):
        """Create a dedicated chain for the device and install its out rules."""
        self.deviceDict[mac_addr] = DeviceChain(mac_addr)
        self.deviceDict[mac_addr].buildChain(out_rules)

    def initIPTables(self):
        """Make the IoT chain and route all FORWARD traffic through it."""
        make_device_chain = 'iptables -N ' + iot_chain
        route_device_chain = 'iptables -I FORWARD 1 -j ' + iot_chain
        call(make_device_chain, shell=True)
        call(route_device_chain, shell=True)

    def newGenPurpose(self, mac):
        """Register *mac* as a general-purpose (non-IoT) device."""
        self.dbManager.addGenDevice(mac)

    def loadRulesFromDB(self):
        """Re-apply the persisted rules for every known IoT device."""
        for mac in self.dbManager.getIoTList():
            in_rules, out_rules = self.dbManager.getRules(mac)
            self.implementDeviceRules(mac, in_rules, out_rules)

    def updateDNSMapping(self, mac_addr, domain):
        """Refresh a device chain when a DNS answer maps a new address."""
        # DNS names arrive fully qualified with a trailing '.'; strip it.
        domain = domain[:len(domain) - 1]
        if mac_addr in self.deviceDict:
            self.deviceDict[mac_addr].refreshDomain(domain)
import csv import random import DatabaseManager # sub_set_features = [6,7,8,9,12,13,14,15,16,17,21,22,24,33,41,49] sub_set_indexes = DatabaseManager.get_sub_feature_indexes() data_reader = csv.reader(open("original_data.csv","rb")) data_writer = csv.writer(open("subset_features_data.csv", "wb")) header = data_reader.next() header = [header[j] for j in sub_set_indexes] data_writer.writerow(header) patientEncounters = {} readmitted = [] no_readmitted = [] for (index, row) in enumerate(data_reader): addRow = True if(row[1] in patientEncounters): #feature index 1 is patient_nbr #a encounter for this patient was already added addRow = False patientEncounters[row[1]] += 1 else: patientEncounters[row[1]] = 1 if(addRow and int(row[7]) in [13,14,19,20,21]): #feature index 7 is discharge_disposition
import DatabaseManager as db

# Manually log a single match result: darter 1 beat darter 3.
# NOTE(review): log_result is called elsewhere with a third reported_at
# argument — confirm it has a default value here.
db.log_result(1, 3)
# Main driver file for the program. Handles input from the user and the
# interaction between the classes (Python 2: raw_input).
from UrlManager import *
from DatabaseManager import *
import re

# if __name__ == "__main__"
print("***********OSDVB Parser**************")
database = DatabaseManager()
urlManager = UrlManager()
# BUGFIX: renamed from `exit`, which shadowed the builtin; the loop runs
# until the process is interrupted, as before.
done = False
while not done:
    userUrl = raw_input("Enter Url String from OSDVB search: ")
    # Not the best validation: only checks the URL prefix.
    if userUrl.find('http://www.osvdb.org/search/') != 0:
        print('Invalid Url Input...')
        continue
    vulList = urlManager.getList(userUrl)
    # BUGFIX: identity comparison for None instead of `==`.
    if vulList is None:
        print('Url cannot be parsed. Please check input...')
        continue
    for v in vulList:
        vul = urlManager.getVulnerability(v)
        if vul is None:
            print('Failed in adding vulnerability ' + v + ' to database')
            continue
        fail = database.addVulnerability(vul)
        if fail:
            print('Failed in adding vulnerability ' + v + ' to database')
            continue
#Runs continusouly, scanning slack for new messages and logging results in database import DatabaseManager as db from Slacker import get_messages from MessageParser import get_parsed_messages last_match_time = db.get_last_match_time() messages = get_messages(oldest_time=last_match_time) parsed_messages = get_parsed_messages(messages) for parsed_message in parsed_messages: if parsed_message['message_type'] == 'match_result': winner_darter_id = parsed_message['parsed_message']['winner_darter_id'] loser_darter_id = parsed_message['parsed_message']['loser_darter_id'] reported_at = parsed_message['parsed_message']['reported_at'] db.log_result(winner_darter_id,loser_darter_id,reported_at) print winner_darter_id else: pass #TODO 1) get last_matcht_time working, 2) figure out cursor closing issue
import csv import sys sys.path.append("../data/dataset_diabetes") import DatabaseManager print "Using diabetes dataset" data_reader = csv.reader(open("../data/dataset_diabetes/subset_features_data.csv", "rb")) headers = data_reader.next() data_list = [row for row in data_reader] temp_data_mat = np.array(data_list) # We need to convert categorical data to ints/floats so we can use one hot encoding data_mat = [] for (index, col) in enumerate(temp_data_mat.T): if index in DatabaseManager.get_indexes_to_encode(): unique_vals = [] for (ii, item) in enumerate(col): if item not in unique_vals: unique_vals.append(item) if item == "?": col[ii] = "NaN" else: col[ii] = unique_vals.index(item) data_mat.append(col) # convert out of the column format data_mat = np.array(data_mat).T # Imputer converts missing values (?'s) to the mean of the column
    # --- Tail of an enclosing function whose `def` begins before this chunk:
    # builds a SQL VALUES fragment of (zipcode, state, lat, lon) tuples. ---
    dom_list = head_dom.getElementsByTagName('zipCode')
    val_str = ""
    for dom_i in dom_list:
        zipcode = dom_i.getElementsByTagName('name')[0].firstChild.nodeValue
        try:
            # sometimes there is no lat/lon
            latitude = dom_i.getElementsByTagName('latitude')[0].firstChild.nodeValue
            longitude = dom_i.getElementsByTagName('longitude')[0].firstChild.nodeValue
        except:
            latitude = str(-1000)
            # NOTE(review): BUG — `latitude` is assigned twice; this second
            # line was almost certainly meant to be `longitude = str(-1000)`.
            # As written, `longitude` is unbound (NameError) if the very
            # first row lacks coordinates, and stale otherwise.
            latitude = str(-1000)
        val_str += "(" + zipcode + ",\"" + state_code + "\"," + latitude + "," + longitude + "),"
    # Drop the trailing comma.
    return val_str[:-1]


if __name__ == "__main__":
    import pprint
    tf = TruliaInfoFetcher('../conf/')
    # This clears the theft market meta store
    DatabaseManager.main()
    tf.fetch_all_states()
    tf.fetch_all_counties_threaded()
    tf.fetch_all_zipcodes_threaded()
    tf.fetch_all_cities_threaded()
from DatabaseManager import *
from Penalty import *
import os

# Start from a clean slate: drop any previous test database (Python 2 script).
if os.path.isfile("test.db"):
    os.remove("test.db")
databaseManager = DatabaseManager("test.db")

# Seed ten identical rows, echoing the highest row id after each insert.
for i in range(0, 10):
    databaseManager.insertData("'Roy Gero','Boston','Hooking','09-26-2016','Montreal',0,'Steve'")
    print databaseManager.getHighestID()

# Insert one row built through the Penalty object's SQL formatter.
event = Penalty("Claude McSlash", "Boston Bruins", "Slashing", True, "Calgary Flames", "July 10, 2015", ["Don", "Ron"])
databaseManager.insertData(event.formatForSQL())