def insert_mass_data(query, query_data):
    """Run a bulk insert against SoftCmpyInfoDB.db.

    Opens a fresh connection through the schema creator and delegates the
    batch execution to the manipulation layer.
    """
    db_link = DBConnector('', "SoftCmpyInfoDB.db")
    schema_conn = db_link.create_schema()
    manipulator = DBManipulation(schema_conn)
    manipulator.many_insert_query_executor(query, query_data)
def insert_mass_data(self, query, query_data):
    """Run a bulk insert against SentenceAnalyzer.db."""
    db_link = DBConnector('', "SentenceAnalyzer.db")
    manipulator = DBManipulation(db_link.create_schema())
    manipulator.many_insert_query_executor(query, query_data)
def get_single_items_data(item_url):
    """Scrape one listing page: print its title spans and harvest links.

    Side effects: inserts new Google-Maps links into the DB, appends them
    to ``maps.txt`` and to the module-level ``map_list``; appends unseen
    hrefs to the module-level ``link_list``.
    """
    plain_text = requests.get(item_url).text
    soup = BeautifulSoup(plain_text)

    # Listing title / info spans are printed verbatim.
    for item_name in soup.findAll('span', {'class': ['postingtitletext', 'postingingo']}):
        print(item_name.text)

    # Compile once instead of on every anchor.  NOTE(review): the dots are
    # unescaped so '.' matches any character — kept as-is to preserve the
    # original matching behavior.
    map_regex = re.compile(r'\bmaps.google.com/maps\b')

    for link in soup.findAll('a'):
        href = link.get('href')
        try:
            # href is None for anchors without an href attribute;
            # re.search would raise TypeError on it.
            mo = map_regex.search(href) if href is not None else None
            if mo and not test_unique(href, map_list):
                DBConnector().insert('maplink', maplinkcol=mo.string)
                print('Location added: ' + mo.string)
                # Persist the location so later runs can skip it; the old
                # code never closed this handle on error.
                with open('maps.txt', 'a') as fw:
                    fw.write(mo.string)
                    fw.write('\n')
                map_list.append(mo.string)
        except Exception:
            # Best-effort harvesting: one bad anchor must not abort the
            # whole page scrape (was a bare ``except`` before).
            pass
        if not test_unique(href, link_list):
            link_list.append(href)
def buyGame(self):
    """Link the current user to the current game in ``usergames``.

    Returns whatever the connector's insert reports.
    """
    connector = DBConnector()
    user_id = self.user.getUser()[0][0]
    game_id = self.game.getGame()[0][0]
    return connector.insert('usergames', idusers=user_id, idgames=game_id)
def run_training(self, url, phishing):
    """Record WHOIS/host facts for *url*, then tally every test method.

    Each method in ``self.test_methods`` gets its occurrence counter bumped
    and either its positive or negative counter, depending on its verdict.
    """
    res = getWhoIs(url)
    DBConnector().create_url_info(
        url,
        phishing,
        getURLHostName(url),
        getURLIPAddress(url),
        getResRange(res),
        getResCountry(res),
    )
    for method in self.test_methods:
        DBConnector().increment_num_of_occurrences(method)
        verdict = getattr(URLTests, method)(url)
        if verdict:
            DBConnector().increment_num_of_positives(method)
        else:
            DBConnector().increment_num_of_negatives(method)
def __init__(self, phone, first_name='', last_name='', dob=''):
    """Look up a person record by phone in the ``vet`` database.

    Optional name/DOB attributes are only attached when a non-empty value
    was supplied; the lookup result's message is printed.
    """
    self.table = 'person'
    self.phone = phone
    if first_name:
        self.first_name = first_name
    if last_name:
        self.last_name = last_name
    if dob:
        self.dob = dob
    lookup = DBConnector(db_name='vet').getResults(self.table, phone=self.phone)
    print(lookup.message)
def __init__(self):
    """Open an autocommitting DB connection and (re)initialise the schema."""
    self.db_file_name = None
    self.release_version = None
    conn_wrapper = DBConnector.get_connection()
    # Commit every statement immediately.
    conn_wrapper.conn.autocommit = True
    self.connector = conn_wrapper
    Init_DB().init_db()
def pandas_analysis(self):
    """Load all sentence rows into a DataFrame and scatter-plot sentiment by ID.

    Raises:
        Exception: when no SQLite connection could be established.
    """
    connector = DBConnector('', "SentenceAnalyzer.db")
    conn = connector.create_schema()
    # Guard clause; was ``if conn != None`` — identity check is the idiom.
    if conn is None:
        raise Exception("The data can't be retrieved from SQLITE")
    df = pd.io.sql.read_sql(self.SELECT_ALL_QUERY, conn)
    plt.scatter(x=df['ID'], y=df['sentimentValue'])
    plt.show()
def __init__(self, client_id=None, client_secret=None, dbURl=None, db_user=None, db_password=None):
    """Build a spotipy client (client-credentials flow) plus a DB connector.

    Missing credentials fall back to the module-level CLIENT_ID /
    CLIENT_SECRET constants.
    """
    # Fall back to the configured application credentials.
    self.client_id = client_id if client_id is not None else CLIENT_ID
    self.client_secret = client_secret if client_secret is not None else CLIENT_SECRET
    self.client_creds = SpotifyClientCredentials(
        client_id=self.client_id, client_secret=self.client_secret)
    # App-level token: no user login involved.
    self.spotipy = Spotify(auth=self.client_creds.get_access_token())
    self.connector = DBConnector()
class DBResulter(object): ''' classdocs ''' dbconnector = DBConnector() apihttprs = [] def __init__(self): ''' Constructor ''' self.dbconnctor = DBConnector() def read_http_rs(self): f = open(filepath, 'a+') for line in open(filepath, 'a+'): tmplist = line.split('@@@') #print type(tmplist[1]) if (eval(tmplist[1])['isdeploydb'] == '1'): #print type(tmplist[1]) self.apihttprs.append( dict(apiname=tmplist[0], httprs=eval(tmplist[1]))) return 0 def test_db_rs(self, apihttprs): self.read_http_rs() if (apihttprs['apiname'] == 'phone') or (apihttprs['apiname'] == 'schedulePhone'): key = apihttprs['httprs']['act_rs']['data']['reqId'] query = 'select * from bainuo_sms_req where req_id=\'%s' % ( key) + '\'' print query db_rs = self.get_dbrs(query) print db_rs if (len(db_rs) > 0): apihttprs['db_rs'] = db_rs else: apihttprs['db_rs'] = 'FALSE' assert len(db_rs) > 0 ################## if (apihttprs['apiname'] == 'HandShake'): key = apihttprs['httprs']['act_rs']['data']['taskId'] query = 'select * from bainuo_sms_task where id=\'%s' % ( key) + '\'' print query db_rs = self.get_dbrs(query) print db_rs if (len(db_rs) > 0): apihttprs['db_rs'] = db_rs else: apihttprs['db_rs'] = 'FALSE' assert len(db_rs) > 0 def get_dbrs(self, query): return self.dbconnector.selct(query)
def test_ip_blacklisted(url):
    """Return True when *url*'s resolved IP appears in the DB blacklist."""
    ip = getURLIPAddress(url)
    # Normalise the connector's truthy answer to a real bool; replaces the
    # redundant ``if ...: return True / return False`` and the large block
    # of dead commented-out test code that followed it.
    return bool(DBConnector().is_ip_blacklisted(ip))
def loadDBRicette(self):
    """Load every recipe URL from ``link_ricette.txt`` into the database.

    For each recipe: inserts the recipe row, its ingredients (reusing an
    existing ingredient when one matches by name+link), the
    ingredient-to-recipe relations, and the ordered preparation steps.
    """
    cnx = DBConnector.connect()
    crs = cnx.cursor()
    with open("link_ricette.txt", "r") as fp:
        for url in fp:
            print("Loading " + url + "\n")
            ric = Ricetta(url)

            # Insert the recipe itself and remember its generated id.
            add_ricetta = "INSERT INTO ricette(link,category,subcategory) VALUES(%s,%s,%s)"
            crs.execute(add_ricetta, (url, ric.category, ric.subCategory))
            ric_id = crs.lastrowid
            cnx.commit()

            # Insert the recipe's ingredients (deduplicated by name+link).
            if ric.ingredients is not None:
                for ing in ric.ingredients:
                    try:
                        crs.execute(
                            "select id from ingredienti where nome = %s and link = %s",
                            ing)
                        # ``rowcount`` is the public DB-API attribute (was
                        # the private ``_rowcount``).
                        if crs.rowcount > 0:
                            ing_id = crs.fetchone()[0]
                        else:
                            crs.execute(
                                "INSERT INTO ingredienti(nome,link) VALUES(%s,%s)",
                                ing)
                            cnx.commit()
                            ing_id = crs.lastrowid
                        crs.execute(
                            "INSERT INTO ingredienti_ricette(id_ricetta,id_ingrediente) VALUES(%s,%s)",
                            (ric_id, ing_id))
                        cnx.commit()
                    except mysql.connector.Error as Err:
                        if Err.errno == 1062:
                            # Violation of the composite UNIQUE(nome,link)
                            # key: the pair already exists, skip it.
                            continue
                        print(Err.msg)
                        continue

            # Insert the ordered preparation steps as one batch.
            if ric.prep is not None and len(ric.prep) > 0:
                query = ("INSERT INTO preparazioni_ricette"
                         "(id_ricetta,step,descrizione_step) VALUES(%s,%s,%s)")
                # enumerate() replaces the old manual index/step counters,
                # which produced exactly these (ric_id, step, text) tuples.
                tup_lst = [(ric_id, step, text)
                           for step, text in enumerate(ric.prep)]
                try:
                    crs.executemany(query, tup_lst)
                except mysql.connector.Error as Err:
                    print(Err.msg)
                    continue
    cnx.close()
def get_negative_prior(self):
    """Return the negative prior: negative count over total entries.

    (Computed from the DB; note both counts are integers, so on Python 2
    this would be integer division — behavior unchanged here.)
    """
    # One connector instead of two separate instances for the two queries.
    db = DBConnector()
    return db.get_negative_prior() / db.get_total_number_of_entries()
def __init__(self):
    """Create the database connector this object delegates all queries to."""
    self.db = DBConnector()
def __init__(self):
    """Collect every ``test*`` method of URLTests and register each in the DB."""
    self.test_methods = [name for name in dir(URLTests) if name.startswith("test")]
    for method_name in self.test_methods:
        # Ensure the DB has a counters record for this test method.
        DBConnector().create_record(method_name)
import pymysql.cursors import json from DBConnector import DBConnector # Connect to the database dbconnector = DBConnector('localhost', 'root', '', 'gerrit_test') inline_comments = dbconnector.inline_comments() patch_details = dbconnector.patch_details() patches = dbconnector.patches() # people = dbconnector.people() requests = dbconnector.requests() request_details = dbconnector.request_detail() review_comments = dbconnector.review_comments() reviews = dbconnector.reviews() def getInlineComments(patch_id, req_id): I = {} for comment in inline_comments: if comment["patchset_id"] == patch_id and comment[ "request_id"] == req_id: I[comment["file_name"]] = { 'comment_id': comment["comment_id"], 'in_reply_to': comment["in_reply_to"], 'line_number': comment["line_number"], 'author_id': comment["author_id"], 'written_on': comment["written_on"], 'status': comment["status"], 'side': comment["side"], 'message': comment["message"],
def create_table(tableString):
    """Create a table in SoftCmpyInfoDB.db from the given DDL string."""
    schema_conn = DBConnector('', "SoftCmpyInfoDB.db").create_schema()
    DBManipulation(schema_conn).create_table(tableString)
def get_all_data(self):
    """Return every row produced by SELECT_ALL_QUERY from SentenceAnalyzer.db."""
    schema_conn = DBConnector('', "SentenceAnalyzer.db").create_schema()
    return DBManipulation(schema_conn).select_all_data(self.SELECT_ALL_QUERY)
class API:
    """Thin CRUD layer over the ``events`` table; every method returns a
    jsonified Response (Python 2 code)."""

    def __init__(self):
        # One shared connector for all requests handled by this instance.
        self.db = DBConnector()

    # Delete an event based on the id
    def delete_event(self, args):
        """Delete the event whose id is taken from *args*."""
        query = "delete from events where id=%s"
        variables = [args.get('id')]
        print variables
        try:
            self.db.execute(query, variables=variables)
            return Response(200, "Successfully deleted event.").jsonify()
        except mysql.connector.Error as e:
            # Surface the MySQL error code/message to the caller.
            return Response(e.errno, e.msg).jsonify()

    # Update an event based on the id
    def update_event(self, request):
        """Update title/start/end of the event whose id is in the form data.

        Missing form fields are passed through as None (SQL NULL).
        """
        query = """ update events set title=%s, start_date=%s, end_date=%s where id=%s """
        variables = [
            request.form["title"] if "title" in request.form else None,
            request.form["start_date"] if "start_date" in request.form else None,
            request.form["end_date"] if "end_date" in request.form else None,
            int(request.form["id"]) if "id" in request.form else None
        ]
        try:
            self.db.execute(query, variables=variables)
            return Response(200, "Successfully updated event.").jsonify()
        except mysql.connector.Error as e:
            return Response(e.errno, e.msg).jsonify()

    # Create a new event and return the created ID
    def post_event(self, request):
        """Insert a new event from form data; Response data is the new id."""
        query = """ insert into events (title, start_date, end_date) values (%s, %s, %s); select LAST_INSERT_ID(); """
        variables = [
            request.form["title"] if "title" in request.form else None,
            request.form["start_date"] if "start_date" in request.form else None,
            request.form["end_date"] if "end_date" in request.form else None,
        ]
        try:
            # multi=True: two statements (the INSERT, then the SELECT of
            # the freshly generated id).
            result = self.db.fetch_all(query, variables=variables, multi=True)
            # result[1] is the SELECT's result set: first row, first column.
            event_id = result[1][0][0]
            return Response(200, "Successfully created event.", str(event_id)).jsonify()
        except mysql.connector.Error as e:
            return Response(e.errno, e.msg).jsonify()

    # get all events
    def get_events(self, args):
        """List events, optionally filtered to [start_date, end_date]."""
        start = args.get('start_date')
        end = args.get('end_date')
        variables = None
        if start is None and end is None:
            # all events
            query = "select * from events;"
        elif start is not None and end is not None:
            # date range parameter
            query = """ select * from events where start_date >= %s and start_date <= %s """
            variables = [start, end]
        else:
            # Exactly one bound supplied: reject the request.
            return Response(400, "Invalid URL request.").jsonify()
        try:
            result = self.db.fetch_all(query, variables)[0]
            return self.__jsonify_result(result)
        except mysql.connector.Error as e:
            return Response(e.errno, e.msg).jsonify()

    # Convert results array to json
    def __jsonify_result(self, result):
        """Map rows to dicts keyed by column headers; datetimes become
        FMT-formatted strings."""
        json_arr = []
        headers = self.db.headers()
        # convert row to dictionary
        for row in result:
            row_dict = {}
            for i in range(len(headers)):
                if type(row[i]) == datetime.datetime:
                    row_dict[headers[i]] = row[i].strftime(
                        FMT)  # date to string
                else:
                    row_dict[headers[i]] = row[i]
            json_arr.append(row_dict)
        return json.dumps(json_arr)
import pymysql.err as dbException import getpass from DBConnector import DBConnector from SerialListener import SerialListener from Queries import * from FunctionHub import * print("\n\r\n\r\n\r Initialisation...") print(" ---------------------------------------------------------") configFile = "config.json" kcy125 = SerialListener(configFile) database = DBConnector(configFile) print(" Initialisation finished!") print("\n\r\n\r\n\r Admin authorisation...") print(" ---------------------------------------------------------") logging = 1 while logging: adminLogin = input(" Login: "******"\n\r User '" + adminLogin +
def getGame(self):
    """Fetch rows from ``games`` whose Game column matches this object's game."""
    return DBConnector().getResults('games', Game=self.game)
import datetime import pymysql.err as dbException from DBConnector import DBConnector from SerialListener import SerialListener from Queries import * from FunctionHub import * print("\n\r Initialisation...\n\r") print(" ---------------------------------------------------------") configFile = "config.json" kcy125 = SerialListener(configFile) database = DBConnector(configFile) print(" Initialisation finished!\n\r") while True: print("\n\r\n\r\n\r Person handling...") print(" ---------------------------------------------------------") print(" Press card...\n\r") card = IDCardAdapter(kcy125.SerialRead()) database.Necromancy() queryResult = database.QueryExecute(SelectCurrentCard(card)) if queryResult: database.ThrowError(DBErrorMessageHandler(1062, card)) else: queryResult = database.QueryExecute( SelectTodayOrder(str(datetime.date.today()), card))
log.error('send_mail failed') if __name__ == '__main__': args = get_args() if args.debug: log.setLevel(logging.DEBUG) else: log.setLevel(logging.INFO) fileHandler = logging.FileHandler('steamGrpService.log') fileHandler.setFormatter(logFormatter) log.addHandler(fileHandler) # preparing objects dbconnection = DBConnector() dbconnection.connect() parser = SiteParser() log.info('starting main loop') while True: items = [] with open(args.urljson, 'r') as f: log.debug('loading group urls from json...') urls_dict = json.load(f) for url_id in urls_dict: if urls_dict[url_id]['type'] in parser.canParse: items = items + parser.get_item_list( urls_dict[url_id]['url'], urls_dict[url_id]['type']) log.info('got {} announcements from rss feed'.format(items.__len__()))
def __init__(self):
    '''Constructor: create this object's DB connector.'''
    # NOTE(review): the attribute name looks like a typo for ``dbconnector``
    # — confirm against every reader of this attribute before renaming.
    self.dbconnctor = DBConnector()
def __init__(self):
    """Build the main Restaurant Manager frame: menus, buttons, status bar."""
    wx.Frame.__init__(self, None, title="Restaurant Manager v0.1",
                      pos=(150, 150), size=(350, 400))
    # Helper objects backing the menu/button callbacks.
    self.theDBConnector = DBConnector(self)
    self.mySqlUpdater = mySqlUpdater(self)
    self.myHtmlViewer = HtmlViewer(self)
    panel = wx.Panel(self)
    self.Bind(wx.EVT_CLOSE, self.OnClose)
    self.statusbar = self.CreateStatusBar()

    # --- menu bar ---------------------------------------------------------
    menubar = wx.MenuBar()
    fileMenu = wx.Menu()
    menubar.Append(fileMenu, '&File')
    MenuQuitItem = fileMenu.Append(wx.ID_EXIT, 'Quit', 'Quit application')
    self.Bind(wx.EVT_MENU, self.OnClose, MenuQuitItem)
    MenuAddCustomerItem = fileMenu.Append(wx.ID_ANY, 'Add Customer', 'Adding new Customer')
    self.Bind(wx.EVT_MENU, self.OnAddCustomer, MenuAddCustomerItem)
    MenuAddTableItem = fileMenu.Append(wx.ID_ANY, 'Add Table', 'Adding new Table')
    self.Bind(wx.EVT_MENU, self.OnAddTable, MenuAddTableItem)
    sqlMenu = wx.Menu()
    menubar.Append(sqlMenu, '&Sql')
    MenuUpdateSqlItem = sqlMenu.Append(wx.ID_ANY, 'Update', 'Update Sql')
    self.Bind(wx.EVT_MENU, self.mySqlUpdater.UpdateMySql, MenuUpdateSqlItem)
    MenuPrintLastResultSqlItem = sqlMenu.Append(wx.ID_ANY, 'Print Last Result', 'Prints Last Result')
    self.Bind(wx.EVT_MENU, self.theDBConnector.PrintLastResult, MenuPrintLastResultSqlItem)
    self.SetMenuBar(menubar)

    # --- main panel layout ------------------------------------------------
    box = wx.BoxSizer(wx.VERTICAL)
    m_text = wx.StaticText(panel, -1, "Information")
    m_text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD))
    m_text.SetSize(m_text.GetBestSize())
    box.Add(m_text, 0, wx.ALL, 10)
    # Created with empty labels; presumably filled in later (see the
    # UpdateInformation() call below) — verify.
    self.num_of_customers_button = wx.Button(panel, wx.ID_ANY, "")
    self.num_of_customers_button.Bind(wx.EVT_BUTTON, self.theDBConnector.RunTest)
    box.Add(self.num_of_customers_button, 0, wx.ALL, 10)
    self.num_of_tables_button = wx.Button(panel, wx.ID_ANY, "")
    self.num_of_tables_button.Bind(wx.EVT_BUTTON, self.OnTablesButtonClicked)
    box.Add(self.num_of_tables_button, 0, wx.ALL, 10)
    languages = ['C', 'C++', 'Python', 'Java', 'Perl']
    self.combo = wx.ComboBox(panel, choices=languages)
    box.Add(self.combo, 0, wx.ALL, 10)
    self.choice = wx.Choice(panel, choices=languages)
    box.Add(self.choice, 0, wx.ALL, 10)
    m_test_button = wx.Button(panel, wx.ID_ANY, "Test database")
    m_test_button.Bind(wx.EVT_BUTTON, self.theDBConnector.RunTest)
    box.Add(m_test_button, 0, wx.ALL, 10)
    panel.SetSizer(box)
    panel.Layout()
    self.UpdateInformation()
# News-source scraper modules, one per feed/section.
import bbc_health
import bbc_politics
import bbc_science
import bbc_technology
import ctv_business
import ctv_canada
import ctv_entertainment
import ctv_health
import ctv_lifestyle
import ctv_politics
import ctv_politics  # NOTE(review): duplicate import — harmless but droppable
import ctv_science_tech
import ctv_sports
import ctv_world

# Single shared DB connection handed to every scraper.
database = DBConnector()

# Run each scraper; each persists its articles through ``database``.
globalnews_rss.globalnews_rss(database)
bbc_business.bbcnews_business(database)
bbc_education.bbcnews_education(database)
bbc_entertainment.bbcnews_entertainment(database)
bbc_health.bbcnews_health(database)
bbc_politics.bbcnews_politics(database)
bbc_science.bbcnews_science(database)
bbc_technology.bbcnews_technology(database)
ctv_canada.ctvnews_canada(database)
ctv_entertainment.ctvnews_entertainment(database)
ctv_health.ctvnews_health(database)
ctv_lifestyle.ctvnews_lifestyle(database)
def get_check_negative_likelihood(self, key):
    """Return the add-one (Laplace) smoothed negative likelihood for *key*:

        (negatives(key) + 1) / (total negatives + number of tests)
    """
    # One connector for all three queries (was three separate instances).
    db = DBConnector()
    return (db.get_num_of_negatives(key) + 1) \
        / (db.get_total_num_of_negatives() + db.get_number_of_tests())
def getUser(self):
    """Fetch rows from ``users`` whose Name matches this object's name."""
    return DBConnector().getResults('users', Name=self.name)
# 'author': 'Scout Finch', # 'title': 'Life in Maycomb', # 'content': 'Life in Maycomb is fun.', # 'date_posted': 'April 20, 2018', # 'link': 'https://www.google.com' # }, # { # 'author': 'Atticus Finch', # 'title': 'The Courthouse', # 'content': 'The courthouse is not good.', # 'date_posted': 'April 21, 2018', # 'link': 'https://www.google.com' # } # ] connector = DBConnector() @app.route("/") @app.route("/home") def home(): if request.cookies.get(Constants.CATEGORIES[0]) is None: # No cookies are saved for this person's news preferences. return redirect(url_for('preferences')) # Get a dictionary of users preferences. preferences = preferences_from_request(request) # Get a list of all the categories that the user wants to see. active_categories = active_preferences(request) return render_template('home.html', posts=articles_from_db(connector, active_categories),
def create_table(self, tableString):
    """Create a table in SentenceAnalyzer.db from the given DDL string."""
    schema_conn = DBConnector('', "SentenceAnalyzer.db").create_schema()
    DBManipulation(schema_conn).create_table(tableString)
def __init__(self):
    """Open the shared DB connection and switch it to autocommit mode."""
    connection = DBConnector.get_connection()
    # Every statement is committed immediately.
    connection.conn.autocommit = True
    self.connector = connection
from DBConnector import DBConnector
from sklearn.cluster import DBSCAN
from sklearn.feature_extraction import DictVectorizer

db = DBConnector()
db.selectReader("temp_1_1_0")

# Pull every tag document; each carries a list of observations in "obs_by".
a = list(db.find({}))

# One feature dict per tag: measurement uuid -> RSSI / 100.
# Only the first 1000 observations per tag are used; like the original
# update() loop, a repeated uuid keeps its last value.
feature_matrix = [
    {measure["measurement_uuid"]: measure["RSSI"] / 100
     for measure in tag["obs_by"][:1000]}
    for tag in a
]

# Sparse vectorisation keeps memory bounded across many distinct uuids.
v = DictVectorizer(sparse=True)
x = v.fit_transform(feature_matrix)

# Density-based clustering of tags by their observation signatures.
dbscan = DBSCAN(eps=10, min_samples=2).fit(x)
labels = dbscan.labels_

print(len(labels))
# Pair each tag id with its cluster label (order matches ``a``);
# replaces the old ``for i in range(len(labels))`` indexing loop.
result = [[tag["_id"], label] for tag, label in zip(a, labels)]
print(result)
class Ingestor():
    """Pulls artists, categories and playlists from Spotify into the database."""

    def __init__(self, client_id=None, client_secret=None, dbURl=None,
                 db_user=None, db_password=None):
        """Set up the spotipy client (client-credentials flow) and DB connector.

        Falls back to the module-level CLIENT_ID / CLIENT_SECRET when no
        credentials are supplied.  The db* parameters are accepted but not
        used in this constructor.
        """
        if client_id is None:
            client_id = CLIENT_ID
        if client_secret is None:
            client_secret = CLIENT_SECRET
        self.client_id = client_id
        self.client_secret = client_secret
        self.client_creds = SpotifyClientCredentials(
            client_id=self.client_id, client_secret=self.client_secret)
        # create spotipy client
        self.spotipy = Spotify(auth=self.client_creds.get_access_token())
        self.connector = DBConnector()

    def ingest(self):
        """Top-level entry point: ingest categories (and their playlists)."""
        self.ingest_categories()

    def get_artist_by_name(self, name):
        """Retrieve a list of artist objects by searching for the artist name."""
        q = 'artist:' + name
        return self.spotipy.search(q, limit=10, offset=0, type='artist',
                                   market=None)['artists']['items']

    def ingest_related_artist(self, artist_id, limit=NUM_REL_ARTISTS, _depth=0):
        """Recursively ingest artists related to *artist_id*.

        The artist must already be in the DB; at most *limit* related
        artists are taken per level, recursing while ``_depth > 0``.
        """
        # TODO: check if artist DNE and insert if so
        self.connector.update_artist(artist_id, 'ingested', 'true')
        ara = self.spotipy.artist_related_artists(artist_id)
        for i, a in enumerate(ara['artists']):
            # Stop once ``limit`` artists have been taken (the old
            # ``i > limit`` off-by-one ingested limit + 1 of them).
            if i >= limit:
                break
            self.connector.insert_artist(a)
            self.connector.insert_related_relation(artist_id, a['id'])
            if _depth > 0:
                # BUG FIX: the decremented depth used to be passed
                # positionally, landing in ``limit`` instead of ``_depth``.
                self.ingest_related_artist(a['id'], limit=limit,
                                           _depth=_depth - 1)

    def ingest_by_name(self, name):
        """Ingest the first artist found when searching by name."""
        res = self.get_artist_by_name(name)[0]
        self.connector.insert_artist(res)

    def ingest_by_id(self, artist_id):
        """Ingest artist by id."""
        res = self.spotipy.artist(artist_id)
        self.connector.insert_artist(res)

    def ingest_categories(self):
        """Ingest Spotify's list of categories, plus each one's playlists."""
        print("Pull categories from API")
        categories = self.spotipy.categories()['categories']['items']
        for c in tqdm(categories, desc='Inserting categories'):
            self.connector.insert_category(c)
            self.ingest_category_playlist(c['id'])

    def ingest_category_playlist(self, category_id):
        """Ingest every playlist of one category (best-effort on API errors)."""
        # This is a list of playlists
        try:
            play_lists = self.spotipy.category_playlists(
                category_id)['playlists']['items']
        except SpotifyException as e:
            print('Category %s play list ingestion failed' % category_id)
        else:
            for p in tqdm(play_lists, desc='Insertiing Category playlists'):
                self.connector.insert_playlist(p)
                # TODO: insert playlist category relation
                #   (category_id and p['id'])
                # TODO: ingest tracks (p['tracks'])

    def clear_database(self):
        """Clear the database this ingestor is connected to."""
        # TODO: add warning
        self.connector.clear_database()
from DBConnector import DBConnector
import json

#tup = (('1001', 'Andy', 'Depczynski', '3 Woodridge Ave.', 'Cheektowaga', 'NY', '14225', 'datetime.date(1989, 9, 6)'),)

# Fetch every row of the ``person`` table from the ``vet`` database.
c = DBConnector(db_name='vet')
result = c.getResults('person')

# Number every individual column value across all rows as id1, id2, ...
# Renamed from ``dict`` so the builtin type is no longer shadowed.
records = {}
count = 1
for row in result.message:
    for value in row:
        key = 'id' + str(count)
        # NOTE(review): ``{count, value}`` is a *set* literal — if a
        # (number, value) pair was intended this should be a tuple; kept
        # as-is to preserve the original behavior.
        records[key] = {count, value}
        count += 1

print(records)
for key in records:
    print(key)
import pymysql.cursors
import json
from DBConnector import DBConnector

# Connect to the database
connector = DBConnector('localhost', 'root', '', 'kolpbdc_site')

# Dump the stored books, then persist one more.
print("Getting from DB...")
stored_books = connector.books()
print(stored_books)

connector.saveBook("10", "My book")