def rem(id, masterpass):
    """List a user's stored accounts and delete the one the user picks.

    Args:
        id: primary key of the user in tbl_cryptostore (shadows builtin `id`;
            kept for caller compatibility).
        masterpass: the user's master-key hash object, passed back to
            session_start unchanged.
    """
    cn = connector()
    try:
        cur = cn.cursor()
        # FIX: original concatenated `id` into the SQL string (injection risk);
        # use a parameterized query instead. NOTE(review): assumes a DB-API
        # driver with %s paramstyle (e.g. MySQL Connector) — confirm.
        cur.execute(
            "SELECT acc_id, website_name FROM tbl_cryptostore WHERE id = %s",
            (str(id),),
        )
        res = cur.fetchall()
    finally:
        cn.close()

    if len(res) == 0:
        print("No passwords stored yet", end='\n\n')
        session_start(id, masterpass)
        return

    print("\nAvailable Accounts:")
    for pos, row in enumerate(res, start=1):
        print(str(pos) + " " + str(row[1]))

    choice = int(input()) - 1
    # NOTE(review): negative input wraps via Python indexing, as in the
    # original; only too-large choices are rejected here.
    if choice >= len(res):
        print("Wrong Option")
    else:
        acc_id = res[choice][0]
        cn = connector()
        try:
            cur = cn.cursor()
            cur.execute(
                "DELETE FROM tbl_cryptostore WHERE acc_id = %s", (acc_id,)
            )
            cn.commit()
        finally:
            cn.close()
    a = input("Press enter to continue")
    session_start(id, masterpass)
def graphOfChosenStockOverTime():
    """Fit a linear trend to every stock table and print 5-day predictions.

    Iterates over all tables returned by retTableList(), fits a linear
    regression of value against days-since-first-record, and prints the
    per-table predictions.

    Returns:
        The predictions list for the last table that had enough data
        (empty list if no table did).
    """
    conn = connect.connector()
    cursor = conn.cursor()
    names = retTableList()
    predictions = []
    for nam in names:
        # Table names come from SHOW TABLES (retTableList), not user input,
        # so interpolating the identifier here is not an injection vector.
        cursor.execute("SELECT * FROM " + nam)
        dates = []
        values = []
        row = cursor.fetchone()
        while row is not None:
            dates.append(row[0].strftime('%Y-%m-%d'))
            values.append(row[1])
            row = cursor.fetchone()
        # FIX: original raised IndexError on an empty table (dates[0]) and
        # linregress needs at least two points — skip degenerate tables.
        if len(dates) < 2:
            continue
        X = [days_between(day, dates[0]) for day in dates]
        model = linregress(X, values)  # x and y are arrays or lists
        slope, intercept = model.slope, model.intercept
        print("\nPredictions for upcoming 5 days for " + nam + ":")
        # TODO(review): offsets 143-147 are hard-coded day indices relative to
        # the first record; confirm they still match "upcoming 5 days" for the
        # current data span.
        new_x = [143, 144, 145, 146, 147]
        predictions = [slope * x + intercept for x in new_x]
    return predictions
def retTableList():
    """Return the names of all tables in the connected database."""
    conn = connect.connector()
    cursor = conn.cursor()
    cursor.execute("show tables")
    # Each result row's first column is the table name.
    return [record[0] for record in cursor.fetchall()]
def add(id, masterpass):
    """Prompt for a website/username/password and store the encrypted entry.

    Args:
        id: primary key of the logged-in user (shadows builtin `id`; kept
            for caller compatibility).
        masterpass: the user's master-key hash object used by encrypt().
    """
    web = input("Enter website name: ")
    username = input("Enter username or email id used for this website: ")
    # Suggest a random password of length 10-14 before the user types theirs.
    passGen(random.randint(10, 14))
    password = input("Enter password used for this website: ")
    crypto_pass = encrypt(masterpass, username, password)
    userid_hash = hashlib.sha256(username.encode())
    cn = connector()
    try:
        cur = cn.cursor()
        # FIX: original interpolated user-typed values straight into the SQL
        # string (injection risk); use a parameterized INSERT instead.
        # NOTE(review): assumes %s paramstyle (MySQL Connector) — confirm.
        cur.execute(
            "INSERT INTO tbl_cryptostore(website_name, userid_hash, "
            "crypto_pass, id) VALUES(%s, %s, %s, %s)",
            (web, userid_hash.hexdigest(), crypto_pass, str(id)),
        )
        cn.commit()
    finally:
        cn.close()
    print("Done...")
    a = input("Press enter to continue")
    session_start(id, masterpass)
def graphOfChosenStockOverTime():
    """Plot the full time series stored in the abbdaily table."""
    conn = connect.connector()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM abbdaily")
    dates, values = [], []
    # Drain the cursor one row at a time until fetchone() returns None.
    for record in iter(cursor.fetchone, None):
        dates.append(record[0].strftime('%Y-%m-%d'))
        values.append(record[1])
    plt.plot(dates, values)
    plt.xticks([], [])  # hide the (overcrowded) x-axis tick labels
    plt.show()
def cryptocheck(id, acc_id, masterpass):
    """Verify the username for an account and reveal its stored password.

    Args:
        id: primary key of the logged-in user.
        acc_id: account row id (unused in the visible lookup; TODO confirm
            the query should also filter on acc_id).
        masterpass: the user's master-key hash object.
    """
    print("Enter username/email id used for this account")
    username = input()
    user_hash = hashlib.sha256(username.encode())
    cn = connector()
    try:
        cur = cn.cursor()
        # FIX: original concatenated the hash into the SQL string (injection
        # risk); use a parameterized query. NOTE(review): assumes %s
        # paramstyle (MySQL Connector) — confirm.
        cur.execute(
            "SELECT crypto_pass FROM tbl_cryptostore WHERE userid_hash = %s",
            (user_hash.hexdigest(),),
        )
        res = cur.fetchall()
    finally:
        cn.close()
    if len(res) <= 0:
        print("Invalid username entered\n\n")
        sleep(2)
        session_start(id, masterpass)
    else:
        # NOTE(review): the original success branch was redacted ("******")
        # in this source and is syntactically invalid; it presumably
        # decrypted res[0][0] with masterpass/username before printing —
        # restore the project's decrypt call here. TODO confirm.
        print("Password: " + str(res[0][0]))
        a = input("Press enter to continue")
def login():
    """Prompt for credentials and start a session on success.

    Hashes the entered master key with SHA-256 for the credential check and
    derives a SHAKE-256 object from it to pass to session_start as the
    in-memory master password.
    """
    print("USER LOGIN")
    print("Enter your name:")
    name = input().lower()
    # FIX: corrected user-facing typo "mater key" -> "master key".
    print("Enter your master key:")
    mkey = input()
    mkeyhash = hashlib.sha256(mkey.encode())
    masterpass = hashlib.shake_256(mkey.encode())
    cn = connector()
    try:
        cur = cn.cursor()
        # FIX: original concatenated name/hash into the SQL string — a
        # textbook authentication-bypass injection; use a parameterized
        # query. NOTE(review): assumes %s paramstyle (MySQL Connector).
        cur.execute(
            "SELECT id FROM tbl_users WHERE master_hash = %s and "
            "username = %s",
            (mkeyhash.hexdigest(), name),
        )
        res = cur.fetchall()
    finally:
        cn.close()
    if len(res) == 0:
        print("Invalid credentials entered...")
        a = input("Press enter to continue")
        os.system('CLS')
    else:
        os.system('CLS')
        session_start(res[0][0], masterpass)
def register():
    """Prompt for a name and master key, store the new user, then log in.

    Recurses on itself when the master key is shorter than 8 characters.
    """
    print("USER REGISTRATION")
    print("Enter your name:")
    tname = input().lower()
    print(
        "Enter a masterkey you would like to use: (Should be atleast 8 characters long)"
    )
    tmkey = input()
    tmkeyhash = hashlib.sha256(tmkey.encode())
    if len(tmkey) >= 8:
        cn = connector()
        try:
            cur = cn.cursor()
            # FIX: original interpolated the username into the SQL string
            # (injection risk); use a parameterized INSERT.
            # NOTE(review): assumes %s paramstyle (MySQL Connector).
            cur.execute(
                "INSERT INTO tbl_users(master_hash, username) "
                "values(%s,%s)",
                (tmkeyhash.hexdigest(), tname),
            )
            cn.commit()
        finally:
            cn.close()
        os.system('CLS')
        login()
    else:
        print("Please make sure it is atleast 8 characters long...")
        a = input("Press enter to continue")
        os.system('CLS')
        register()
# Notebook-exported script: scrape World Bank "featured" indicator ids.
# NOTE(review): this chunk is truncated — the final `for` loop has no body
# in this view; do not run as-is.
import wbdata
import datetime
import iso3166
import requests
from bs4 import BeautifulSoup
import sys

sys.path.append('../other/')  # make the sibling `other/` module importable
import connect

# In[63]:
### Connect to database
con = connect.connector()

# In[14]:
### Web scraper for collecting featured indicators
url = 'http://data.worldbank.org/indicator?tab=featured'
response = requests.get(url)
html = response.content
# NOTE(review): no parser argument — BeautifulSoup will guess one and emit a
# warning; presumably 'html.parser' is intended — confirm.
soup = BeautifulSoup(html)

# In[61]:
### Collect indicatorID
indicatorID = []
all_links = soup.find_all("a")
for link in all_links:
# NOTE(review): loop body is missing from this chunk (truncated source).
import pandas as pd
import wbdata
import datetime
import sys
sys.path.append('../other/')
import connect

# Connect to database
con = connect.connector()

# Get countries, indicators
countries = wbdata.get_country(display=False)
indicators = wbdata.get_indicator(display=False)

# Collect CountryID
countryID = [country['id'] for country in countries]

# Collect indicatorID (skipping the first 1000 indicators, as before —
# presumably to drop an uninteresting prefix; verify against data source)
indicatorID = [indicators[pos]['id'] for pos in range(1000, len(indicators))]

# Make indDict for fetching data: map each raw id to a DB-safe column name
indDict = {indStr: indStr.replace('.', '_') for indStr in indicatorID}

# fetch data
# import createNetwork as cnet # import TweetsTextMining as ttm import pandas as pd import datetime import networkx import re from collections import Counter import EventTree import connect as cnt language = 'en' USER = '******' PASS = '******' HOST = 'localhost' DATABASE = 'db_paris_attack' connector = cnt.connector(USER, PASS, HOST, DATABASE) cursor = connector.cursor() start_time = datetime.date(2015, 11, 1) end_time = datetime.date(2015, 11, 30) # df = cnet.extract_dataframe(cursor, start_time, end_time) # df.to_csv("pairs_"+start_time.strftime("%Y-%m-%d")+".csv", index = False, encoding='utf-8') # df = pd.read_csv("/Users/xianteng/Downloads/pairs_2015-11-01.csv") # user_network = cnet.build_user_network(df) # print("=== number of nodes: " + str(user_network.number_of_nodes()) + "\n") # print("=== number of edges: " + str(user_network.number_of_edges()) + "\n") # df = pd.read_csv("/Users/xianteng/Downloads/pairs_2015-11-01.csv") # df.en = df[df['lang'] == 'en'] # ttm.term_frequency(df.en)
def searchTag(searchedTag):
    """Collect daily/monthly series for a tag and 5-day linear predictions.

    Args:
        searchedTag: base name of the tag; data is read from the
            "<tag>daily" and "<tag>monthly" tables.

    Returns:
        (dates_dail, values_dail, plt, pred_dates, predictions) — the last
        7 daily points, the module-level pyplot (with the monthly series
        plotted on it), predicted dates, and predicted values.
    """
    conn = connect.connector()
    cursor = conn.cursor()
    # NOTE(review): table identifiers cannot be bound as SQL parameters;
    # searchedTag must be validated/whitelisted upstream or this is an
    # injection vector.
    daily = searchedTag + "daily"
    monthly = searchedTag + "monthly"

    # Daily analysis restricted to the most recent 7 records.
    cursor.execute(
        "SELECT * FROM " + daily + " order by recordtime desc limit 7"
    )
    dates_dail = []
    values_dail = []
    row = cursor.fetchone()
    while row is not None:
        dates_dail.append(row[0].strftime('%Y-%m-%d'))
        values_dail.append(row[1])
        row = cursor.fetchone()

    # Monthly analysis — plotted on the shared pyplot state.
    cursor.execute("SELECT * FROM " + monthly)
    dates_mon = []
    values_mon = []
    row = cursor.fetchone()
    while row is not None:
        dates_mon.append(row[0].strftime('%Y-%m-%d'))
        values_mon.append(row[1])
        row = cursor.fetchone()
    plt.plot(dates_mon, values_mon)
    plt.xticks([], [])

    # ----------------- predictions section -----------------
    cursor.execute("SELECT * FROM " + daily)
    dates = []
    values = []
    row = cursor.fetchone()
    while row is not None:
        dates.append(row[0].strftime('%Y-%m-%d'))
        values.append(row[1])
        row = cursor.fetchone()

    X = [days_between(day, dates[0]) for day in dates]
    model = linregress(X, values)  # x and y are arrays or lists
    slope, intercept = model.slope, model.intercept

    # TODO(review): offsets 143-147 are hard-coded day indices relative to
    # the first record; confirm they still mean "upcoming 5 days".
    new_x = [143, 144, 145, 146, 147]
    # FIX: the original appended `dates[0] + timedelta(days=i)` where `i`
    # was a stale date *string* from an earlier loop — str + timedelta
    # raises TypeError and the intended offset `tt` was never used. Parse
    # the base date once and offset by each prediction index.
    base = datetime.datetime.strptime(dates[0], '%Y-%m-%d')
    pred_dates = [base + datetime.timedelta(days=tt) for tt in new_x]
    predictions = [slope * tt + intercept for tt in new_x]
    return dates_dail, values_dail, plt, pred_dates, predictions