def reports(meterId=None):
    """ Creates excel .xlsx reports for a given date range """
    if meterId is None:
        return 'Report API'
    else:
        if request.method == 'GET':
            params = request.args.to_dict()
        elif request.method == 'POST':
            params = request.form.to_dict()
        else:
            params = {}
        try:
            sDate = params['sDate']
            eDate = params['eDate']
        except KeyError:
            return 'ERROR: URL must be in form meterNo?sDate=2014-06-01&eDate=2014-06-02'
        settings_filename = os.path.abspath('settings/dbExample.json')
        params_dict = {'METERID': meterId, 'SDATE': sDate, 'EDATE': eDate}
        query_filename = os.path.abspath('sql/MeterReadings.sql')
        hProfile, dProfile = database.run_query(settings_filename, query_filename, params_dict)
        OnePhase = False
        query_filename = os.path.abspath('sql/MeterEvents.sql')
        hEvents, dEvents = database.run_query(settings_filename, query_filename, params_dict)
        filePath = excel.create_excel_report(meterId, sDate, eDate, OnePhase,
                                             hProfile, dProfile, hEvents, dEvents)
        return render_template('report_download.html', filePath=filePath)
def meters(meterId=None):
    """ Generates the required data for the meters page """
    if meterId is None:
        h, d = get_meter_list()
        tableMeterList = {'headings': h, 'data': d}
        return render_template('meters.html', tableMeterList=tableMeterList)
    else:
        lat, lon = get_meter_coords(meterId)
        location = {'lat': lat, 'lon': lon}
        settings_filename = os.path.abspath('settings/dbExample.json')
        params_dict = {'METERID': meterId}
        query_filename = os.path.abspath('sql/MonthlyReports.sql')
        h, d = database.run_query(settings_filename, query_filename, params_dict)
        reports = {'headings': h, 'data': d}
        query_filename = os.path.abspath('sql/Last10Events.sql')
        h, d = database.run_query(settings_filename, query_filename, params_dict)
        tableLast10Events = {'headings': h, 'data': d}
        return render_template('meter.html', meterId=meterId,
                               tableLast10Events=tableLast10Events,
                               location=location, reports=reports)
def purge_ip(ip):
    yield database.run_query(
        """DELETE FROM reports
           WHERE cracker_id IN (SELECT id FROM crackers WHERE ip_address=?)""", ip)
    yield database.run_query("DELETE FROM crackers WHERE ip_address=?", ip)
    yield database.run_query("DELETE FROM legacy WHERE ip_address=?", ip)
    returnValue(0)
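# Several snippets here (purge_ip, download_from_legacy_server, update_stats_cache,
# get_qualifying_crackers, purge_legacy_addresses) use Twisted's generator style:
# each yield waits on a Deferred and returnValue() produces the result. A minimal
# sketch of how such a function is normally declared and composed; the
# @inlineCallbacks decorator is an assumption, since it is not visible in these
# snippets:
from twisted.internet import defer

@defer.inlineCallbacks
def purge_and_confirm(ip):
    yield purge_ip(ip)       # waits until all three DELETEs have completed
    defer.returnValue(ip)    # how a generator-based coroutine returns a value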
def upload_data(clean_data):
    # NOTE: player_name and db are not defined here; the original appears to
    # rely on them being module-level globals.
    try:
        for i in range(len(clean_data)):
            # The statement is assembled by string concatenation, which is
            # fragile and open to SQL injection; see the parameterized sketch
            # below for a safer alternative.
            sql_command = (
                'INSERT INTO dbo.player_data_complete '
                '(FullName,GameDate,HomeTeam,VisitorTeam,DayofWeek,WL,Min,'
                'FgPct,FtPct,Reb,Ast,Stl,Blk,Tov,Pts) VALUES ('
                + "'" + str(player_name) + "','" + str(clean_data["gameDate"][i])
                + "','" + str(clean_data["home"][i]) + "','" + str(clean_data["away"][i])
                + "','" + str(clean_data["DayofWeek"][i]) + "','" + str(clean_data["wl"][i])
                + "'," + str(clean_data["min"][i]) + ",'" + str(clean_data["fgPct"][i])
                + "','" + str(clean_data["ftPct"][i]) + "'," + str(clean_data["reb"][i])
                + "," + str(clean_data["ast"][i]) + "," + str(clean_data["stl"][i])
                + "," + str(clean_data["blk"][i]) + "," + str(clean_data["tov"][i])
                + "," + str(clean_data["pts"][i]) + ")")
            print sql_command
            db.run_query(sql_command)
    except Exception:
        # Swallows every error, so failed inserts pass silently.
        pass
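# A minimal parameterized sketch of the same insert, assuming a plain DB-API
# connection (sqlite3 here purely for illustration; the original targets a
# dbo.* table through a db.run_query helper). Binding values with placeholders
# avoids the quoting bugs and SQL injection risk of string concatenation. The
# function and table names in this sketch are illustrative, not from the
# original project.
import sqlite3

def upload_data_parameterized(conn, player_name, clean_data):
    sql = ("INSERT INTO player_data_complete "
           "(FullName,GameDate,HomeTeam,VisitorTeam,DayofWeek,WL,Min,"
           "FgPct,FtPct,Reb,Ast,Stl,Blk,Tov,Pts) "
           "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)")
    rows = [(player_name, clean_data["gameDate"][i], clean_data["home"][i],
             clean_data["away"][i], clean_data["DayofWeek"][i], clean_data["wl"][i],
             clean_data["min"][i], clean_data["fgPct"][i], clean_data["ftPct"][i],
             clean_data["reb"][i], clean_data["ast"][i], clean_data["stl"][i],
             clean_data["blk"][i], clean_data["tov"][i], clean_data["pts"][i])
            for i in range(len(clean_data["gameDate"]))]
    conn.executemany(sql, rows)  # the driver escapes each bound value
    conn.commit()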
def to_db(json_data):
    """
    takes json object and sends it to the database
    :param json_data: json data loaded from file
    """
    for site in json_data:
        title = site["title"]
        link = site["link"]
        query = "INSERT into sites(link,title) VALUES(?,?)"
        conn = create_connection(DB_NAME)
        run_query(conn, query, [link, title])
        conn.close()
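# A short usage sketch for to_db, assuming the input file holds a JSON array of
# objects with "title" and "link" keys (the filename here is illustrative):
import json

with open("sites.json") as f:
    to_db(json.load(f))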
def set(self):
    if len(self.upd.get()) != 0:
        query = 'UPDATE students_info SET %s = ? WHERE %s = ? ' % (self.text, self.text)
        parameters = (self.upd.get(), self.updat)
        run_query(query, parameters)
        messagebox.showinfo('save', 'your data updated')
        self.tree.destroy()
        self.upbtn.destroy()
        self.reg.delete(0, END)
        self.roll.delete(0, END)
        self.Upwin.destroy()
        Geometry(self.root2, 350, 300)
    else:
        messagebox.showerror('error', 'please enter data')
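# Placeholders can only bind values, not identifiers, which is why the UPDATE
# above interpolates the column name with %s. A common guard is to whitelist the
# identifier before interpolating it; this is a sketch, and the column list is
# inferred from the other students_info snippets, not confirmed by the source:
ALLOWED_COLUMNS = {'Name', 'Roll', 'Reg', 'Course', 'Address', 'DOB'}

def build_update(column):
    if column not in ALLOWED_COLUMNS:
        raise ValueError('unexpected column: %r' % column)
    return 'UPDATE students_info SET %s = ? WHERE %s = ?' % (column, column)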
def checkout(search_string):
    # if session['logged_in'] == False:
    #     abort(777, description="Not logged in.")
    total = database.run_total("""SELECT SUM(UnitPrice) FROM cart""")
    sql = """INSERT INTO invoices (
                 CustomerId, InvoiceDate, BillingAddress, BillingCity,
                 BillingState, BillingCountry, BillingPostalCode, Total)
             VALUES (?, ?, ?, ?, ?, ?, ?, ?)"""
    params = (request.values['CustomerId'], request.values['InvoiceDate'],
              request.values['BillingAddress'], request.values['BillingCity'],
              request.values['BillingState'], request.values['BillingCountry'],
              request.values['BillingPostalCode'], total)
    database.run_insert(sql, params)
    sql2 = "SELECT * FROM invoices WHERE CustomerId = ?"
    params2 = (search_string, )
    result = database.run_query(sql2, params2)
    return return_as_json(result)
def download_from_legacy_server():
    if config.legacy_server is None or config.legacy_server == "":
        returnValue(0)

    logging.info("Downloading hosts from legacy server...")
    rows = yield database.run_query('SELECT `value` FROM info WHERE `key`="last_legacy_sync"')
    last_legacy_sync_time = int(rows[0][0])
    try:
        server = yield deferToThread(xmlrpclib.ServerProxy, config.legacy_server)
        response = yield deferToThread(server.get_new_hosts,
                                       last_legacy_sync_time, config.legacy_threshold,
                                       [], config.legacy_resiliency)
        try:
            last_legacy_sync_time = int(response["timestamp"])
        except:
            logging.error("Illegal timestamp {} from legacy server".format(response["timestamp"]))
        #Registry.DBPOOL.runOperation('UPDATE info SET `value`=%s WHERE `key`="last_legacy_sync"', (str(last_legacy_sync_time),))
        database.run_operation('UPDATE info SET `value`=? WHERE `key`="last_legacy_sync"',
                               str(last_legacy_sync_time))
        now = time.time()
        logging.debug("Got {} hosts from legacy server".format(len(response["hosts"])))
        for host in response["hosts"]:
            legacy = yield Legacy.find(where=["ip_address=?", host], limit=1)
            if legacy is None:
                logging.debug("New host from legacy server: {}".format(host))
                legacy = Legacy(ip_address=host, retrieved_time=now)
            else:
                logging.debug("Known host from legacy server: {}".format(host))
                legacy.retrieved_time = now
            yield legacy.save()
    except Exception as e:
        logging.error("Error retrieving info from legacy server: {}".format(e))
def get_meter_list():
    settings_filename = os.path.abspath('settings/dbExample.json')
    query_filename = os.path.abspath('sql/ListMeters.sql')
    params_dict = {}
    headings, rows = database.run_query(settings_filename, query_filename, params_dict)
    return headings, rows
def search_tracks_genre(search_string):
    sql = """SELECT tracks.Name, *
             FROM tracks
             INNER JOIN genres ON tracks.GenreId = genres.GenreId
             WHERE INSTR(genres.Name, ?)>0"""
    params = (search_string, )
    result = database.run_query(sql, params)
    return return_as_json(result)
def login():
    email = input("Enter your email: ")
    password = getpass.getpass()
    # Credentials are concatenated straight into the SQL text; binding them as
    # parameters (as in the sketch after upload_data above) would be safer.
    query = ("SELECT * FROM User WHERE email=" + "'" + email +
             "' AND password=" + "'" + password + "' ")
    item = run_query(query)
    for i in item:
        y_N = input("You have been logged in successfully.\n"
                    "Do you want to delete info (y/N) ? ")
        if y_N == 'y':
            query = ("DELETE FROM User WHERE email=" + "'" + email +
                     "' AND password=" + "'" + password + "' ")
            if run_query(query):
                print("\nDeletion successful.\n")
        break
    else:
        print("\nPlease check your email or password\n")
def add_student():
    if request.method == 'GET':
        return render_template('index.html')
    else:
        query = request.form['query']
        print("gotcha", query)
        # e.g. "get title of course with student whose name is Shravan"
        query = ln2sql.Ln2sql(
            database_path=os.path.join(os.getcwd(), 'college.sql'),
            language_path=os.path.join(os.getcwd(), 'english.csv'),
        ).get_query(query)
        status = db.run_query(query)
        return jsonify({'content': status})
def data_entry(self):
    if self.validation():
        if self.Primarycheck():
            query = 'INSERT INTO students_info VALUES(?, ?, ?, ?, ?, ?)'
            parameters = (self.name.get(), self.roll.get(), self.reg.get(),
                          self.course.get(), self.address.get(), self.DOB.get())
            run_query(query, parameters)
            messagebox.showinfo('save', 'Student information saved')
            self.clear()
        else:
            messagebox.showerror('error', 'this registration already exists')
            self.clear()
    else:
        messagebox.showerror('save', 'Please enter information!')
def search_tracks_album(search_string):
    sql = """SELECT *
             FROM tracks
             INNER JOIN albums ON tracks.albumId = albums.albumId
             WHERE INSTR(albums.Title, ?)>0"""
    params = (search_string, )
    result = database.run_query(sql, params)
    return return_as_json(result)
def Primarycheck(self):
    # Reg is interpolated directly into the query text; binding it as a
    # parameter would be safer.
    query = 'SELECT Reg FROM students_info WHERE Reg = %s ' % self.reg.get()
    c = run_query(query)
    for i in c:
        if i[0]:
            return False
    return True
def get_meter_coords(meterId):
    # The sqlite3 connection below is opened but never used or closed;
    # the actual lookup goes through database.run_query.
    dbPath = 'data/meters.db'
    conn = sqlite3.connect(dbPath)
    curs = conn.cursor()
    settings_filename = os.path.abspath('settings/dbExample.json')
    query_filename = os.path.abspath('sql/MeterCoords.sql')
    params_dict = {'METERID': meterId}
    headings, rows = database.run_query(settings_filename, query_filename, params_dict)
    lat = rows[0][0]
    lon = rows[0][1]
    return lat, lon
def get_unbal_chart_data(meterId):
    """ Return json object for flot chart """
    query_filename = os.path.abspath('sql/Last50UnbalReadings.sql')
    params_dict = {'METERID': meterId}
    h, d = database.run_query(SETTINGS_FILENAME, query_filename, params_dict)
    chartdata = {}
    chartdata['label'] = 'Unbalance Profile'
    chartdata['a'] = []
    for row in d:
        dTime = datetime.datetime.strptime(row[0], '%Y-%m-%d %H:%M:%S')
        ts = int(time.mktime(dTime.timetuple()) * 1000)
        chartdata['a'].append([ts, row[1]])
    return chartdata
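# A self-contained sketch of the timestamp conversion used above. flot expects
# JavaScript-style timestamps (milliseconds since the epoch), and time.mktime
# interprets the struct_time in the local zone, so the result is zone-dependent:
import datetime
import time

def to_flot_ts(text):
    """Convert a 'YYYY-MM-DD HH:MM:SS' string to epoch milliseconds."""
    dt = datetime.datetime.strptime(text, '%Y-%m-%d %H:%M:%S')
    return int(time.mktime(dt.timetuple()) * 1000)

print(to_flot_ts('2014-06-01 12:00:00'))  # e.g. 1401620400000, zone-dependent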
def find(self):
    if self.valid():
        query = ''
        if self.reg.get():
            query = "SELECT * FROM students_info WHERE Reg = %s" % (self.reg.get())
            # UNION SELECT * FROM students_info WHERE Name = %s " % (self.reg.get(), self.name.get())
        else:
            query = "SELECT * FROM students_info WHERE Name =" + "'" + self.roll.get() + "'"
        # print(query)
        c = run_query(query)
        a = 0
        for i in c:
            # print(i)
            if i[0:]:
                Geometry(self.root2, 690, 350)
                # Tree View
                a = 1
                self.tree = ttk.Treeview(self.root2, height=5, column=2)
                self.tree.grid(row=0, column=4, columnspan=3)
                self.tree.heading('#0', text='Details', anchor=CENTER)
                self.tree.heading('#1', text='Values', anchor=CENTER)
                # print information
                self.tree.insert('', 0, text='Name', values=i[0:])
                self.tree.insert('', 1, text='Roll', values=i[1])
                self.tree.insert('', 2, text='Reg', values=i[2])
                self.tree.insert('', 3, text='Course', values=i[3:])
                self.tree.insert('', 4, text='Address', values=i[4:])
                # update button
                self.upbtn = Button(self.root2, text='Update',
                                    command=self.select,
                                    font=('times', 13, 'bold'))
                self.upbtn.grid(row=4, column=5)
        if a == 0:
            messagebox.showerror('error', 'this key is not available')
    else:
        messagebox.showerror('error', 'Enter any answer')
def signup():
    user = mail = 0
    Username = email = password = ''
    while True:
        if user == 0:
            Username = input("Enter Your Username: ")
            if re.match("^[a-zA-Z0-9_]{6,}$", Username):
                user = 1
            else:
                print('Wrong Username\n')
                continue
        if mail == 0:
            email = input("Enter Your Email: ")
            if re.match("^\w.+@[a-zA-Z]+?\.[a-zA-Z]{2,3}$", email):
                mail = 1
            else:
                print("Wrong email\n")
                continue
        else:
            print("Enter Your password ")
            password = getpass.getpass()
            if re.match('^(?=.*[A-Za-z])(?=.*\d)[A-Za-z\d]{8,}$', password):
                again_password = getpass.getpass("Again password: ")
                if password == again_password:
                    # A span here was masked in the source; it presumably ran
                    # the INSERT that stores the new account.
                    print("\nRegistration has been successful")
                    break
                else:
                    print("Not match password\n")
                    continue
            else:
                print('Wrong password\n')
                continue
def search_tracks_artist(search_string):
    sql = "SELECT * FROM tracks WHERE instr(Composer, ?)>0"
    params = (search_string, )
    result = database.run_query(sql, params)
    return return_as_json(result)
def update_stats_cache():
    global _stats_busy
    global _cache
    if _stats_busy:
        logging.debug("Already updating statistics cache, exiting")
        returnValue(None)
    _stats_busy = True

    logging.debug("Updating statistics cache...")
    # Fill history table for yesterday, when necessary
    yield update_recent_history()
    yield update_country_history()

    now = time.time()
    stats = {}
    stats["last_updated"] = now
    stats["has_hostnames"] = config.stats_resolve_hostnames
    # Note paths configured in main.py by the Resource objects
    stats["static_base"] = "../static"
    stats["graph_base"] = "../static/graph"
    stats["server_version"] = __init__.version
    try:
        #rows = yield database.run_query("SELECT num_hosts,num_reports, num_clients, new_hosts FROM stats ORDER BY time DESC LIMIT 1")
        stats["num_hosts"] = yield models.Cracker.count()
        stats["num_reports"] = yield models.Report.count()
        rows = yield database.run_query("SELECT count(DISTINCT ip_address) FROM reports")
        if len(rows) > 0:
            stats["num_clients"] = rows[0][0]
        else:
            stats["num_clients"] = 0

        yesterday = now - 24*3600
        stats["daily_reports"] = yield models.Report.count(where=["first_report_time>?", yesterday])
        stats["daily_new_hosts"] = yield models.Cracker.count(where=["first_time>?", yesterday])

        recent_hosts = yield models.Cracker.find(orderby="latest_time DESC", limit=10)
        yield threads.deferToThread(fixup_crackers, recent_hosts)
        stats["recent_hosts"] = recent_hosts

        most_reported_hosts = yield models.Cracker.find(orderby="total_reports DESC", limit=10)
        yield threads.deferToThread(fixup_crackers, most_reported_hosts)
        stats["most_reported_hosts"] = most_reported_hosts

        logging.info("Stats: {} reports for {} hosts from {} reporters".format(
            stats["num_reports"], stats["num_hosts"], stats["num_clients"]))

        if stats["num_reports"] > 0:
            yield Registry.DBPOOL.runInteraction(make_daily_graph)
            yield Registry.DBPOOL.runInteraction(make_monthly_graph)
            yield Registry.DBPOOL.runInteraction(make_contrib_graph)
            yield Registry.DBPOOL.runInteraction(make_history_graph)
            yield Registry.DBPOOL.runInteraction(make_country_bargraph)

        if _cache is None:
            _cache = {}
        _cache["stats"] = stats
        _cache["time"] = time.time()
        logging.debug("Finished updating statistics cache...")
    except Exception as e:
        log.err(_why="Error updating statistics: {}".format(e))
        logging.warning("Error updating statistics: {}".format(e))
def get_estimate():
    global zips
    house_types = ['S', 'F', 'T', 'O', 'D']
    build_types = ['Y', 'N']
    est_types = ['L', 'F']
    type_offset = 3
    build_offset = 8
    est_offset = 10
    try:
        if request.method == 'GET':
            if request.args is not None:
                home_args = dict(request.args.to_dict().items())
                # Run db query
                result_db = database.run_query(input_args=home_args)
                zipcode = home_args['zip']
                type_ = home_args['type']
                newbuild = home_args['newbuild']
                estatetype = home_args['esttype']
                day = 365.0 * 6.5
                #zips = pd.read_pickle("data/ukpostcodes.pkl")
                #zips.drop('id',axis=1,inplace=True)
                try:
                    zipdf = zips.loc[zips['postcode'] == str(zipcode).upper()]
                    lat = zipdf.iloc[0]['latitude']
                    long_ = zipdf.iloc[0]['longitude']
                except (KeyError, IndexError):
                    return jsonify(result=0)
                # One hot encoding for type, newbuild and estatetype
                # List indices as follows
                inputs = np.zeros(12)
                inputs[0] = lat
                inputs[1] = long_
                inputs[2] = day
                type_idx = house_types.index(type_)
                build_idx = build_types.index(newbuild)
                est_idx = est_types.index(estatetype)
                inputs[type_offset + type_idx] = 1.0
                inputs[build_offset + build_idx] = 1.0
                inputs[est_offset + est_idx] = 1.0
                # Run ML part
                tmp_price = run_mlp.run_once(web_input=inputs)
            else:
                print "Args not found"
    except:
        # Bare except: any failure above leaves lat/long_/tmp_price undefined below.
        pass

    # Pack up ML results
    result_ml = {}
    result_ml['latitude'] = lat
    result_ml['longitude'] = long_
    #tmp_price = run_mlp.run_once(web_input=inputs)
    result_ml['price'] = int(tmp_price) - int(tmp_price) % 100

    # Pack up DB results
    # Get data from result tuple
    #myio, myo, mtio, mto, vyio, vyo = result_db[0], result_db[1], result_db[2], result_db[3], result_db[4], result_db[5]
    comps = result_db[6]
    comps_ll = []
    hist_data = result_db[7]
    try:
        for address in comps:
            zipdf = zips.loc[zips['postcode'] == str(address[2]).strip()]
            lat = zipdf.iloc[0]['latitude']
            long_ = zipdf.iloc[0]['longitude']
            tmplist = list(address)
            tmplist.append(lat)
            tmplist.append(long_)
            comps_ll.append(tuple(tmplist))
    except:
        pass

    result_full = (result_db[0], result_db[1], result_db[2], result_db[3],
                   result_db[4], result_db[5], comps_ll, hist_data, result_ml)
    return jsonify(result=result_full)
def get_all_tracks_html():
    result = database.run_query("SELECT * FROM tracks")
    return render_template("all_tracks.html", data=result)
def test_posts_table_in_stats_database_has_non_zero_count(self):
    count = int(run_query("select count(*) from posts")[0][0])
    self.assertGreater(count, 0)
def get_sites():
    """ returns the sites stored in DB """
    query = "select link,title from sites"
    conn = create_connection(DB_NAME)
    sites = run_query(conn, query)
    return sites
import Spotupy
import database
import pandas as pd
import spotipy
from database import *
from spotipy.oauth2 import SpotifyClientCredentials

pitchfork_data = database.run_query(sqlfile='pitchfork_data.sql')
column_names2 = ["reviewid", "album", "artist", "score", "best_new_music",
                 "author", "pub_day", "pub_month", "pub_year", "genre", "label"]
pitchfork_data = pd.DataFrame(pitchfork_data, columns=column_names2)
sp = Spotupy.run_credentials()

column_names = ["reviewid", "acousticness", "danceability", "duration_ms",
                "energy", "instrumentalness", "key", "liveness", "loudness",
                "mode", "speechiness", "tempo", "time_signature", "valence",
                "break"]
def purge_legacy_addresses():
    yield database.run_truncate_query('legacy')
    yield database.run_query('UPDATE info SET `value`=0 WHERE `key`="last_legacy_sync"')
    returnValue(0)
def search_tracks_year(search_string):
    # Note: despite the function name, this matches against the Name column.
    sql = "SELECT * FROM tracks WHERE instr(Name, ?)>0"
    params = (search_string, )
    result = database.run_query(sql, params)
    return return_as_json(result)
def get_qualifying_crackers(min_reports, min_resilience, previous_timestamp,
                            max_crackers, latest_added_hosts):
    # Thanks to Anne Bezemer for the algorithm in this function.
    # See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=622697

    # This query takes care of conditions (a) and (b)
    # cracker_ids = yield database.runGetPossibleQualifyingCrackerQuery(min_reports, min_resilience, previous_timestamp)
    cracker_ids = yield database.run_query("""
        SELECT DISTINCT c.id, c.ip_address FROM crackers c
        WHERE (c.current_reports >= ?)
            AND (c.resiliency >= ?)
            AND (c.latest_time >= ?)
        ORDER BY c.first_time DESC
        """, min_reports, min_resilience, previous_timestamp)

    if cracker_ids is None:
        returnValue([])

    # Now look for conditions (c) and (d)
    result = []
    for c in cracker_ids:
        cracker_id = c[0]
        if c[1] in latest_added_hosts:
            logging.debug("Skipping {}, just reported by client".format(c[1]))
            continue
        cracker = yield Cracker.find(cracker_id)
        if cracker is None:
            continue
        logging.debug("Examining cracker:")
        logging.debug(cracker)
        reports = yield cracker.reports.get(orderby="first_report_time ASC")
        #logging.debug("reports:")
        #for r in reports:
        #    logging.debug("  "+str(r))
        # Note: this debug line assumes at least min_reports reports exist.
        logging.debug("r[m-1].first, prev: {}, {}".format(
            reports[min_reports-1].first_report_time, previous_timestamp))
        if (len(reports) >= min_reports and
                reports[min_reports-1].first_report_time >= previous_timestamp):
            # condition (c) satisfied
            logging.debug("c")
            result.append(cracker.ip_address)
        else:
            logging.debug("checking (d)...")
            satisfied = False
            for report in reports:
                #logging.debug("    "+str(report))
                if (not satisfied and
                        report.latest_report_time >= previous_timestamp and
                        report.latest_report_time - cracker.first_time >= min_resilience):
                    logging.debug("    d1")
                    satisfied = True
                if (report.latest_report_time <= previous_timestamp and
                        report.latest_report_time - cracker.first_time >= min_resilience):
                    logging.debug("    d2 failed")
                    satisfied = False
                    break
            if satisfied:
                logging.debug("Appending {}".format(cracker.ip_address))
                result.append(cracker.ip_address)
            else:
                logging.debug("    skipping")
        if len(result) >= max_crackers:
            break

    if len(result) < max_crackers:
        # Add results from legacy server
        extras = yield Legacy.find(where=["retrieved_time>?", previous_timestamp],
                                   orderby="retrieved_time DESC",
                                   limit=max_crackers-len(result))
        result = result + [extra.ip_address for extra in extras]

    logging.debug("Returning {} hosts".format(len(result)))
    returnValue(result)
def get_all_tracks():
    result = database.run_query("SELECT * FROM tracks")
    return return_as_json(result)