def create_messages_table():
    """Create the `messages` table in db_files/test.sqlite3.

    Columns mirror a GroupMe message payload: the message id is the
    primary key, `system` is an integer flag, and `favorited_by` holds a
    comma-separated list of user ids.
    """
    con = db_connect('db_files/test.sqlite3')
    cur = con.cursor()
    # Bug fix: the id column's declared type was misspelled "test";
    # SQLite's loose typing let it pass silently, but "text" is intended.
    messages_sql = """
        CREATE TABLE messages (
            id text PRIMARY KEY,
            created_at integer,
            user_id integer,
            group_id integer,
            name text NOT NULL,
            text text NOT NULL,
            system integer,
            favorited_by text)"""
    cur.execute(messages_sql)
    con.commit()
    # Fix: the connection was previously leaked (never closed).
    con.close()
def remove_user_table(rm_table_name: str, requesting_user: str = "NO_USER_SPECIFIED"):
    """Drop a user-uploaded table if it is registered to `requesting_user`.

    Looks the table up in the uploads registry; when a matching
    (table_name, username) row exists, drops the table and removes the
    registry row.  Returns a human-readable status string.
    """
    dbConnection = db_utils.db_connect()
    # Security fix: values were interpolated into the SQL via json.dumps(),
    # which is not SQL-safe quoting and remains injectable.  Use bound
    # parameters instead.  (db_utils.db_connect() yields a SQLAlchemy
    # connection elsewhere in this module, so text() binds are available —
    # confirm against db_utils if the driver changes.)
    lookup = sqlalchemy.text(
        f"SELECT table_name FROM {db_config.USER_DB_UPLOADS_TBL_NAME} "
        "WHERE table_name = :tn AND username = :un;")
    result = pd.read_sql(lookup, dbConnection,
                         params={"tn": rm_table_name, "un": requesting_user})
    if result.empty:
        print("Invalid delete")
        return f"Invalid delete."
    delete_this_table = result['table_name'].tolist()[0]
    # Identifiers cannot be bound parameters; `delete_this_table` comes
    # from our own registry row, not directly from the caller.
    dbConnection.execute(f"DROP TABLE IF EXISTS {delete_this_table}")
    dbConnection.execute(
        sqlalchemy.text(
            f"DELETE FROM {db_config.USER_DB_UPLOADS_TBL_NAME} "
            "WHERE table_name = :tn"),
        {"tn": rm_table_name})
    print(f"Success: Deleted Table {delete_this_table}")
    return f"Success: Deleted Table {delete_this_table}"
def log(self):
    """Write this observation to the external database.

    Inserts the instance's timestamp, temperature, and detailed_status
    into the `observations` table, commits, closes the connection, and
    returns a confirmation string.
    """
    connection = db_connect()
    cursor = connection.cursor()
    insert_sql = '''INSERT INTO observations (timestamp, temperature, detailed_status) VALUES (?, ?, ?)'''
    row = (self.timestamp, self.temperature, self.detailed_status)
    cursor.execute(insert_sql, row)
    connection.commit()
    connection.close()
    return 'Observation logged'
def collect_incomes():
    """Build per-region income indicators for 2020 vs. 2019.

    Pulls income data for the current (2020) and previous (2019) years
    and aggregates four indicators by "ADMIN" (region code): land tax,
    personal income tax, and general-fund income excluding inter-budget
    transfers for both years.  The four frames are merged on "ADMIN" via
    merge_all().  Column labels are kept in Ukrainian because downstream
    reports reference them verbatim.
    """
    current, previous = generate_income_query(2020), generate_income_query(
        2019)
    df_current = db_connect(current)
    df_previous = db_connect(previous)
    # Land tax: payments whose INCO code is in the LAND set.
    land = (df_current.loc[df_current["INCO"].isin(LAND)].groupby(
        "ADMIN", as_index=False)["EXECUTED"].sum().rename(
            columns={"EXECUTED": "Плата за землю (p2_05)"}))
    # Personal income tax (INCO code 11010000).
    pdfo = (df_current.loc[df_current["INCO"].eq("11010000")].groupby(
        "ADMIN", as_index=False)["EXECUTED"].sum().rename(
            columns={"EXECUTED": "Податок на дохід фіз осіб (для p2_01)"}))
    # General fund without official transfers — current year.
    # FIN_SOURCE "C" marks the general fund; INCO prefixes 1/2/3/5
    # followed by zeros select the top-level income categories.
    mask = df_current["FIN_SOURCE"].eq("C") & df_current["INCO"].str.contains(
        "^[1235]0000000")
    wo_transfers = (df_current.loc[mask].groupby(
        "ADMIN", as_index=False)["EXECUTED"].sum().rename(
            columns={"EXECUTED": "Дохід без міжбюдж. трансфертів (p2_02)"}))
    # General fund without official transfers — previous year.
    mask = df_previous["FIN_SOURCE"].eq(
        "C") & df_previous["INCO"].str.contains("^[1235]0000000")
    wo_transfers_previous = (df_previous.loc[mask].groupby(
        "ADMIN", as_index=False)["EXECUTED"].sum().rename(
            columns={
                "EXECUTED": "Дохід без міжбюдж. трансфертів _ ПОПЕРЕДНІЙ РІК"
            }))
    return merge_all(
        [land, pdfo, wo_transfers, wo_transfers_previous],
        on="ADMIN",
        return_all_regions=False,
    )
def artists_page():
    """Render an HTML table of every artist paired with their albums.

    Returns the table as an HTML string.
    """
    conn = db_connect()
    cur = conn.cursor()
    # Bug fix: the LEFT JOIN had no ON clause, producing a full cross
    # product (every artist paired with every album).  Join on the
    # artist foreign key instead (Chinook schema: albums.ArtistId).
    cur.execute(
        'SELECT artists.Name, albums.Title FROM artists '
        'LEFT JOIN albums ON albums.ArtistId = artists.ArtistId '
        'ORDER BY artists.Name'
    )
    rows = cur.fetchall()
    html = """
    <table>
        <tr>
            <th>Artist</th><th>Album</th>
        </tr>"""
    for row in rows:
        html += "<tr> <td>{}</td> <td>{}</td> </tr>".format(row[0], row[1])
    html += "</table>"
    return html
def get_latest_update_covid(date: str) -> pd.DataFrame:
    """Return, per county, the most recent covid row on or before `date`.

    The inner query finds each county's latest date not after the
    requested one; the outer join pulls the full row for that date.
    """
    connection = db_utils.db_connect()
    # Gets the last valid date's data if exists and no data exists on the
    # actual date.
    sql = '''SELECT t.index, t.date, t.county, t.state, t.fips, t.cases, t.deaths FROM ( SELECT county, MAX(date) as MaxDate FROM ( select date, county, state,cases, deaths from main_covid_data where DATE(date)<= %(var)s) as A GROUP BY county ) r INNER JOIN main_covid_data t ON t.county = r.county AND t.date = r.MaxDate'''
    frame = pd.read_sql(sql=sql, con=connection, params={"var": date})
    return frame
def main():
    """Assemble the P2 regional indicators workbook (P02_006.xlsx).

    Joins Q3-2020 capital expenditures with the income indicators from
    collect_incomes(), derives per-capita and year-over-year ratios, and
    writes the result to the interim P2 folder.
    """
    # Capital expenditures (ECON codes starting with 3) for the 24 oblast
    # budgets, Q3 2020, summed per region.
    query_p2_4 = """ SELECT [ADMIN], SUM([EXECUTED]) as [Капітальні видатки (p2_04)] FROM [Budget].[dbo].[OpenBudgetExpenses] WHERE [ADMIN] IN ('02000000000', '03000000000', '04000000000', '05000000000', '06000000000', '07000000000', '08000000000', '09000000000', '10000000000', '11000000000', '12000000000', '13000000000', '14000000000', '15000000000', '16000000000', '17000000000', '18000000000', '19000000000', '20000000000', '21000000000', '22000000000', '23000000000', '24000000000', '25000000000') AND [ECON] LIKE '3%' AND [DATE] >= '2020-07-01' AND [DATE] <= '2020-09-30' GROUP BY [ADMIN];"""
    expenses = db_connect(query_p2_4)
    incomes = collect_incomes()
    merged = pd.merge(expenses, incomes, on="ADMIN")
    merged.insert(0, "Область", merged["ADMIN"].map(ADMIN_REGIONS))
    merged["Населення"] = merged["Область"].map(POPULATION_MAP)
    # Per-capita personal income tax.
    merged["Податки на одну особу (p2_01)"] = (
        merged["Податок на дохід фіз осіб (для p2_01)"]
        / merged["Населення"])
    # Ratio of this year's transfer-free income to last year's.
    merged[
        "Дохід без міжбюдж. трансфертів порівняно з минулим періодом (p2_03)"] = (
            merged["Дохід без міжбюдж. трансфертів (p2_02)"]
            / merged["Дохід без міжбюдж. трансфертів _ ПОПЕРЕДНІЙ РІК"])
    merged.to_excel(PATH_INTERIM / "P2" / "P02_006.xlsx", index=False)
def all_tracks():
    """Render an HTML table of all tracks, linking each Name to its page.

    Each Name cell links to /tracks/<TrackId>.  Returns the table as an
    HTML string.
    """
    conn = db_connect()
    cur = conn.cursor()
    cur.execute(
        'SELECT Tracks.TrackId, Tracks.Name, Tracks.Composer, Tracks.Milliseconds/1000, albums.Title FROM albums LEFT JOIN Tracks ON Tracks.AlbumId = albums.AlbumId order by Tracks.TrackId asc;'
    )
    rows = cur.fetchall()
    # Removed: a second "SELECT TrackId FROM Tracks" whose result (IDs)
    # and the countLinks counter were never used.
    html = """
    <table class="table table-striped">
        <tr>
            <th>TrackID</th> <th>Name</th> <th>Composer</th>
            <th>Seconds</th> <th>Album</th>
        </tr>
    """
    for row in rows:
        # Bug fix: the link previously used a running row counter, which
        # only matches the real id when TrackIds are gapless and start at
        # 1.  Use the TrackId from the row itself (column 0).
        track_id = row[0]
        html += """<tr>"""
        for col_index, cell in enumerate(row):
            html += "<td style='color:green;'>"  # Open cell tag
            if col_index == 1:  # only the Name column gets a bound link
                html += "<a href='http://127.0.0.1:8000/tracks/{}' style='color:orange'>".format(
                    track_id)
            html += str(cell)  # Table cell content
            if col_index == 1:
                html += "</a>"  # close link tag
            html += "</td>"  # Close cell tag
        html += "</tr>"
    html += "</table>"
    return html
def insert_user(username: str, password: str):
    """Insert a new row into the user-accounts table.

    Returns a db_return_codes status: UA_INSERT_SUCCESS on success,
    UA_INSERT_FAILED_DUPLICATE when the username already exists, or
    UNHANDLED_ERROR when the insert produced no result object.
    """
    meta = sqlalchemy.MetaData()
    dbConnection, engine = db_utils.db_connect(ret_engine=True)
    user_accounts = sqlalchemy.Table(
        db_config.USER_ACCOUNTS_TBL_NAME, meta, autoload_with=engine)
    statement = user_accounts.insert().values(
        username=username, password=password)
    try:
        outcome = dbConnection.execute(statement)
        if not outcome:
            return db_return_codes.UNHANDLED_ERROR
    except sqlalchemy.exc.IntegrityError as e:
        # A unique-constraint violation means the username is taken.
        print(f"Attempted DB Creation of Duplicate username {username}")
        db_logger.log_error(
            e, "Warning: Attempted DB Creation of Duplicate User Name")
        return db_return_codes.UA_INSERT_FAILED_DUPLICATE
    print(f"User Accounts: Creation of username {username} successful.")
    db_logger.log_message(
        f"User Accounts: Creation of username {username} successful.")
    return db_return_codes.UA_INSERT_SUCCESS
def write_light_settings_to_database(a, b):
    """Store a light on/off schedule row in `light_settings`.

    Parameters
    ----------
    a, b : the start and stop values for the lights (stored as-is),
        together with a human-readable local timestamp.
    """
    con = db_connect()
    cur = con.cursor()
    # Removed: a commented-out CREATE TABLE block (schema lives elsewhere)
    # and a leftover `print(type(a))` debug statement.
    time = datetime.now().strftime("%B %d, %Y %I:%M%p")
    query = "INSERT INTO light_settings (start, stop, datetime) VALUES (?,?,?);"
    cur.execute(query, (a, b, time))
    con.commit()
    print("light settings have been written to the database")
def write_temperature_settings_to_database(a, b):
    """Store a min/max temperature settings row in `env_settings`.

    Accepts the bounds either as UTF-8 byte strings (as received from the
    message broker) or as plain ints/strings; both are coerced to int.
    """
    con = db_connect()
    cur = con.cursor()
    # Generalized: the original unconditionally called .decode(), which
    # raised AttributeError for non-bytes input.  Decode only when bytes
    # were passed; int() handles str and int alike.
    if isinstance(a, bytes):
        a = a.decode('utf-8')
    if isinstance(b, bytes):
        b = b.decode('utf-8')
    a = int(a)
    b = int(b)
    # Removed: commented-out CREATE TABLE block and debug type() prints.
    time = datetime.now().strftime("%B %d, %Y %I:%M%p")
    query = "INSERT INTO env_settings (min_temp, max_temp, datetime) VALUES (?,?,?);"
    cur.execute(query, (a, b, time))
    con.commit()
    print("temps settings have been written to the database")
def read_environment_temp_values_from_last_3_days():
    """Return the last 3 days of temperatures as a Google Charts payload.

    The payload's "cols" describe a (Time, Temperature) pair and "rows"
    hold one {"c": [...]} entry per reading, with row[5] the stored
    datetime and row[1] the temperature.
    """
    con = db_connect()
    cursor = con.cursor()
    # Fix: dropped the pointless f-string prefix (no placeholders).
    query = ("SELECT * FROM environment_values WHERE datetime BETWEEN "
             "DATETIME('now', '-3 day') AND DATETIME('now')")
    cursor.execute(query)
    records = cursor.fetchall()
    # Comprehension instead of the original append loop.
    environment_data = [
        {"c": [{"v": row[5]}, {"v": row[1]}]} for row in records
    ]
    payload = {
        "cols": [{"type": "string", "label": "Time"},
                 {"type": "number", "label": "Temperature"}],
        "rows": environment_data,
    }
    return payload
def write_environment_values_to_database(temperature, humidity, pressure, moisture):
    """Insert one sensor reading into `environment_values`.

    The row is timestamped by SQLite itself via DATETIME('now').
    """
    con = db_connect()
    cur = con.cursor()
    # Removed: commented-out CREATE TABLE block, a locally-formatted
    # `time` string that was never used (the INSERT timestamps via
    # DATETIME('now')), and two debug prints.
    query = ("INSERT INTO environment_values (temperature, humidity, pressure, "
             "moisture, datetime) VALUES (?,?,?,?,DATETIME('now'));")
    cur.execute(query, (temperature, humidity, pressure, moisture))
    con.commit()
    print("environment values have been written to the database")
def create_new_table(csv_url: str, new_table_name: str,
                     requesting_user: str = "NO_USER_SPECIFIED") -> str:
    """Load a CSV from `csv_url` into a new DB table and register it.

    Refuses reserved or already-registered table names.  On success the
    table is created (replacing any same-named table) and recorded in the
    uploads registry; if the registry insert fails, the new table is
    dropped again.  Returns a human-readable status string.
    """
    if new_table_name in reserved_tables:
        print(f"Sorry, {new_table_name} is reserved")
        return f"Sorry, {new_table_name} is reserved"
    dbConnection = db_utils.db_connect()
    # Reject names already present in the uploads registry.
    result = pd.read_sql(
        f"SELECT table_name FROM {db_config.USER_DB_UPLOADS_TBL_NAME};",
        dbConnection)
    if new_table_name in result['table_name'].tolist():
        print(f"Error, the table{new_table_name} is currently in use. Please select a different table name.")
        return f"Error, the table{new_table_name} is currently in use. Please select a different table name."
    try:
        df_new_table = pd.read_csv(csv_url)
    except Exception:
        # TODO: split the failure modes (bad URL, bad CSV, network error).
        return "Error, URL could not be read"
    df_new_table.to_sql(new_table_name, dbConnection, if_exists='replace')
    # Security fix: the registry INSERT was built from json.dumps()-quoted
    # f-string values, which is not SQL-safe quoting; bind the values
    # instead (sqlalchemy is already used by this module's connections).
    try:
        dbConnection.execute(
            sqlalchemy.text(
                f"INSERT INTO {db_config.USER_DB_UPLOADS_TBL_NAME}"
                "(table_name, data_url, username) VALUES (:tn, :du, :un);"),
            {"tn": new_table_name, "du": csv_url, "un": requesting_user})
    except Exception:
        # Registry insert failed: roll back by dropping the new table.
        # Bug fix: the original fell through to the success message even
        # after rolling back.
        dbConnection.execute(f"DROP TABLE IF EXISTS {new_table_name}")
        dbConnection.close()
        return f"Error, could not register table {new_table_name}; rolled back"
    dbConnection.close()
    db_logger.log_message(
        f"UAT Success: Created New Table {new_table_name} by {requesting_user}")
    return f"Success, Table Created: {new_table_name}"
def make_prozorro_dataset(start_date, end_date):
    """Build the open-procurement share dataset (P02_007.xlsx).

    Restricts completed tenders to those whose tenderID_UA-embedded date
    falls in [start_date, end_date], then computes, per BudgetCode, the
    share of procurement value awarded through the "open" method.
    """
    q = """ SELECT [DateInserted], [tenderID_UA], [ProzorroTenderID], [status], [date], [dateModified], [tenderPeriod_startDate], [tenderPeriod_endDate], [procurementMethod], [procurementMethodType], [value_amount], [BudgetName], [BudgetCode], [DepartmentalClassificationCode], [classification_id] FROM [Prozorro].[dbo].[obl_disposers_proc_V1];"""
    df = db_connect(q)
    # Bug fix: the pattern used "\-" in a non-raw string -- an invalid
    # escape sequence (DeprecationWarning today, an error in future
    # Python).  Use a raw string; the hyphen needs no escaping.
    df["DATE_FROM_ID"] = pd.to_datetime(
        df["tenderID_UA"].str.extract(r"UA-(\d{4}-\d{2}-\d{2})-", expand=False)
    )
    data = df.loc[
        df["DATE_FROM_ID"].between(start_date, end_date)
        & df["status"].eq("complete")
    ].copy()
    open_procurements = (
        data.loc[data["procurementMethod"].eq("open")]
        .groupby("BudgetCode")["value_amount"]
        .sum()
        .rename("open")
    )
    all_procurements = data.groupby("BudgetCode")["value_amount"].sum().rename("all")
    result = pd.concat([open_procurements, all_procurements], axis=1).reset_index()
    result["p2_06_raw"] = result["open"] / result["all"]
    result["region"] = result["BudgetCode"].map(ADMIN_REGIONS)
    (PATH_INTERIM / "P2").mkdir(parents=True, exist_ok=True)
    result.to_excel(PATH_INTERIM / "P2" / "P02_007.xlsx", index=False)
def query_user(username: str, password: str):
    """Validate a username/password pair against the user-accounts table.

    Returns UA_LOGIN_SUCCESS when a matching row exists, UA_LOGIN_FAILED
    when none does, UA_ERROR_SELECT_FAILED on an IntegrityError, or
    UNHANDLED_ERROR when the execute produced no result object.
    """
    meta = sqlalchemy.MetaData()
    dbConnection, engine = db_utils.db_connect(ret_engine=True)
    user_accounts = sqlalchemy.Table(
        db_config.USER_ACCOUNTS_TBL_NAME, meta, autoload_with=engine)
    credentials_match = sqlalchemy.and_(
        user_accounts.c.username == username,
        user_accounts.c.password == password)
    lookup = sqlalchemy.select(user_accounts.c.username).where(credentials_match)
    try:
        result = dbConnection.execute(lookup)
        if not result:
            return db_return_codes.UNHANDLED_ERROR
    except sqlalchemy.exc.IntegrityError as e:
        db_logger.log_error(e, "Error: DB SELECT Failed")
        return db_return_codes.UA_ERROR_SELECT_FAILED
    if result.rowcount == 0:
        # No row matched, so the credential pair does not exist.
        print(f"Login Failed, returning {db_return_codes.UA_LOGIN_FAILED}")
        return db_return_codes.UA_LOGIN_FAILED
    print(f"login Success, returning {db_return_codes.UA_LOGIN_SUCCESS}")
    return db_return_codes.UA_LOGIN_SUCCESS
def show_sivugim(db_path, grant_info, all_grants=False):
    """Summarize spending per sivug (budget subdivision) for a grant.

    For each sivug in the grant's dictionary, totals the money spent from
    the `orders` table (price * amount, taxed, currency-adjusted),
    computes the remainder and percentage spent, and hands the resulting
    table to gui_sivugim().
    """
    con = db_connect(db_path)
    cur = con.cursor()
    cur_grant = GRANT_DICTS[grant_info['Grant Number']]
    rows = []
    for sivug in cur_grant.keys():
        # These two grants are billed in USD; convert at a fixed 3.5 rate.
        if grant_info['Grant Number'] in ["3015002026", "3186000162"]:
            currency_const = 3.5
        else:
            currency_const = 1.0
        cur_sivug = cur_grant[sivug]
        sivug_total = cur_sivug['Amount']
        # Security fix: the query was built by %-interpolating values into
        # the SQL text; use bound parameters instead (sqlite-style "?").
        sivug_spent_sql = "SELECT price, amount FROM orders WHERE sivug_number = ?"
        params = [sivug]
        if not all_grants:
            sivug_spent_sql += " AND grant_number = ?"
            params.append(grant_info['Grant Number'])
        cur.execute(sivug_spent_sql, params)
        sivug_item_prices = cur.fetchall()
        sivug_spent = 0
        for price, amount in sivug_item_prices:
            sivug_spent += price * amount * TAX_CONST / currency_const
        sivug_remaining = sivug_total - sivug_spent
        if sivug_total == 0.0:
            percentage_spent = 0.0
        else:
            percentage_spent = np.round(
                sivug_spent / float(sivug_total), decimals=2) * 100.0
        rows.append({
            "Sivug Number": sivug,
            "Sivug Total": locale.format_string("%.2f", sivug_total, grouping=True),
            "Sivug Spent": locale.format_string("%.2f", sivug_spent, grouping=True),
            "Sivug Remaining": locale.format_string("%.2f", sivug_remaining, grouping=True),
            "Percentage Spent": percentage_spent,
        })
    # Fix: DataFrame.append() was removed in pandas 2.0; build the frame
    # in one shot from the collected row dicts instead.
    sivug_info_df = pd.DataFrame(
        rows, columns=['Sivug Number', 'Sivug Total', 'Sivug Spent',
                       'Sivug Remaining', 'Percentage Spent'])
    # get all sivugim totals
    gui_sivugim(db_path, grant_info, sivug_info_df, all_grants)
def get_user_hash(username: str) -> tuple:
    """Look up a user's stored password hash.

    Returns a (status_code, password_hash) tuple; the hash slot is 0 on
    any failure path.
    """
    meta = sqlalchemy.MetaData()
    dbConnection, engine = db_utils.db_connect(ret_engine=True)
    user_accounts = sqlalchemy.Table(db_config.USER_ACCOUNTS_TBL_NAME, meta,
                                     autoload_with=engine)
    s = sqlalchemy.select(user_accounts).where(
        user_accounts.c.username == username)
    try:
        result = dbConnection.execute(s)
        if not result:
            return db_return_codes.UNHANDLED_ERROR, 0
    except sqlalchemy.exc.IntegrityError as e:
        db_logger.log_error(e, "Error: DB SELECT Failed")
        return db_return_codes.UA_ERROR_SELECT_FAILED, 0
    if result.rowcount == 0:
        # No such username.
        return db_return_codes.UA_LOGIN_FAILED, 0
    elif result.rowcount == 1:
        username, password = result.fetchone()
    else:
        # Bug fix: this branch returned a bare status code instead of the
        # (code, hash) tuple the signature promises, which would break
        # callers that unpack the result.
        return db_return_codes.UNHANDLED_ERROR, 0
    return db_return_codes.UA_QUERY_SUCCESS, password
def delete_user(username: str, password: str):
    """Delete a user account after verifying its credentials.

    Returns UA_DELETE_USER_SUCCESS, UA_DELETE_USER_FAILED (bad
    credentials), or UNHANDLED_ERROR on a DB failure.
    """
    # Check if the password is valid.
    login_result = query_user(username=username, password=password)
    if login_result != db_return_codes.UA_LOGIN_SUCCESS:
        print("Delete User: Login Failed, cannot delete without valid un/pw")
        return db_return_codes.UA_DELETE_USER_FAILED
    meta = sqlalchemy.MetaData()
    dbConnection, engine = db_utils.db_connect(ret_engine=True)
    user_accounts = sqlalchemy.Table(db_config.USER_ACCOUNTS_TBL_NAME, meta,
                                     autoload_with=engine)
    try:
        # Bug fix: the original re-tested the stale login result here
        # instead of the DELETE statement's own result.
        delete_result = dbConnection.execute(
            user_accounts.delete().where(user_accounts.c.username == username))
        if not delete_result:
            print("Error: Unhandled DB Exception -- delete_user (No Result)")
            return db_return_codes.UNHANDLED_ERROR
    except Exception as e:
        print("Error: Unhandled DB Exception -- delete_user")
        db_logger.log_error(e, "Error: Unhandled DB Exception -- delete_user")
        return db_return_codes.UNHANDLED_ERROR
    db_logger.log_message(f"User Accounts: Deletion of username {username} successful")
    print(f"Deletion of user {username} successful")
    return db_return_codes.UA_DELETE_USER_SUCCESS
def allocation(timestamp=None):
    """Average per-region financing volumes, pivoted by financing stage.

    Queries the PostgreSQL "VB" schema, pivots the financing-stage names
    into columns, and derives p4_08_raw as the cash-spent / financed
    ratio in percent.  When `timestamp` is given, only rows with that
    TimeStampUpdate are returned.
    """
    q = """ SELECT "Dim_Regions"."ShortRegionName" as "region", AVG("dbo_FinancingAll"."Amount") as "Обсяг", "Dim_FinancingStage"."Name" as "Тип фінансування", "dbo_FinancingAll"."TimeStampUpdate" FROM "VB"."Dim_Objects" LEFT JOIN "VB"."Dim_Regions" ON "Dim_Objects"."RegionID" = "Dim_Regions"."ID" LEFT JOIN "VB"."Dim_Places" ON "Dim_Objects"."PlaceID" = "Dim_Places"."ID" LEFT JOIN "VB"."Dim_PlaceType" ON "Dim_Places"."PlaceTypeID" = "Dim_PlaceType"."ID" LEFT JOIN "VB"."dbo_FinancingAll" ON "Dim_Objects"."ID" = "dbo_FinancingAll"."ObjectID" LEFT JOIN "VB"."Dim_FinancingStage" ON "dbo_FinancingAll"."FinancingStageID" = "Dim_FinancingStage"."ID" LEFT JOIN "VB"."Dim_FinancingTypes" ON "dbo_FinancingAll"."FinancingTypeID" = "Dim_FinancingTypes"."ID" WHERE "dbo_FinancingAll"."FinancingStageID" IN (2,3) AND "dbo_FinancingAll"."FinancingTypeID" = 18 GROUP BY "Dim_Regions"."ShortRegionName", "Dim_FinancingStage"."Name", "dbo_FinancingAll"."TimeStampUpdate" ORDER BY "Dim_Regions"."ShortRegionName", "Dim_FinancingStage"."Name", "dbo_FinancingAll"."TimeStampUpdate";"""
    raw = db_connect(q, db="PostgreSQL")
    pivoted = raw.pivot(
        index=["region", "TimeStampUpdate"],
        columns="Тип фінансування",
        values="Обсяг",
    )
    frame = pivoted.fillna(0).reset_index()
    frame["p4_08_raw"] = frame["Касові видатки"] / frame["Профінансовано"] * 100
    if timestamp is None:
        return frame
    return frame.loc[frame["TimeStampUpdate"].eq(timestamp)]
def extract_game_map(args):
    """Parse args.game_levels ("game,lvl,lvl:game,lvl,...") into a map.

    NOTE(review): Python 2 only -- uses `long` and the `-100L` literal.
    Keys are (game_id, game_desc_file) tuples; numeric game ids are
    resolved through the database, non-numeric games get the sentinel id
    -100L and keep their literal name.  Values are tuples of
    (level_id, level_name) pairs.
    NOTE(review): a near-identical function of the same name is defined
    again later in this file and will shadow this one at import time --
    confirm which copy is intended.
    """
    game_levels = args.game_levels.split(":")
    game_map = {}
    db = None
    for game_level in game_levels:
        levels = game_level.split(",")
        game = levels[0]
        lvls = levels[1:]
        if (game.isdigit()):
            # Lazily open the DB connection only when a numeric id must
            # be resolved.
            if (db is None):
                db = db_utils.db_connect(args.db_properties)
            game_name = (long(game), db_utils.get_game_info(db, game)["game_desc_file"])
            lvl_rows = db_utils.get_levels_from_game(db, game)
            # Map level id (as string) -> level name.
            lvl_map = {str(row[0]): row[1] for row in lvl_rows}
            lvl_names = []
            for row in lvls:
                # Keep only the requested levels that the DB knows about.
                if (str(row) in lvl_map.keys()):
                    lvl_names.append((long(row), lvl_map[str(row)]))
            game_map[game_name] = tuple(lvl_names)
        else:
            # Non-numeric game: enumerate the given level names locally.
            lvl_names = zip(tuple(range(0, len(lvls))), lvls)
            game_map[(-100L, game)] = tuple(lvl_names)
    return game_map
def extract_game_map(args):
    """Parse args.game_levels ("game,lvl,lvl:game,lvl,...") into a map.

    NOTE(review): Python 2 only -- uses `long` and the `-100L` literal.
    NOTE(review): this is a duplicate of an identical function defined
    earlier in this file; this later definition is the one that wins.
    Keys are (game_id, game_desc_file) tuples; numeric game ids are
    resolved through the database, non-numeric games get the sentinel id
    -100L.  Values are tuples of (level_id, level_name) pairs.
    """
    game_levels = args.game_levels.split(":")
    game_map = {}
    db = None
    for game_level in game_levels:
        levels = game_level.split(",")
        game = levels[0]
        lvls = levels[1:]
        if(game.isdigit()):
            # Open the DB connection on first use only.
            if(db is None):
                db = db_utils.db_connect(args.db_properties)
            game_name = (long(game), db_utils.get_game_info(db,game)["game_desc_file"])
            lvl_rows = db_utils.get_levels_from_game(db,game)
            # Map level id (as string) -> level name.
            lvl_map= {str(row[0]): row[1] for row in lvl_rows}
            lvl_names = []
            for row in lvls:
                # Keep only the requested levels that the DB knows about.
                if(str(row) in lvl_map.keys()):
                    lvl_names.append((long(row),lvl_map[str(row)]))
            game_map[game_name] = tuple(lvl_names)
        else:
            # Non-numeric game: enumerate the given level names locally.
            lvl_names = zip(tuple(range(0,len(lvls))), lvls)
            game_map[(-100L,game)] = tuple(lvl_names)
    return game_map
def readiness(timestamp=None):
    """Average per-region construction readiness percentages over time.

    Queries the PostgreSQL "VB" schema for readiness-by-cost and
    readiness-by-acts averages per region and update timestamp.  When
    `timestamp` is given, only rows with that TimeStampUpdate are
    returned; otherwise the full history is returned.
    """
    q = """ SELECT "Dim_Regions"."ShortRegionName" AS "region", AVG("dbo_ReadinessLevelHistory"."PercentOfCost") AS "Готовність за касовими видатками", AVG("dbo_ReadinessLevelHistory"."PercentActs") AS "Готовність за актами", "dbo_ReadinessLevelHistory"."TimeStampUpdate" FROM "VB"."Dim_Objects" LEFT JOIN "VB"."Dim_Regions" ON "Dim_Objects"."RegionID" = "Dim_Regions"."ID" LEFT JOIN "VB"."Dim_Places" ON "Dim_Objects"."PlaceID" = "Dim_Places"."ID" LEFT JOIN "VB"."Dim_PlaceType" ON "Dim_Places"."PlaceTypeID" = "Dim_PlaceType"."ID" LEFT JOIN "VB"."dbo_ReadinessLevelHistory" ON "Dim_Objects"."ID" = "dbo_ReadinessLevelHistory"."ObjectID" GROUP BY "Dim_Regions"."ShortRegionName", "dbo_ReadinessLevelHistory"."TimeStampUpdate" ORDER BY "Dim_Regions"."ShortRegionName", "dbo_ReadinessLevelHistory"."TimeStampUpdate";"""
    frame = db_connect(q, db="PostgreSQL")
    if timestamp is None:
        return frame
    return frame.loc[frame["TimeStampUpdate"].eq(timestamp)]
from db_utils import db_connect con = db_connect() # connect to the database cur = con.cursor() # instantiate a cursor obj employees_sql = """ CREATE TABLE employees ( id integer PRIMARY KEY, name text NOT NULL, surname text NOT NULL, salary_year integer NOT NULL) """ cur.execute(employees_sql) taxes_sql = """ CREATE TABLE taxes ( id integer PRIMARY KEY, month date NOT NULL, taxes integer NOT NULL, employee_id integer NOT NULL, FOREIGN KEY (employee_id) REFERENCES employees(id)) """ cur.execute(taxes_sql) positions_sql = """ CREATE TABLE positions ( id integer PRIMARY KEY, internal_number integer NOT NULL, position text NOT NULL, employee_id integer NOT NULL, FOREIGN KEY (employee_id) REFERENCES employees(id)) """ cur.execute(positions_sql) result_db_sql = """ CREATE TABLE result_db (
from db_utils import db_connect

# Build the demo schema (a users table and a movies table), then list
# every table in the database to confirm both were created.
con = db_connect()
cur = con.cursor()

ddl_statements = (
    """ CREATE TABLE users ( id integer PRIMARY KEY, first_name text NOT NULL, last_name text NOT NULL)""",
    """ CREATE TABLE movies ( id integer PRIMARY KEY, title text NOT NULL)""",
)
for ddl in ddl_statements:
    cur.execute(ddl)

cur.execute('SELECT name FROM sqlite_master WHERE type="table"')
print(cur.fetchall())
@author: pi
"""
#!/usr/bin/python
# Example using a character LCD connected to a Raspberry Pi
import Adafruit_CharLCD as LCD
import Adafruit_DHT
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from db_utils import db_connect
from db_utils import formatResult
from db_utils import insertSQL
from datetime import datetime

# Connect to database
con = db_connect('SensorRead.db', timeout=10)
cur = con.cursor()
#cur.execute("SELECT name FROM sqlite_master where type = 'table'")
#tables = cur.fetchall()

# Create table: one per run, named TempHumid<YYYY_MM_DD_HH_MM_SS> from
# the current clock time so each session logs into a fresh table.
currtime = datetime.now()
currtime = currtime.strftime("%Y_%m_%d_%H_%M_%S")
tableName = "TempHumid" + currtime
# NOTE(review): the table name is %-interpolated because identifiers
# cannot be bound parameters; safe here since it comes from the clock,
# not user input.  The execute of table_sql appears to happen past the
# end of this view -- confirm.
table_sql = """ CREATE TABLE %s ( id integer, time_record datetime, temp integer, humid integer)""" % tableName
import os
from datetime import datetime as dt
import table_conventions as tb
import db_utils as db
import logging
import waisman_general_utils as u

# Smoke-test table_conventions.parse_tablename against every table name
# in the wtp_data DSN, collecting the distinct types and phases seen.
print('Doing unit tests of table_conventions')
logname = 'unittests_%s.txt' % dt.now().strftime('%d%m')
logging.basicConfig(filename=logname, level=logging.INFO)
logger = logging.getLogger()

con = db.db_connect(DSN='wtp_data')
tables = db.get_all_tablenames(con)

types = {}
phases = {}
for table in tables:
    ret = tb.parse_tablename(table, logger=logger)
    types[ret['type']] = None
    # Fix: a bare `except:` here silently swallowed every error (even
    # KeyboardInterrupt).  Only a missing 'phase' key is expected, so
    # catch exactly that.
    try:
        phases[ret['phase']] = None
    except KeyError:
        pass

print('types')
print(u.prettify_str(types))
print('phases')
print(u.prettify_str(phases))
from db_utils import db_connect, customer_entry, product_entry, query, product_query, inner_join, update, product_update,delete_mexico

# Open the module-level database connection used by the helpers below.
db_connect()


def customer_info():
    """Interactive customer menu.

    Optionally compares tables (inner_join), lists users (query), or
    updates an entry; 'q' at the update prompt quits the program.
    Finally prompts for a first/last name and inserts it via
    customer_entry().
    """
    print("Do you want to compare tables?")
    choice = input()
    if choice == 'y':
        inner_join()
    print("Do you want to see all the users?")
    choice = input()
    if choice == 'y':
        query()
    print("Do you want to update entry?")
    choice = input()
    if choice == 'y':
        print("Specify entry")
        entry = input()
        update(entry)
    elif choice == 'q':
        quit()
    first_name = input("First Name: ")
    last_name = input("Last Name: ")
    customer_entry(first_name, last_name)


def product_info():
    # NOTE(review): this definition appears truncated in this view; its
    # body continues beyond the visible source.
    print("Do you want to see all the products?")
args_str = ','.join( cur.mogrify("(%s,%s,%s,%s,%s,%s,%s)", q) for q in queries) cur.execute( "INSERT INTO object (object_id, picture_id, category_id," "segment, bbox, is_crowd, area) VALUES " + args_str) except Exception as error: print("annotations could not be inserted, nothing was commited...") print error sys.exit() # Postgres url must be in environment variable DATABASE_URL database, username, password, hostname, port = parse_postgresurl( os.environ["DATABASE_URL"]) conn = db_connect(database, username, password, hostname, port) cur = conn.cursor() training_file = '../annotations/instances_train2014.json' validation_file = '../annotations/instances_val2014.json' print("loading training file...") with open(training_file) as data_file: data = json.load(data_file) loadCategories(cur, data) loadPictures(cur, data) print("loading validation file...") with open(validation_file) as data_file: data = json.load(data_file) loadPictures(cur, data)
def insert_message():
    """Insert one hard-coded sample row into `messages` (schema smoke test)."""
    connection = db_connect('db_files/test.sqlite3')
    cursor = connection.cursor()
    message_sql = ("INSERT INTO messages (id, created_at, user_id, group_id, "
                   "name, text, system, favorited_by) "
                   "VALUES (?, ?, ?, ?, ?, ?, ?, ?)")
    sample_row = (
        '45678',
        1410202583,
        "141020258341304315",
        "9951046",
        "GroupMe",
        'Aidan Smith added Max Rash, Riley Anderson, and Logan Deyo to the group',
        0,
        '18803415,6517949',
    )
    cursor.execute(message_sql, sample_row)
    connection.commit()
def insert_member():
    """Insert one hard-coded sample row into `members` (schema smoke test)."""
    connection = db_connect('db_files/test.sqlite3')
    cursor = connection.cursor()
    member_sql = ("INSERT INTO members (id, messages_sent, likes_given, "
                  "likes_received, words_sent, likes_by_members, shared_likes, "
                  "self_likes) VALUES (?, ?, ?, ?, ?, ?, ?, ?)")
    sample_row = ('1234', 1, 2, 3, 4, 5, 6, 7)
    cursor.execute(member_sql, sample_row)
    connection.commit()
def admin_drop_all_user_accounts() -> None:
    """Admin utility: wipe every row from the user-accounts table."""
    connection = db_utils.db_connect()
    wipe_sql = f"delete from {db_config.USER_ACCOUNTS_TBL_NAME}"
    connection.execute(wipe_sql)
def process_log(args):
    """Parse a controller execution log and record the run outcome.

    NOTE(review): Python 2 only -- uses print statements and `long`.

    Reads run_id (line 1) and user_id (line 2) from the log header, then
    one result line per game played.  Depending on the final log level,
    updates the `runs` and `users` tables ('ok' / 'crash' / 'failed')
    and, on success, inserts one `matches` row per game.
    """
    date_format = '%Y-%m-%d %H:%M:%S'
    # Connect to database.
    db = db_utils.db_connect(args.db_properties)
    # This cursor allows access as in a dictionary:
    cur = db.cursor(mdb.cursors.DictCursor)
    score = -1
    log_file = args.execution_log
    now = dt.datetime.now()
    current_timestamp = now.strftime(date_format)
    log_level = "INFO";
    games = []
    with open(args.execution_log, 'r') as log_file :
        for i, line in enumerate(log_file):
            # Header lines: line 1 carries the run id, line 2 the user id.
            if(i == 1):
                run_id = long(line.split(" ")[-1])
            if(i == 2):
                user_id = long(line.split(" ")[-1])
            if(i>2):
                splitted = line[:-1].split(" ")
                print run_id, user_id, splitted
                if(splitted[2] != "INFO" ):
                    # check if we have reached the end of the file
                    current_timestamp = now.strftime(splitted[0] + " " + splitted[1] )
                    if(splitted[2] == "DEBUG"):
                        break
                    else:
                        # A non-INFO, non-DEBUG level ends the run with an error.
                        log_level = splitted[2]
                        error_msg = " ".join(map(str,splitted[3:]))
                        break
                else:
                    # Result line: fields 3/4 carry "...<game_id>,"/"...<level_id>,",
                    # field 5 the score, field 6 the recorded actions file.
                    g_split = splitted[3].split(",")
                    game_id = long(g_split[0][-1:])
                    l_split = splitted[4].split(",")
                    level_id = long(l_split[0][-1:])
                    score = long(splitted[5])
                    action_file = splitted[6]
                    games.append((game_id,level_id,score, action_file))
    print log_level, run_id, user_id, games, current_timestamp
    if log_level == 'INFO':
        # total score:
        score = calc_score(games)
        # Set controller status to the proper new state:
        cur.execute("UPDATE runs set run_state = 'ok', end_time = %s, run_log_file = %s, score = %s where run_id = %s", (current_timestamp,args.execution_log,score,run_id))
        # NOTE(review): the params argument `(user_id)` is a bare value,
        # not a one-tuple; MySQLdb appears to tolerate this but
        # `(user_id,)` is the unambiguous form -- confirm before changing.
        cur.execute("UPDATE users set controller_status = 'OK' where user_id = %s", (user_id))
        # One match per game played into the database:
        for game in games:
            game_id = game[0]
            level_id = game[1]
            score = game[2]
            actions_file = game[3]
            cur.execute("REPLACE INTO matches (run_id,level_id,game_id,user_id,human_play,actions_file,score) VALUES (%s,%s,%s,%s,0,%s,%s)",(run_id,level_id,game_id,user_id,actions_file,score))
    elif log_level == 'ERROR':
        # Set controller status to the proper new state:
        cur.execute("UPDATE runs set run_state = 'crash', end_time = %s, run_log_file = %s, run_msg = %s where run_id = %s", (current_timestamp,args.execution_log,error_msg,run_id))
        cur.execute("UPDATE users set controller_status = 'crash' where user_id = %s", (user_id))
    elif log_level == 'CRITICAL':
        print (current_timestamp,args.execution_log,error_msg,run_id)
        # Set controller status to the proper new state:
        cur.execute("UPDATE runs set run_state = 'failed', end_time = %s, run_log_file = %s, run_msg = %s where run_id = %s", (current_timestamp,args.execution_log,error_msg,run_id))
        cur.execute("UPDATE users set controller_status = 'failed' where user_id = %s", (user_id))