def log_exec_status(job_id: str, exec_stage: str, exec_status: str, job_details: str):
    """Record (stage, status, details) for a job in the execution-status table.

    Uses a Postgres-specific upsert: insert the row, or update it in place
    when the `uq_job_id` unique constraint already holds a row for job_id.
    Failures are logged, never raised to the caller.
    """
    # Same payload feeds both the INSERT values and the ON CONFLICT update.
    payload = dict(stage=exec_stage,
                   status=exec_status,
                   details=json.dumps(job_details))
    with engine.connect() as connection:
        # Postgres dialect insert() supports on_conflict_do_update.
        upsert_stmt = (
            insert(ex_stat)
            .values(job_id=job_id, **payload)
            .on_conflict_do_update(constraint='uq_job_id', set_=payload)
        )
        try:
            connection.execute(upsert_stmt)
        except Exception as e:
            current_app.logger.error("Insert/Update failed Execution status")
            current_app.logger.exception(e)
def update_profil(username, profil):
    """Set the `profil` column for the user row matching `username`."""
    stmt = (
        User_tbl.update()
        .values(profil=profil)
        .where(User_tbl.c.username == username)
    )
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def supp_secteur(nom_secteur, nom_chantier):
    """Delete the sector identified by (nom_secteur, nom_chantier)."""
    stmt = Secteur_tbl.delete().where(
        (Secteur_tbl.c.nom_secteur == nom_secteur)
        & (Secteur_tbl.c.nom_chantier == nom_chantier))
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def get_user_cards(id):
    """Return the card numbers stored for the given user id.

    (`id` shadows the builtin but is kept for caller compatibility.)
    """
    stmt = select([Credit_tbl.c.card_num]).where(Credit_tbl.c.id == id)
    with engine.connect() as conn:
        # BUG FIX: the original returned the live ResultProxy AFTER closing
        # the connection, so callers could not iterate it. Materialize the
        # rows while the connection is still open; the returned list of
        # RowProxy objects iterates the same way for callers.
        return conn.execute(stmt).fetchall()
def update_graph_3D(focus, sizeref, secteur_selected, chantier):
    """Build the 3D vector figure for the selected sector of a worksite.

    Returns an empty figure when no sector is selected, or when an expected
    key/column is missing (the original KeyError fallback is preserved).
    """
    if secteur_selected == {}:
        return empty_figure()
    try:
        df = format_df_vector(
            memoized_data(chantier, "actif", "topographie", "topo.csv")
        )
        secteur = secteur_selected["secteur"]
        cibles = secteur_selected["cible"]
        with engine.connect() as con:
            # SECURITY FIX: the original interpolated `chantier` and
            # `secteur` straight into the SQL strings; use bound parameters.
            masked_query = text(
                "SELECT * FROM cible_param "
                "WHERE nom_chantier = :chantier AND masquer = 'x'")
            sector_query = text(
                "SELECT * FROM secteur "
                "WHERE nom_chantier = :chantier AND nom_secteur = :secteur")
            secteur_params = pd.read_sql_query(
                sector_query, con=con,
                params={"chantier": chantier, "secteur": secteur})
            cibles_masque = pd.read_sql_query(
                masked_query, con=con,
                params={"chantier": chantier}).nom_capteur.tolist()
        # Drop masked targets. remove() strips only the FIRST occurrence,
        # matching the original behavior when `cible` has duplicates.
        liste_cibles = df.cible.tolist()
        for masked in cibles_masque:
            try:
                liste_cibles.remove(masked)
            except ValueError:
                pass  # masked sensor not present in this dataset
        df = df[df.cible.isin(liste_cibles)]
        # Bounding box of the sector (single-row query result expected).
        lat1 = secteur_params.lat1[0]
        lat2 = secteur_params.lat2[0]
        lon1 = secteur_params.lon1[0]
        lon2 = secteur_params.lon2[0]
        return graph_vecteurs(df, lat1, lat2, lon1, lon2, secteur, cibles,
                              sizeref, focus)
    except KeyError:
        return empty_figure()
def get_360(matching_id):
    """Assemble a 360° view of a contact: details, donations, shifts, adoptions.

    Looks up all non-archived pdp_contacts rows for `matching_id`, then for
    each linked source pulls the matching records:
      - salesforcecontacts -> salesforcedonations rows (prefix match on id)
      - volgistics         -> volgisticsshifts rows (with date normalization)
      - shelterluvpeople   -> adoption events from the Shelterluv HTTP API
    Returns a Flask JSON response of the form {"result": {...}}.
    """
    result = {}
    with engine.connect() as connection:
        query = text("select * from pdp_contacts where matching_id = :matching_id and archived_date is null")
        query_result = connection.execute(query, matching_id=matching_id)
        result["contact_details"] = [dict(row) for row in query_result]
        for row in result["contact_details"]:
            if row["source_type"] == "salesforcecontacts":
                # Prefix LIKE: donation contact ids extend the contact's id.
                donations_query = text("select * from salesforcedonations where contact_id like :salesforcecontacts_id")
                salesforce_contacts_query_result = connection.execute(donations_query, salesforcecontacts_id=row["source_id"] + "%")
                salesforce_donations_results = [dict(row) for row in salesforce_contacts_query_result]
                result['donations'] = salesforce_donations_results
            if row["source_type"] == "volgistics":
                shifts_query = text("select * from volgisticsshifts where number = :volgistics_id")
                volgistics_shifts_query_result = connection.execute(shifts_query, volgistics_id=row["source_id"])
                volgisticsshifts_results = []
                # todo: temporary fix until formatted in the pipeline
                for r in volgistics_shifts_query_result:
                    shifts = dict(r)
                    # normalize date string ("from" column) to YYYY-MM-DD
                    parsed_date_from = dateutil.parser.parse(shifts["from"], ignoretz=True)
                    normalized_date_from = parsed_date_from.strftime("%Y-%m-%d")
                    shifts["from"] = normalized_date_from
                    volgisticsshifts_results.append(shifts)
                result['shifts'] = volgisticsshifts_results
            if row["source_type"] == "shelterluvpeople":
                adoptions = []
                # NOTE(review): plain http for an API-key-authenticated call —
                # confirm whether this should be https.
                person = requests.get("http://shelterluv.com/api/v1/people/{}".format(row["source_id"]),
                                      headers={"x-api-key": SHELTERLUV_SECRET_TOKEN})
                person_json = person.json()
                animal_ids = person_json["Animal_ids"]
                for animal_id in animal_ids:
                    animal_events = requests.get("http://shelterluv.com/api/v1/animals/{}/events".format(animal_id),
                                                 headers={"x-api-key": SHELTERLUV_SECRET_TOKEN})
                    animal_events_json = animal_events.json()
                    for event in animal_events_json["events"]:
                        for adoption in event["AssociatedRecords"]:
                            if adoption["Type"] == "Person" and adoption["Id"] == row["source_id"]:
                                # Replace the AssociatedRecords payload with the
                                # full animal details before collecting the event.
                                # NOTE(review): deletes the list being iterated;
                                # iteration continues on the detached object.
                                del event["AssociatedRecords"]
                                animal_details = requests.get(
                                    "http://shelterluv.com/api/v1/animals/{}".format(animal_id),
                                    headers={"x-api-key": SHELTERLUV_SECRET_TOKEN})
                                animal_details_json = animal_details.json()
                                event["animal_details"] = animal_details_json
                                adoptions.append(event)
                result['adoptions'] = adoptions
    return jsonify({'result': result})
def get_contacts(search_text):
    """Search contacts by name prefix and return them as a JSON response.

    Two space-separated tokens match first/last name in either order; a
    single token matches either name. Results are ordered by last, first.
    """
    with engine.connect() as connection:
        search_text = search_text.lower()
        names = search_text.split(" ")
        if len(names) == 2:
            query = text(
                "select * from pdp_contacts where archived_date is null AND ( \
                (lower(first_name) like lower(:name1) and lower(last_name) like lower(:name2)) \
                OR (lower(first_name) like lower(:name2) and lower(last_name) like lower(:name1)) )\
                order by lower(last_name), lower(first_name)")
            query_result = connection.execute(query,
                                              name1='{}%'.format(names[0]),
                                              name2='{}%'.format(names[1]))
        elif len(names) == 1:
            # NOTE(review): unlike the two-token branch, this query does not
            # filter archived_date — confirm whether that is intentional.
            query = text("select * from pdp_contacts \
                WHERE lower(first_name) like lower(:search_text) \
                OR lower(last_name) like lower(:search_text) order by lower(last_name), lower(first_name)")
            query_result = connection.execute(
                query, search_text='{}%'.format(search_text))
        else:
            # BUG FIX: the original left query_result undefined for 0 or 3+
            # tokens and raised NameError; return an empty result instead.
            query_result = []
        query_result_json = [dict(row) for row in query_result]
        return jsonify({'result': query_result_json})
def get_person_animal_events(matching_id, animal_id):
    """Return {animal_id: [events]} linking one animal to a matched person.

    Resolves the shelterluv person for `matching_id` from pdp_contacts, then
    keeps every Shelterluv event on `animal_id` whose AssociatedRecords
    reference that person. Returns a plain dict (not a Flask response).
    """
    matched_events = []
    with engine.connect() as connection:
        lookup = text(
            "select * from pdp_contacts where matching_id = :matching_id and source_type = 'shelterluvpeople' and archived_date is null"
        )
        contact_rows = [dict(r) for r in
                        connection.execute(lookup, matching_id=matching_id)]
        if contact_rows:
            shelterluv_id = contact_rows[0]["source_id"]
            animal_url = f"http://shelterluv.com/api/v1/animals/{animal_id}/events"
            event_details = requests.get(
                animal_url,
                headers={"x-api-key": SHELTERLUV_SECRET_TOKEN}).json()
            for event in event_details["events"]:
                for record in event["AssociatedRecords"]:
                    if record["Type"] == "Person" and record["Id"] == shelterluv_id:
                        matched_events.append(event)
    return {animal_id: matched_events}
def get_animals(matching_id):
    """Return {animal_id: details} for every animal tied to a matched person.

    Resolves the shelterluv person for `matching_id` from pdp_contacts, then
    fetches each animal's details from the Shelterluv HTTP API. Returns an
    empty dict when no shelterluv contact row exists.
    """
    result = {}
    with engine.connect() as connection:
        lookup = text(
            "select * from pdp_contacts where matching_id = :matching_id and source_type = 'shelterluvpeople' and archived_date is null"
        )
        contact_rows = [dict(r) for r in
                        connection.execute(lookup, matching_id=matching_id)]
        if contact_rows:
            shelterluv_id = contact_rows[0]["source_id"]
            auth_headers = {"x-api-key": SHELTERLUV_SECRET_TOKEN}
            person_url = f"http://shelterluv.com/api/v1/people/{shelterluv_id}"
            person_details = requests.get(person_url, headers=auth_headers).json()
            for animal_id in person_details["Animal_ids"]:
                animal_url = f"http://shelterluv.com/api/v1/animals/{animal_id}"
                result[animal_id] = requests.get(animal_url,
                                                 headers=auth_headers).json()
    return result
def get_360(salesforce_id):
    """Aggregate master/salesforce/petpoint/volgistics data for one contact.

    Returns a Flask JSON response keyed by source. Sources with no matching
    row are simply absent (salesforcecontacts defaults to None).
    """
    result = {'salesforcecontacts': None}
    with engine.connect() as connection:
        # SECURITY FIX: all four queries were built with str.format on
        # caller-supplied ids; use bound parameters instead.
        master_rows = [dict(row) for row in connection.execute(
            text("select * from master where salesforcecontacts_id = :sid"),
            sid=salesforce_id)]
        master_row = {'result': master_rows}

        sf_rows = [dict(row) for row in connection.execute(
            text("select * from salesforcecontacts where contact_id = :sid"),
            sid=salesforce_id)]
        if sf_rows:
            # Guarded: the original indexed [0] unconditionally and crashed
            # on an empty result.
            result['salesforcecontacts'] = sf_rows[0]

        if master_rows:
            pp_rows = [dict(row) for row in connection.execute(
                text("select * from petpoint where outcome_person_num = :pid"),
                pid=master_rows[0]['petpoint_id'])]
            if pp_rows:
                # BUG FIX: the original guard `if 'petpoint' in result` was
                # always False, so petpoint data was never returned.
                result['petpoint'] = pp_rows[0]

            if master_rows[0]['volgistics_id']:
                vg_rows = [dict(row) for row in connection.execute(
                    text("select * from volgistics where number = :vid"),
                    vid=master_rows[0]['volgistics_id'])]
                if vg_rows:
                    # BUG FIX: same always-False guard as petpoint above.
                    result['volgistics'] = vg_rows[0]
    return jsonify(result)
def user_refresh():
    """If user still active, send back an access_token with a new expiration stamp."""
    old_jwt = jwt_ops.validate_decode_jwt()
    # Bad tokens are handled (and an error message sent) by jwt_required()
    # before we get here; a falsy decode means nothing to refresh.
    if not old_jwt:
        return None
    user_name = old_jwt['sub']
    with engine.connect() as connection:
        stmt = text("""select active from pdp_users where username=:u """)
        stmt = stmt.bindparams(u=user_name)
        db_result = connection.execute(stmt)
        # No row -> valid JWT for a username that is gone from the table.
        if not db_result.rowcount:
            log_user_action(
                user_name, "Failure",
                "Valid JWT presented for refesh attempt on unknown username"
            )
            return jsonify("Bad credentials"), 401
        is_active = db_result.fetchone()
        # Still marked Active ('y') -> mint a fresh token with the same role.
        if is_active[0].lower() == 'y':
            token = jwt_ops.create_token(user_name, old_jwt['role'])
            log_user_action(user_name, "Success", "Refreshed token")
            return token
        return jsonify("Bad credentials"), 401
def afficher_chantier():
    """Return every worksite as a dict of (nom_chantier, username, adresse)."""
    stmt = select([
        Chantier_tbl.c.nom_chantier,
        Chantier_tbl.c.username,
        Chantier_tbl.c.adresse,
    ])
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        return [
            {"nom_chantier": nom, "username": user, "adresse": adresse}
            for nom, user, adresse in conn.execute(stmt)
        ]
def list_statistics():
    """ Pull Last Execution stats from DB. """
    current_app.logger.info("list_statistics() request")
    last_execution_details = '{}'  # Empty but valid JSON
    engine.dispose()  # we don't want other process's conn pool
    with engine.connect() as conn:
        try:
            # See Alembic Revision ID: 05e0693f8cbb for table definition
            stmt = text(
                "select valcol from kv_unique where keycol = 'last_execution_time';"
            )
            db_result = conn.execute(stmt)
            if db_result.rowcount > 0:
                last_execution_details = db_result.fetchone()[0]
        except Exception:
            # Expected on first run (row not yet written); keep the default.
            current_app.logger.error(
                "Failure reading Last Execution stats from DB - OK on first run"
            )
    return last_execution_details
def update_table_chantier(page):
    """Convert the `chantier` SQL table into a list of record dicts."""
    wanted_columns = ["nom_chantier", "username", "adresse"]
    with engine.connect() as con:
        chantier_df = pd.read_sql("chantier", con=con)
    return chantier_df[wanted_columns].to_dict("records")
def get_user_count():
    """Return number of records in pdp_users table."""
    with engine.connect() as connection:
        count_row = connection.execute(
            text("select count(user) from pdp_users;")
        ).fetchone()
    return jsonify(count_row[0])
def load_to_postgres(csv_path, table_name, drop_first_col=False):
    """Load a CSV file into a freshly (re)created Postgres table.

    The CSV is read with cp1252 encoding; headers are normalized before the
    table is dropped and rebuilt from the dataframe.
    """
    df = pd.read_csv(csv_path, encoding='cp1252')
    # So far all csvs have had a first column that's an unnamed index.
    if drop_first_col:
        df = df.drop(df.columns[0], axis=1)
    # Normalize headers: lowercase, strip whitespace, spaces -> underscores,
    # runs of periods -> a single underscore.
    df.columns = (
        df.columns.str.lower()
        .str.strip()
        .str.replace(' ', '_')
        .map(lambda col: re.sub(r'\.+', '_', col))
    )
    # Get connection from engine; the with clause handles transaction cleanup.
    with engine.connect() as connection:
        connection.execute(f'DROP TABLE IF EXISTS {table_name}')
        current_app.logger.info('Creating table: ' + table_name)
        df.to_sql(
            table_name,
            engine,
            index=False,
        )
        current_app.logger.info('Finished creating generic table for: ' + table_name)
def __find_new_rows(result, table_name):
    """Find staged rows not yet in `table_name` and insert them.

    Left-joins <table_name>_stage against <table_name> on the source id
    column from DATASOURCE_MAPPING; rows with no match are new. Each new row
    is recorded (tracked columns plus a full 'json' snapshot and a
    'created_date' timestamp), appended to result['new_rows'][table_name],
    and bulk-inserted into the live table.
    """
    source_id = DATASOURCE_MAPPING[table_name]['id']
    current_app.logger.info(table_name + ' ' + source_id)
    with engine.connect() as conn:
        # find new rows: stage rows whose id has no counterpart in the
        # live table (identifiers come from trusted internal mapping).
        rows = conn.execute(
            'select t.* from {} t left join {} v on v."{}" = t."{}" where v."{}" is null'
            .format(table_name + "_stage", table_name, source_id, source_id,
                    source_id))
        rows_data = []
        now = datetime.now()
        tracked_columns = DATASOURCE_MAPPING[table_name]['tracked_columns']
        for row in rows:
            row_dict = {}
            json_dict = {}
            for key_value in row.items():
                if key_value[0] in tracked_columns:
                    # Volgistics ids are coerced to str; other tracked
                    # values are kept as-is.
                    if key_value[0] == DATASOURCE_MAPPING['volgistics']['id']:
                        row_dict[key_value[0]] = str(key_value[1])
                    else:
                        row_dict[key_value[0]] = key_value[1]
                    # Raw (uncoerced) value kept for the JSON snapshot.
                    json_dict[key_value[0]] = key_value[1]
            row_dict['json'] = json_dict
            row_dict['created_date'] = now
            rows_data.append(row_dict)
        result['new_rows'][table_name] = rows_data
        ins = meta.tables[table_name].insert()
        conn.execute(ins, rows_data)
def get_360(matching_id):
    """Return contact details plus volgistics shifts for a matched contact.

    shifts/donations/adoptions default to empty lists; only volgistics
    shifts are currently populated.
    """
    result = {}
    with engine.connect() as connection:
        query = text("select * from pdp_contacts where matching_id = :matching_id and archived_date is null")
        query_result = connection.execute(query, matching_id=matching_id)
        result["contact_details"] = [dict(row) for row in query_result]
        result["shifts"] = []
        result["donations"] = []
        result["adoptions"] = []
        # todo: complete retrieving details for response
        # BUG FIX: the original looped over `query_result` again, but the
        # ResultProxy was already exhausted by the comprehension above, so
        # shifts were never fetched. Iterate the materialized rows instead.
        for row in result["contact_details"]:
            if row["source_type"] == "volgistics":
                shifts_query = text("select * from volgisticsshifts where number = :volgistics_id")
                shifts_result = connection.execute(
                    shifts_query, volgistics_id=row["source_id"])
                result["shifts"] += [dict(r) for r in shifts_result]
    return jsonify({'result': result})
def show_plan():
    """Return all (strategy_name, stock_code, capital) plan rows."""
    stmt = select([
        Plan_tbl.c.strategy_name,
        Plan_tbl.c.stock_code,
        Plan_tbl.c.capital,
    ])
    with engine.connect() as conn:
        # BUG FIX: the original returned the live ResultProxy AFTER closing
        # the connection, so callers could not iterate it. Materialize the
        # rows first; the returned list iterates the same way for callers.
        return conn.execute(stmt).fetchall()
def add_plan(user_id, strategy_name, stock_code, capital):
    """Insert one trading-plan row for a user."""
    stmt = Plan_tbl.insert().values(
        user_id=user_id,
        strategy_name=strategy_name,
        stock_code=stock_code,
        capital=capital,
    )
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def reset_habit(username: str, habit: str):
    """Reset the counter to 0 for the (username, habit) row.

    Returns 0 (kept for caller compatibility).
    """
    stmt = (
        Habit_tbl.update()
        .values(count=0)
        .where(
            # BUG FIX: the original used Python's `and` between the two
            # column comparisons, which does NOT build a SQL conjunction;
            # `&` produces the intended `username = ... AND habit = ...`.
            (Habit_tbl.c.username == username)
            & (Habit_tbl.c.habit == habit)
        )
    )
    # Context manager also fixes the connection leak on execute() failure.
    with engine.connect() as conn:
        conn.execute(stmt)
    return 0
def update_choix_chantier(page):
    """Return dropdown options (label/value pairs) for the account's worksites."""
    with engine.connect() as con:
        # NOTE(review): username looks like a redacted placeholder.
        query = f"SELECT * FROM chantier where username = '******'"
        noms = pd.read_sql_query(query, con=con).nom_chantier.tolist()
    # An empty `noms` yields [] here, matching the original's explicit
    # empty-list branch.
    return [{"label": nom, "value": nom} for nom in noms]
async def execute_sql(sql: str) -> ResultProxy:
    """Execute a raw SQL string and return its ResultProxy.

    NOTE(review): the connection is closed in `finally` before the caller
    consumes the returned ResultProxy; depending on the dialect the proxy
    may be unusable after close — confirm callers fetch eagerly or switch
    to returning fetched rows.
    """
    conn = engine.connect()
    try:
        return conn.execute(sql)
    finally:
        # The original's `except BaseException as e: raise e` was a no-op
        # re-raise (and mangled the traceback); `finally` alone suffices.
        conn.close()
def add_forecast(stock_code, future):
    """Insert one forecast row per (date index, 'close') entry in `future`."""
    # One connection for the whole batch; the context manager releases it
    # even if an insert raises — the original leaked it on error.
    with engine.connect() as conn:
        for date_index, row in future.iterrows():
            stmt = Forecast_tbl.insert().values(
                stock_code=stock_code,
                date=date_index,
                close=row['close'],
            )
            conn.execute(stmt)
def maj_secteur(nom_secteur, nom_chantier, lat1, lat2, lon1, lon2):
    """Update the bounding box of the sector (nom_secteur, nom_chantier)."""
    stmt = (
        Secteur_tbl.update()
        .values(lat1=lat1, lat2=lat2, lon1=lon1, lon2=lon2)
        .where((Secteur_tbl.c.nom_secteur == nom_secteur)
               & (Secteur_tbl.c.nom_chantier == nom_chantier))
    )
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def add_optimal(strategy_name, stock_code, capital, x, y):
    """Insert one optimal-parameters row for a strategy/stock pair."""
    stmt = Optimal_tbl.insert().values(
        strategy_name=strategy_name,
        stock_code=stock_code,
        capital=capital,
        x=x,
        y=y,
    )
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def update_password(username, password):
    """Hash `password` (werkzeug, sha256 method) and store it for `username`."""
    hashed_password = generate_password_hash(password, method="sha256")
    stmt = (
        User_tbl.update()
        .values(password=hashed_password)
        .where(User_tbl.c.username == username)
    )
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def add_user(username, password, role):
    """Insert a new user row with a hashed password and a role."""
    hashed_password = generate_password_hash(password, method='sha256')
    stmt = User_tbl.insert().values(
        username=username,
        password=hashed_password,
        role=role,
    )
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def maj_capteur(nom_capteur, nom_chantier, type, lat, lon):
    """Update the position of the sensor (nom_capteur, nom_chantier).

    NOTE(review): `type` shadows the builtin and is unused here; the name is
    kept because callers may pass it as a keyword.
    """
    stmt = (
        Capteur_tbl.update()
        .values(lat=lat, lon=lon)
        .where((Capteur_tbl.c.nom_capteur == nom_capteur)
               & (Capteur_tbl.c.nom_chantier == nom_chantier))
    )
    # Context manager returns the connection to the pool even if execute()
    # raises — the original manual connect()/close() leaked it on error.
    with engine.connect() as conn:
        conn.execute(stmt)
def show_all_credit():
    """Print every (id, card_num) row from the credit table."""
    stmt = select([Credit_tbl.c.id, Credit_tbl.c.card_num])
    # Context manager returns the connection to the pool even if execute()
    # or print() raises — the original manual connect()/close() leaked it.
    with engine.connect() as conn:
        for row in conn.execute(stmt):
            print(row)
from sqlalchemy import Table, Column, ForeignKey, MetaData from sqlalchemy.dialects.mysql import VARCHAR, BOOLEAN, TEXT, INTEGER, DATE from datetime import datetime from config import engine metadata = MetaData() connection = engine.connect() department = Table( "Department", metadata, Column("id", INTEGER, primary_key=True), Column("name", VARCHAR(127), nullable=False) ) customer = Table( "Customer", metadata, Column("id", INTEGER, primary_key=True), Column("first_name", VARCHAR(127), nullable=False), Column("last_name", VARCHAR(127), nullable=False), Column("department_id", INTEGER, ForeignKey("department.id"), nullable=True), ) computer = Table( "Computer", metadata, Column("id", INTEGER, primary_key=True), Column("customer_id", INTEGER, ForeignKey("customer.id"), nullable=False), Column("po_number", VARCHAR(127), nullable=True), Column("hw_type", TEXT, nullable=True), Column("customization", TEXT, nullable=True), Column("notes", TEXT, nullable=True),