def get_nearest_poi():
    lat = request.args.get('lat', -6.6160933)
    lon = request.args.get('lon', 106.8266368)
    radius = request.args.get('radius', 1000)
    lonlat = str(lon) + ', ' + str(lat)
    engine = create_engine(config['default'].SQLALCHEMY_BINDS['kotabogor'])
    sql = """SELECT kegiatan, nama_objek,
                    ST_X(st_transform(wkb_geometry, 4326)) as longitude,
                    ST_Y(st_transform(wkb_geometry, 4326)) as latitude,
                    wkb_geometry
             FROM tematik.gis_poi
             WHERE ST_DWithin(wkb_geometry,
                              ST_TRANSFORM(ST_SetSRID(ST_MakePoint(%s), 4326), 32748),
                              %s)""" % (lonlat, radius)
    with engine.connect() as con:
        result = con.execute(sql)
        resultkey = result.keys()  # reuse the result instead of executing the query a second time
        rows = result.fetchall()
        output = []
        for item in rows:
            inner = {}
            for key, val in zip(resultkey, item):
                inner[key] = str(val)
            output.append(inner)
    return jsonify({'status': 'success', 'data': output})
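# A hedged aside: the %-interpolation above splices lat/lon/radius straight from
# request.args into the SQL string, which is injectable. A minimal sketch of the same
# query with bound parameters via sqlalchemy.text(); the name get_nearest_poi_safe and
# the reduced column list are illustrative, not from the source:
def get_nearest_poi_safe():
    lat = float(request.args.get('lat', -6.6160933))
    lon = float(request.args.get('lon', 106.8266368))
    radius = float(request.args.get('radius', 1000))
    engine = create_engine(config['default'].SQLALCHEMY_BINDS['kotabogor'])
    sql = text("""SELECT kegiatan, nama_objek
                  FROM tematik.gis_poi
                  WHERE ST_DWithin(wkb_geometry,
                                   ST_TRANSFORM(ST_SetSRID(ST_MakePoint(:lon, :lat), 4326), 32748),
                                   :radius)""")
    with engine.connect() as con:
        # the driver escapes :lon/:lat/:radius, so user input never touches the SQL text
        result = con.execute(sql, {'lon': lon, 'lat': lat, 'radius': radius})
        return [dict(zip(result.keys(), row)) for row in result.fetchall()]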
def execute_sql(self, action=""):
    """Database operation; currently supports custom (empty), insert, and delete.

    :param action: "": execute the caller-supplied SQL statement (sql_statement)
                   insert: execute the insert statement
                   delete: execute the delete statements
    :return:
    """
    with engine.connect() as connection:
        trans = connection.begin()
        try:
            if action == "insert":
                sql_operating = self.insert_sql()
                connection.execute(sql_operating)
            if action == "delete":
                sql_operating = self.delete_sql()
                for i in sql_operating:
                    connection.execute(i)  # execute each delete statement, not the whole list
            if action == "":
                sql_operating = self.sql_statement
                connection.execute(sql_operating)
            trans.commit()
        except Exception as e:
            trans.rollback()  # roll back the transaction
            print("SQL operation failed: " + str(e))
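# A minimal alternative sketch: SQLAlchemy's engine.begin() context manager commits on
# success and rolls back on any exception, removing the manual trans bookkeeping above.
# Assumes the same module-level engine and self.insert_sql()/self.delete_sql() helpers:
def execute_sql_autocommit(self, action=""):
    with engine.begin() as connection:  # commit/rollback handled by the context manager
        if action == "insert":
            connection.execute(self.insert_sql())
        elif action == "delete":
            for stmt in self.delete_sql():
                connection.execute(stmt)
        else:
            connection.execute(self.sql_statement)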
def get_inspectors(self) -> Iterable[Inspector]:
    url = self.config.get_sql_alchemy_url(database=None)
    logger.debug(f"sql_alchemy_url={url}")
    db_listing_engine = create_engine(
        url,
        connect_args=self.config.get_sql_alchemy_connect_args(),
        **self.config.options,
    )
    for db_row in db_listing_engine.execute(text("SHOW DATABASES")):
        db = db_row.name
        if self.config.database_pattern.allowed(db):
            # We create a separate engine for each database in order to ensure that
            # they are isolated from each other.
            self.current_database = db
            engine = create_engine(
                self.config.get_sql_alchemy_url(database=db),
                connect_args=self.config.get_sql_alchemy_connect_args(),
                **self.config.options,
            )
            with engine.connect() as conn:
                inspector = inspect(conn)
                yield inspector
        else:
            self.report.report_dropped(db)
def get_bbox():
    engine = create_engine(config['default'].SQLALCHEMY_BINDS['kotabogor'])
    minx = request.args.get('xmin', 0)
    miny = request.args.get('ymin', 0)
    maxx = request.args.get('xmax', 0)
    maxy = request.args.get('ymax', 0)
    bbox = str(minx) + ',' + str(miny) + ',' + str(maxx) + ',' + str(maxy)
    # ST_Intersects already returns a boolean, so the original "='t'" comparison was redundant
    sql = """SELECT kegiatan, nama_objek,
                    ST_X(st_transform(wkb_geometry, 4326)) as longitude,
                    ST_Y(st_transform(wkb_geometry, 4326)) as latitude
             FROM tematik.gis_poi
             WHERE ST_Intersects(
                 ST_SetSRID(
                     ST_MakePoint(
                         ST_X(st_transform(wkb_geometry, 4326)),
                         ST_Y(st_transform(wkb_geometry, 4326))
                     ), 4326),
                 ST_MakeEnvelope(%s, 4326)
             )""" % (bbox)
    with engine.connect() as con:
        result = con.execute(text(sql))
        resultkey = result.keys()  # reuse the result instead of executing the query a second time
        rows = result.fetchall()
        output = []
        for item in rows:
            inner = {}
            for key, val in zip(resultkey, item):
                inner[key] = val
            output.append(inner)
    return jsonify({'status': 'success', 'data': output})
def get_checkin_points_in_poly():
    engine = create_engine(config['default'].SQLALCHEMY_BINDS['kotabogor'])
    kode_kec = request.args.get('kode', 3271010)
    sql = """SELECT kegiatan,
                    nama_objek,
                    st_x(st_transform(wkb_geometry, 4326)) as longitude,
                    st_y(st_transform(wkb_geometry, 4326)) as latitude
             FROM tematik.gis_poi
             WHERE ST_Contains(
                 (SELECT geom FROM administrasi.gis_admin_kec WHERE kode_kec = %s LIMIT 1),
                 tematik.gis_poi.wkb_geometry
             );""" % (kode_kec)
    with engine.connect() as con:
        result = con.execute(text(sql))
        resultkey = result.keys()
        rows = result.fetchall()
        output = []
        for item in rows:
            inner = {}
            for key, val in zip(resultkey, item):
                vf = val if key == 'geojson' else str(val)
                inner[key] = vf
            output.append(inner)
    return jsonify({
        'status': 'success',
        'data': output,
        'params': kode_kec
    })
def feeds() -> Iterator[engine.row.LegacyRow]:
    # The function yields rows, so the return annotation is an Iterator of rows, not a
    # single LegacyRow. The local name is db_engine to avoid shadowing the sqlalchemy
    # `engine` module referenced in the annotation.
    db_engine = create_engine(URL_DATABASE, echo=True)
    stmt = select(create_table.url_table.c.url)
    with db_engine.connect() as conn:
        result = conn.execute(stmt)
        for link in result:
            yield link
def authenticate(self, user, password):
    config = {
        'user': user,
        'password': password,
        'host': 'localhost',
        'port': 3306,
        'database': 'akash'
    }
    df_user = config['user']
    df_password = config['password']
    df_host = config['host']
    df_port = config['port']
    df_database = config['database']
    engine = create_engine(
        f'mysql+pymysql://{df_user}:{df_password}@{df_host}:{df_port}/{df_database}'
    )
    with engine.connect() as conn:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS IIITL (
                name varchar(40),
                batch varchar(10),
                email varchar(60) NOT NULL UNIQUE,
                phone bigint(10)
            );
        ''')
    return engine
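# A hedged aside: interpolating credentials into the URL breaks when the password
# contains characters such as '@' or '/'. SQLAlchemy 1.4+ provides URL.create, which
# escapes each component. A minimal sketch with placeholder credentials:
from sqlalchemy import create_engine
from sqlalchemy.engine import URL

url = URL.create(
    drivername='mysql+pymysql',
    username='user',       # placeholder
    password='p@ss/word',  # special characters are escaped by URL.create
    host='localhost',
    port=3306,
    database='akash',
)
safe_engine = create_engine(url)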
def get_administrasi():
    engine = create_engine(config['default'].SQLALCHEMY_BINDS['kotabogor'])
    sql = """SELECT kode_kec, kecamatan,
                    jsonb_build_object(
                        'type', 'Feature',
                        'id', id_1,
                        'geometry', ST_AsGeoJSON(st_transform(geom, 3857))::json,
                        'properties', json_build_object(
                            'kecamatan', kecamatan,
                            'kode_kec', kode_kec
                        )
                    )::json as geojson
             FROM administrasi.gis_admin_kec"""
    with engine.connect() as con:
        result = con.execute(text(sql))
        resultkey = result.keys()  # reuse the result instead of executing the query a second time
        rows = result.fetchall()
        output = []
        for item in rows:
            inner = {}
            for key, val in zip(resultkey, item):
                vf = val if key == 'geojson' else str(val)
                inner[key] = vf
            output.append(inner)
    return jsonify({'status': 'success', 'data': output})
def get_last_time(engine, project_name: str):
    with engine.connect() as conn:
        time = conn.execute(
            f"select insert_time from {project_name} order by rowid desc limit 1"
        ).fetchone()[0]
    time = datetime.datetime.strptime(time.split(".")[0], "%Y-%m-%d %H:%M:%S")
    return time
def db_conn(df, DB_NAME, table_name):
    # MySQL connection in SQLAlchemy
    engine = create_engine('mysql://*****:*****@localhost:3306/' + DB_NAME + '?charset=utf8')
    connection = engine.connect()
    # Do not insert the row number (index=False); the flavor keyword has been removed
    # from pandas, so it is no longer passed to to_sql
    df.to_sql(name=table_name, con=engine, if_exists='append', index=False, chunksize=2000)
    connection.close()
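# A minimal sketch of the same load without the explicit connection, which the function
# above opens but never uses: to_sql manages its own connection when handed the engine.
# The credentials and names below are placeholders:
def db_conn_simple(df, db_name, table_name):
    engine = create_engine(f'mysql://user:password@localhost:3306/{db_name}?charset=utf8')
    df.to_sql(name=table_name, con=engine, if_exists='append', index=False, chunksize=2000)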
def populate_session_grading_period(engine: engine.base.Engine):
    SESSION_GRADING_PERIOD_KEY = "session grading period"
    if SESSION_GRADING_PERIOD_KEY in already_loaded.keys():
        return
    with engine.connect() as connection:
        connection.execute(POPULATE_SESSION_GRADING_PERIOD_SQL)
    already_loaded[SESSION_GRADING_PERIOD_KEY] = "loaded"
def get_approvers():
    approvers = []
    engine = DB.get_engine()
    with engine.connect() as con:
        query = text("SELECT id, name, email "
                     "FROM approver ")
        rs = con.execute(query).fetchall()
        for row in rs:
            approver = Approver(row["id"], row["name"], row["email"], "")
            approvers.append(approver)
    return approvers
def create_table_2015y():
    engine = create_engine("mysql+pymysql://" + mysqluser + ":" + mysqlkey +
                           "@localhost:3306/project_data")
    conn = engine.connect()
    conn.execute("""
        drop table if exists y2015;
    """)
    conn.execute("""
        create table y2015 as
        SELECT * from vnq_5y
        Where year(Date) = '2015';
    """)
def assign_trainings(user_id, training_ids, approver_id):
    engine = DB.get_engine()
    with engine.connect() as con:
        for training_id in training_ids:
            query = text(
                "INSERT INTO training_assignment (training_id, user_id, approver_id) "
                "VALUES(:x, :y, :z)"
            )
            rs = con.execute(query, x=training_id, y=user_id, z=approver_id)
def approve_trainings(approver_id, assigned_training_ids):
    engine = DB.get_engine()
    with engine.connect() as con:
        for assigned_training_id in assigned_training_ids:
            query = text(
                "UPDATE training_assignment SET approved = 1, approver_id = :x, date_approved = :y "
                "WHERE id IN (:z)")
            rs = con.execute(query, x=approver_id, y=datetime.now(),
                             z=assigned_training_id)
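# A hedged aside on the two loops above: Connection.execute also accepts a list of
# parameter dictionaries and runs an executemany, avoiding one round trip per row.
# A minimal sketch for the insert case, assuming the same DB.get_engine() helper;
# assign_trainings_bulk is an illustrative name:
def assign_trainings_bulk(user_id, training_ids, approver_id):
    engine = DB.get_engine()
    query = text(
        "INSERT INTO training_assignment (training_id, user_id, approver_id) "
        "VALUES(:x, :y, :z)"
    )
    params = [{"x": t, "y": user_id, "z": approver_id} for t in training_ids]
    with engine.connect() as con:
        con.execute(query, params)  # one executemany instead of N single inserts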
def test_branched_connection_execution_options(self):
    engine = engines.testing_engine("sqlite://")
    conn = engine.connect()
    c2 = conn.execution_options(foo="bar")
    with testing.expect_deprecated_20(
        r"The Connection.connect\(\) method is considered "
    ):
        c2_branch = c2.connect()
    eq_(c2_branch._execution_options, {"foo": "bar"})
def verify_user(email):
    engine = DB.get_engine()
    with engine.connect() as con:
        query = text("SELECT user.id "
                     "FROM user "
                     "WHERE LOWER(user.email) = :x")
        rs = con.execute(query, x=email.lower()).fetchall()
    if len(rs) > 0:
        return rs[0][0]
    else:
        return 0
def db_name(db_host: str, db_user: str) -> typing.Iterable[str]:
    admin_user = os.environ.get('TRIBBLE_DB_ADMIN_USER', 'root')
    admin_password = os.environ.get('TRIBBLE_DB_ADMIN_PASSWORD')
    creds = database.Creds(host=db_host, user=admin_user,
                           password=admin_password, database='mysql')
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', ".*'@@tx_isolation' is deprecated.*")
        engine = database.connect_db(creds)
    connection = engine.connect()
    db_names = connection.execute('SHOW DATABASES;').fetchall()
    # drop leftover test schemas from earlier runs
    for (db_name, ) in db_names:
        if db_name.startswith('tribble_test_'):
            engine.execute(f'DROP SCHEMA {db_name}')
    database_name = 'tribble_test_{0:0>6}'.format(random.randrange(1, 1000000))
    connection.execute(f'CREATE SCHEMA {database_name};')
    connection.execute(f'USE {database_name};')
    connection.execute(f'GRANT ALL ON {database_name}.* TO {db_user}@{db_host};')
    connection.execute('FLUSH PRIVILEGES;')
    connection.close()
    yield database_name
    # teardown: drop the schema and revoke the grant once the consumer is done
    connection = engine.connect()
    connection.execute(f'DROP SCHEMA {database_name};')
    connection.execute(f'REVOKE ALL ON {database_name}.* FROM {db_user}@{db_host};')
    connection.execute('FLUSH PRIVILEGES;')
    connection.close()
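# A minimal usage sketch: the setup/yield/teardown shape above fits pytest's fixture
# protocol. The decorator wiring and the db_host/db_user fixtures below are illustrative
# assumptions, not from the source:
import pytest

@pytest.fixture
def test_db(db_host, db_user):
    yield from db_name(db_host, db_user)  # setup runs before the test, teardown after

def test_uses_fresh_schema(test_db):
    assert test_db.startswith('tribble_test_')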
def _read_and_load_descriptors(engine: engine.base.Engine, descriptor_type: str) -> None:
    descriptor = f"{descriptor_type}Descriptor"
    file_path = os.path.join("..", "..", "extension", "Descriptors", f"{descriptor}.xml")
    df = pd.read_xml(file_path)  # type: ignore
    with engine.connect() as connection:
        for _, row in df.iterrows():
            sql = _prepare_descriptor_sql(row, SCHEMA_LMSX, descriptor)
            connection.execute(text(sql))
def index():
    # Get the page_id from the first page
    osql = "select page_id from page where page_order = 1"
    p = 0
    # Connect to the app database
    dbURL = readPgpass(app_name, user)
    engine = create_engine(dbURL)
    conn = engine.connect()
    page_result = conn.execute(osql)
    for r in page_result:
        p = r[0]
    return redirect(url_for('content', page_id=p))
def load_grading_period(engine: engine.base.Engine, grading_period_table: str) -> None:
    GRADING_PERIOD_KEY = "Grading Period"
    GRADING_PERIOD_DESCRIPTOR_KEY = "Grading Period Descriptor"
    DESCRIPTOR_NAMESPACE = "uri://ed-fi.org/Descriptor"
    grading_periods_df = read_keyvalue_pairs_as_dataframe(grading_period_table)
    grading_period_descriptor = str(grading_periods_df["Descriptor"].iloc[0])
    # Add descriptor for grading period
    if GRADING_PERIOD_DESCRIPTOR_KEY not in already_loaded.keys():
        already_loaded[GRADING_PERIOD_DESCRIPTOR_KEY] = []
    if grading_period_descriptor not in already_loaded[GRADING_PERIOD_DESCRIPTOR_KEY]:
        descriptor = pd.Series({
            "CodeValue": grading_period_descriptor,
            "ShortDescription": grading_period_descriptor,
            "Description": grading_period_descriptor,
            "Namespace": DESCRIPTOR_NAMESPACE,
        })
        descriptor_sql = _prepare_descriptor_sql(descriptor, SCHEMA_EDFI,
                                                 "GradingPeriodDescriptor")
        with engine.connect() as connection:
            connection.execute(text(descriptor_sql))
        already_loaded[GRADING_PERIOD_DESCRIPTOR_KEY].append(grading_period_descriptor)
    descriptor_id = _get_descriptor_id_by_codevalue_and_namespace(
        engine, grading_period_descriptor, DESCRIPTOR_NAMESPACE)
    # Now add the grading period
    if GRADING_PERIOD_KEY not in already_loaded.keys():
        already_loaded[GRADING_PERIOD_KEY] = []
    grading_period_value = (str(grading_periods_df["PeriodSequence"].iloc[0]) +
                            str(grading_periods_df["Descriptor"].iloc[0]) +
                            str(grading_periods_df["SchoolId"].iloc[0]) +
                            str(grading_periods_df["SchoolYear"].iloc[0]))
    if grading_period_value in already_loaded[GRADING_PERIOD_KEY]:
        return
    grading_periods_df.rename(columns={"Descriptor": "GradingPeriodDescriptorId"},
                              inplace=True)
    grading_periods_df["GradingPeriodDescriptorId"] = descriptor_id
    grading_periods_df.to_sql("GradingPeriod", **_get_edfi_options(engine))
    already_loaded[GRADING_PERIOD_KEY].append(grading_period_value)
def getNewPics(freq, p_numb, db_name, table_name):
    start = time.time()
    while True:
        end = time.time()
        pic_set = set([])
        payload = np.array([])
        if abs(end - start) > freq:
            pic_array = getRecent(p_numb, flickr)
            for elem in pic_array:
                pic_id, url, lat, lon = elem
                if pic_id not in pic_set:
                    classification = get_and_classify([lat, lon, url, pic_id])
                    if classification is not None:  # was "!= None"
                        print(pic_id)  # was a Python 2 print statement
                        payload = np.append(payload, [classification])
            payload = payload.reshape((-1, 23))
            db_loader = pd.DataFrame(payload, columns=(
                'lat', 'lon', 'url', 'scene1', 'scene2', 'scene3', 'scene4', 'scene5',
                'sval1', 'sval2', 'sval3', 'sval4', 'sval5',
                'obj1', 'obj2', 'obj3', 'obj4', 'obj5',
                'oval1', 'oval2', 'oval3', 'oval4', 'oval5'))
            engine = create_engine('mysql://*****:*****@localhost:3306/' + db_name + '?charset=utf8')
            connection = engine.connect()
            max_ID_q = connection.execute("select max(ID) from " + TABLE_NAME + ";")
            max_ID = -1
            for item in max_ID_q:
                max_ID = item[0]
            indexer = range(max_ID + 1, max_ID + 1 + db_loader.shape[0])
            db_loader['ID'] = indexer
            # Do not insert the row number (index=False); the pandas flavor keyword has been removed
            db_loader.to_sql(name=table_name, con=engine, if_exists='append',
                             dtype={'url': VARCHAR(255)}, index=False, chunksize=5000)
            connection.close()
            if pic_array.shape[0] > 0:
                pic_set = set(pic_array[:, 0])
            start = end
        time.sleep(5)
def get_knowledge_areas():
    engine = DB.get_engine()
    knowledge_areas = []
    with engine.connect() as con:
        query = text(
            "SELECT training_knowledge_area.id, training_knowledge_area.knowledge_area "
            "FROM training_knowledge_area ")
        rs = con.execute(query).fetchall()
        for row in rs:
            knowledge_area = Training_Knowledge_Area(row["id"], row["knowledge_area"])
            knowledge_areas.append(knowledge_area)
    return knowledge_areas
def get_approver_finance(email):
    engine = DB.get_engine()
    with engine.connect() as con:
        query = text("SELECT id, name, email, code "
                     "FROM finance_approver "
                     "WHERE email = :x")
        rs = con.execute(query, x=email).fetchall()
    if len(rs) > 0:
        approver = Approver(rs[0]["id"], rs[0]["name"], rs[0]["email"], rs[0]["code"])
    else:
        approver = Approver(0, "", "", "")
    return approver
def load_student_association(engine: engine.base.Engine, student_unique_id: str,
                             school_id: str) -> None:
    student_usi = _get_student_usi(engine, student_unique_id)
    # Student enrollment requires a SexType descriptor - not nullable
    SEX_TYPE_DESCRIPTOR = "Not identified"
    SEX_TYPE_DESCRIPTOR_KEY = "Sex Type"
    DESCRIPTOR_NAMESPACE = "uri://ed-fi.org/SexType"
    if SEX_TYPE_DESCRIPTOR_KEY not in already_loaded.keys():
        already_loaded[SEX_TYPE_DESCRIPTOR_KEY] = []
    if SEX_TYPE_DESCRIPTOR not in already_loaded[SEX_TYPE_DESCRIPTOR_KEY]:
        descriptor = pd.Series({
            "CodeValue": SEX_TYPE_DESCRIPTOR,
            "ShortDescription": SEX_TYPE_DESCRIPTOR,
            "Description": SEX_TYPE_DESCRIPTOR,
            "Namespace": DESCRIPTOR_NAMESPACE,
        })
        descriptor_sql = _prepare_descriptor_sql(descriptor, SCHEMA_EDFI, "SexDescriptor")
        with engine.connect() as connection:
            connection.execute(text(descriptor_sql))
        already_loaded[SEX_TYPE_DESCRIPTOR_KEY].append(SEX_TYPE_DESCRIPTOR)
    descriptor_id = _get_descriptor_id_by_codevalue_and_namespace(
        engine, SEX_TYPE_DESCRIPTOR, DESCRIPTOR_NAMESPACE)
    # Now we can enroll the student at the school
    ENROLL_KEY = "Enrollment"
    enroll_value = f"{school_id}{student_usi}"
    if ENROLL_KEY not in already_loaded.keys():
        already_loaded[ENROLL_KEY] = []
    if enroll_value in already_loaded[ENROLL_KEY]:
        return
    enroll_df = pd.DataFrame([{
        "EducationOrganizationId": school_id,
        "StudentUSI": student_usi,
        "SexDescriptorId": descriptor_id,
    }])
    enroll_df.to_sql("StudentEducationOrganizationAssociation",
                     **_get_edfi_options(engine))
    already_loaded[ENROLL_KEY].append(enroll_value)
def create_db(engine: engine.base.Engine, database_name: str, runtime_user: str,
              runtime_host: str, force: bool = False) -> None:
    connection = engine.connect()
    if force:
        connection.execute(f'DROP SCHEMA IF EXISTS {database_name};')
    connection.execute(f'CREATE SCHEMA {database_name};')
    connection.execute(
        f'GRANT ALL PRIVILEGES ON {database_name}.* to {runtime_user}@{runtime_host};'
    )
    connection.execute('FLUSH PRIVILEGES;')
    connection.close()
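# A hedged aside: schema and user names cannot be bound parameters in DDL, so the
# f-strings above are unavoidable. One minimal mitigation sketch is to validate each
# identifier before interpolation; the helper name and regex are illustrative:
import re

def _safe_identifier(name: str) -> str:
    # allow only letters, digits, and underscores in names spliced into DDL
    if not re.fullmatch(r'[A-Za-z0-9_]+', name):
        raise ValueError(f'unsafe SQL identifier: {name!r}')
    return name

# usage: connection.execute(f'CREATE SCHEMA {_safe_identifier(database_name)};')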
def get_pip():
    engine = create_engine(config['default'].SQLALCHEMY_BINDS['kotabogor'])
    lat = request.args.get('lat', -6.6160933)
    lon = request.args.get('lon', 106.8266368)
    lonlat = str(lon) + ', ' + str(lat)
    sql = """SELECT kecamatan, kode_kec
             FROM administrasi.gis_admin_kec gak
             WHERE ST_Contains(
                 st_transform(gak.geom, 4326),
                 st_setsrid(st_makepoint(%s), 4326))""" % (lonlat)
    with engine.connect() as con:
        result = con.execute(text(sql))
        rows = result.fetchall()
    jml = len(rows)
    return jsonify({'status': 'success', 'data': jml})
def getPgDBnames(user):
    # get sqlalchemy functions to use
    from sqlalchemy import create_engine, engine, exc
    # make the connection using the default database for the type
    svURL = readPgpass('postgres', user)
    # the query that gets the database names: this query is not misspelled!
    dbq = 'select datname from pg_database where datacl is null order by datname'
    try:
        engine = create_engine(svURL)
        conn = engine.connect()
        dbnames = conn.execute(dbq)
        conn.close()
        return dbnames
    except exc.SQLAlchemyError as detail:
        print(dbq)
        fatal("Could not query : %s" % detail)
def connect_to_db(schema=None, **kwargs):
    """Connect to a MySQL database using keyword arguments."""
    if not schema:
        schema = kwargs.get('dbname')
    # build the connection uri
    uri = '{engine}://{username}:{password}@{host}/{schema}?local_infile=1'.format(
        schema=schema, **kwargs)
    # create the connection pool
    engine = sqlalchemy.create_engine(uri, pool_recycle=3600)
    # test the engine by making a single connection
    with engine.connect():
        return engine
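# A hedged aside: pool_recycle=3600 guards against MySQL's wait_timeout by recycling
# pooled connections hourly. SQLAlchemy also offers pool_pre_ping, which checks each
# connection with a lightweight round trip on checkout and replaces stale ones.
# A minimal sketch combining both, assuming the same uri as above:
engine = sqlalchemy.create_engine(uri, pool_recycle=3600, pool_pre_ping=True)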
def get_inspectors(self) -> Iterable[Inspector]:
    db_listing_engine = self.get_metadata_engine(database=None)
    for db_row in db_listing_engine.execute(text("SHOW DATABASES")):
        db = db_row.name
        if self.config.database_pattern.allowed(db):
            # We create a separate engine for each database in order to ensure that
            # they are isolated from each other.
            self.current_database = db
            engine = self.get_metadata_engine(database=db)
            with engine.connect() as conn:
                inspector = inspect(conn)
                yield inspector
        else:
            self.report.report_dropped(db)
def get_3d():
    engine = create_engine(config['default'].SQLALCHEMY_BINDS['default'])
    sql = """SELECT st_asgeojson(st_transform(geom, 3857))::json as geojson
             FROM public.kota_building"""
    with engine.connect() as con:
        result = con.execute(text(sql))
        resultkey = result.keys()
        rows = result.fetchall()
        output = []
        for item in rows:
            inner = {}
            for key, val in zip(resultkey, item):
                vf = val if key == 'geojson' else str(val)
                inner[key] = vf
            output.append(inner)
    return jsonify({'status': 'success', 'data': output})
def get_trainings():
    engine = DB.get_engine()
    trainings = []
    with engine.connect() as con:
        query = text(
            "SELECT training.id, training_category.category, "
            "training_knowledge_area.knowledge_area, training.title, training.link, "
            "training.description, training.free, training.advanced "
            "FROM training, training_category, training_knowledge_area "
            "WHERE training.category_id = training_category.id "
            "AND training.knowledge_area_id = training_knowledge_area.id ")
        rs = con.execute(query).fetchall()
        for row in rs:
            training = Training(row["id"], row["category"], row["knowledge_area"],
                                row["title"], row["link"], row["description"],
                                row["free"], row["advanced"])
            trainings.append(training)
    return trainings
def db_conn(df, DB_NAME, table_name):
    # MySQL connection in SQLAlchemy
    engine = create_engine('mysql://*****:*****@localhost:3306/' + DB_NAME + '?charset=utf8')
    connection = engine.connect()
    # Do not insert the row number (index=False); the pandas flavor keyword has been removed
    df.to_sql(name=table_name, con=engine, if_exists='append', index=False, chunksize=2000)
    connection.close()

if __name__ == '__main__':
    pics = pd.read_csv(csv_file, index_col=None)
    # If the table exists, set the index ('ID') to continue from the end of the current table.
    try:
        engine = create_engine('mysql://*****:*****@localhost:3306/' + DB_NAME + '?charset=utf8')
        connection = engine.connect()
        max_ID_q = connection.execute("select max(ID) from " + TABLE_NAME + ";")
        max_ID = -1
        for item in max_ID_q:
            max_ID = item[0]
        indexer = range(max_ID + 1, max_ID + 1 + pics.shape[0])
        pics['ID'] = indexer
    except Exception:  # table does not exist yet; start IDs from zero
        pics['ID'] = range(pics.shape[0])
    db_conn(pics, DB_NAME, TABLE_NAME)
client_id = '67d854ceaa5af4c'
client_secret = 'f632dc2515c06e87c0be56f2377479901a8cf5aa'

client = ImgurClient(client_id, client_secret)
name = client.credits
# print client.get_account_images('andrewsyc')
# print client.credits
print(name)  # was a Python 2 print statement

engine = create_engine('mysql+pymysql://root:magical18'
                       '@localhost/imgur')
engine.connect()

'''
Upload images
'''
# images = os.listdir("images")
# for image in images:
#     print image
#     with open(os.path.dirname(os.path.realpath(__file__)) + '/images/' + image, "rb") as image_file:
#         encoded_string = base64.b64encode(image_file.read())
#         # These code snippets use an open-source library.
#         response = unirest.post("https://imgur-apiv3.p.mashape.com/3/image",
#                                 headers={
#                                     "X-Mashape-Key": "huYA3ztRaxmshy95Mcj4dTmVrMTHp1iQ858jsn3jpASEst4dig",
#                                     "Authorization": "Client-ID 67d854ceaa5af4c",