def _cancel_all(connstring, label):
    """Cancel all sessions where client_label matches label.
    To use this, you must set LABEL to some unique value inside
    your connection string.
    """
    q = """select session_id, statement_id from v_monitor.sessions
           where client_label='%s'""" % label
    conn = pyodbc.connect(connstring, ansi=True)
    data = conn.cursor().execute(q).fetchall()
    _interrupt_statements(conn, data)
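# _interrupt_statements is referenced above but not shown here. A minimal
# sketch, assuming the target database is Vertica (v_monitor.sessions is
# queried above) and its INTERRUPT_STATEMENT meta-function; the helper's
# exact behavior and error handling in the real module are assumptions.
def _interrupt_statements(conn, rows):
    """Interrupt each (session_id, statement_id) pair returned by the
    v_monitor.sessions queries above."""
    cur = conn.cursor()
    for session_id, statement_id in rows:
        # INTERRUPT_STATEMENT returns a status string rather than raising,
        # so just execute it and fetch the result for logging.
        result = cur.execute(
            "select interrupt_statement('%s', %s)" % (session_id, statement_id)
        ).fetchone()
        print(result)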
def setUp(self):
    pyodbc.enable_mem_guards(True)
    pyodbc.enable_tracing(_enable_logging)
    self.assertIsNotNone(_conn_string, msg=_error_string)
    try:
        self.conn = pyodbc.connect(_conn_string,
                                   unicode_results=_unicode_results,
                                   timeout=3)
    except Exception as e:
        raise Exception(
            'It seems that your {0} is not set up correctly. '
            'Attempting to connect resulted in:\n{1}'
            .format(_conn_string_key, e.args[1]))
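# A matching tearDown is not shown above; a minimal sketch that closes the
# connection opened in setUp (it assumes no other per-test cleanup is needed).
def tearDown(self):
    if getattr(self, 'conn', None) is not None:
        self.conn.close()
        self.conn = None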
def _cancel(connstring, timeout, queryid):
    """After some timeout, close the statement associated with queryid.
    queryid should be some uuid you add via sql comments.
    """
    time.sleep(timeout)
    conn = pyodbc.connect(connstring, ansi=True)
    q = """
        select session_id, statement_id from v_monitor.sessions
        where current_statement like '%%%s%%'
          and current_statement not like '%%v_monitor.sessions%%';
        """
    q = q % queryid
    data = conn.cursor().execute(q).fetchall()
    if len(data) == 1:
        _interrupt_statements(conn, data)
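# Example usage of _cancel. This is a sketch only: the uuid tag, helper name
# and thread wiring are illustrative and not part of the module above. The
# query is tagged with a unique id in a SQL comment, and _cancel is armed in
# a background thread so it can interrupt the statement after the timeout.
def run_with_watchdog(connstring, sql, timeout):
    import threading
    import uuid
    queryid = uuid.uuid4().hex
    tagged_sql = "/* %s */ %s" % (queryid, sql)
    watchdog = threading.Thread(target=_cancel,
                                args=(connstring, timeout, queryid))
    watchdog.daemon = True
    watchdog.start()
    conn = pyodbc.connect(connstring, ansi=True)
    return conn.cursor().execute(tagged_sql).fetchall()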
def main(conn_str):
    print('Connecting to data source...')
    conn = odbc.connect(conn_str)
    print('Building the table...')
    ensure_table(conn)
    print('Trying queries...')
    t1 = time()
    query_with_time_out(conn, 5.0)
    t2 = time()
    query(conn)
    t3 = time()
    print('query elapsed %d s, query_with_timeout elapsed %d s'
          % (t3 - t2, t2 - t1))
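# query_with_time_out and query are defined elsewhere in this script. A
# minimal sketch of what query_with_time_out might look like, assuming the
# module is imported as odbc (as above) and that pyodbc's Connection.timeout
# attribute is used to bound the query; the table name is a placeholder.
def query_with_time_out(conn, timeout):
    conn.timeout = int(timeout)   # seconds; applies to subsequent executes
    cur = conn.cursor()
    try:
        cur.execute("select count(*) from the_slow_table").fetchone()
    except odbc.Error as e:
        print('query timed out or failed: %s' % (e,))
    finally:
        conn.timeout = 0          # restore "no timeout"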
def generate_tables(count):
    import os, binascii
    import random
    import datetime
    print("Generating tables for tests (%s elements)" % repr(count))
    conn = pyodbc.connect(_connect_string)
    cur = conn.cursor()
    for tab in tables:
        print("Table %s: %s" % (tab["name"], tab["descr"]))
        verbose_exec(cur, "drop table if exists %s" % (tab["name"]))
        verbose_exec(cur, "create table %s (val %s)" % (tab["name"], tab["type"]))
        values = tab["generator"](count // 2)
        values.extend([(None,)] * (count - len(values)))  # add nulls
        random.shuffle(values)  # and shuffle
        cur.executemany("insert into %s values(?)" % (tab["name"],), values)
        cur.commit()
    conn.close()
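# The tables list and verbose_exec helper are defined elsewhere; this sketch
# only shows the shape generate_tables expects. The table name, column type
# and generator below are illustrative, not the real test definitions.
def verbose_exec(cur, sql):
    print(sql)
    cur.execute(sql)

tables = [
    {"name": "test_int",
     "descr": "integers with interleaved NULLs",
     "type": "int",
     "generator": lambda n: [(i,) for i in range(int(n))]},
]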
def read_only_connect():
    return pyodbc.connect(_connect_string, ansi=True,
                          unicode_results=False, readonly=True)
def connect(conn_str):
    print('Connecting to data source...')
    return odbc.connect(conn_str)
    dictarray = cursor.fetchdictarray(N // steps)
    t1 = time() - t0
    if profile:
        show_stats("After reading...", tref)
    print "[fetchdictarray twice] Retrieved %d rows in %.3fs" % (
        len(dictarray['volume_']), t1)
    print "Last row:", [(name, arr[-1]) for name, arr in dictarray.iteritems()]
    del dictarray
    # del cursor
    time4 = t1
    return (time1, time2, time3, time4)


if __name__ == "__main__":
    # set up a connection
    connection = pyodbc.connect(_default_connect_string)
    cursor = connection.cursor()

    import sys
    if len(sys.argv) > 1 and sys.argv[1] == "write":
        write(cursor)
    if len(sys.argv) > 1 and sys.argv[1] == "profile":
        if sys.platform.startswith("linux"):
            profile = True
        else:
            print "Memory profiling only supported on Linux. Exiting..."
            sys.exit(1)

    results = []
    for i in range(5):
        print "\n\nrun %d\n" % i
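# show_stats is called in the benchmark above but not shown. A sketch of a
# Linux-only helper that reports resident memory from /proc/self/status,
# relative to a reference value tref obtained the same way; the name,
# signature and parsing details are assumptions, kept in the Python 2 style
# of the script above.
def get_vmrss_kb():
    with open("/proc/self/status") as f:
        for line in f:
            if line.startswith("VmRSS:"):
                return int(line.split()[1])   # reported in kB
    return 0

def show_stats(label, tref):
    rss = get_vmrss_kb()
    print "%s VmRSS: %d kB (delta %+d kB)" % (label, rss, rss - tref)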
def clean(conn_str):
    print('Connecting to data source...')
    conn = odbc.connect(conn_str)
    print('Dropping the table')
    drop_table(conn)
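# ensure_table (used by main above) and drop_table (used by clean) are
# defined elsewhere; a minimal sketch under the assumption that the backend
# supports IF EXISTS / IF NOT EXISTS. The table name and schema are
# placeholders matching the query_with_time_out sketch above.
def ensure_table(conn):
    cur = conn.cursor()
    cur.execute("create table if not exists the_slow_table (val int)")
    conn.commit()

def drop_table(conn):
    cur = conn.cursor()
    cur.execute("drop table if exists the_slow_table")
    conn.commit()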