def test_store_copy_from():
    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            drop_table(cursor)
            create_table(cursor)
            add_initial_timestamp(cursor)

            initial_timestamp = get_scalar(cursor)

        conn.commit()

    print("initial timestamp: {}".format(initial_timestamp))

    # Run two concurrent tasks that both update the stored timestamp.
    task1 = start_after(0, task("task 1", 4))
    thread1 = Thread(target=task1)
    thread1.start()

    task2 = start_after(1, task("task 2", 1))
    thread2 = Thread(target=task2)
    thread2.start()

    thread1.join()
    thread2.join()

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            get_timestamp(cursor)

            final_timestamp = get_scalar(cursor)

    print("final timestamp: {}".format(final_timestamp))
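# test_store_copy_from() relies on start_after() and task() helpers that are
# defined elsewhere in the test suite. The definition below is only a minimal
# sketch of how start_after() could look, assuming task(name, duration) is a
# factory returning a closure like f() further down; the name and signature
# are assumptions, not the suite's actual code.
from time import sleep


def start_after(delay, fn):
    """Return a callable that waits `delay` seconds and then calls `fn`."""
    def delayed():
        sleep(delay)
        fn()

    return delayed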
def setup(self):
    self.data = test_set()
    self.conn = connect()

    clear_database(self.conn)

    with closing(self.conn.cursor()) as cursor:
        self.data.load(cursor)

    self.conn.commit()
def setup(self):
    self.conn = connect()

    clear_database(self.conn)

    with closing(self.conn.cursor()) as cursor:
        self.datasource = name_to_datasource(cursor, "test-source")
        self.entitytype = name_to_entitytype(cursor, "test_type")

    self.conn.commit()
def test_run(self):
    self.maxDiff = None

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            # Start from an empty trend/attribute catalog.
            cursor.execute("DELETE FROM trend.trendstore")
            cursor.execute("DELETE FROM attribute_directory.attributestore")

            cursor.execute(
                "SELECT trend.create_trendstore("
                "    'test-datasource',"
                "    'test-entitytype',"
                "    '900',"
                "    ARRAY["
                "        ('x', 'integer', 'test trend'),"
                "        ('y', 'double precision', 'another test trend')"
                "    ]::trend.trend_descr[]"
                ")")

            cursor.execute(
                "SELECT attribute_directory.create_attributestore("
                "    'test-datasource',"
                "    'test-entitytype',"
                "    ARRAY["
                "        ('height', 'double precision', 'fictive attribute'),"
                "        ('power', 'integer', 'another fictive attribute')"
                "    ]::attribute_directory.attribute_descr[]"
                ")"
            )

        conn.commit()

    # Dump the database definitions and compare against the expected output.
    process = subprocess.Popen(['minerva-dump'], stdout=subprocess.PIPE)

    out, err = process.communicate()

    self.assertMultiLineEqual(out, """\
SELECT trend.create_trendstore(
    'test-datasource',
    'test-entitytype',
    '900',
    ARRAY[
        ('x', 'integer', ''),
        ('y', 'double precision', '')
    ]::trend.trend_descr[]
);
SELECT attribute_directory.create_attributestore(
    'test-datasource',
    'test-entitytype',
    ARRAY[
        ('height', 'double precision', 'fictive attribute'),
        ('power', 'integer', 'another fictive attribute')
    ]::attribute_directory.attribute_descr[]
);
""")
def f():
    obtain_lock = query(
        "LOCK TABLE system.job_queue "
        "IN SHARE UPDATE EXCLUSIVE MODE NOWAIT;")

    with closing(connect()) as conn:
        try:
            with closing(conn.cursor()) as cursor:
                # NOWAIT makes the LOCK statement raise an error immediately
                # if the table is already locked, instead of blocking.
                obtain_lock(cursor)

                time.sleep(duration)
        except psycopg2.OperationalError:
            conn.rollback()
        else:
            conn.commit()
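# f() above assumes a query() helper that turns a SQL string into a callable
# taking a cursor, and reads `duration` from its enclosing scope. The helper
# below is a minimal sketch under that assumption, not the actual definition
# from this code base.
def query(sql):
    """Return a callable that executes `sql` on the cursor passed to it."""
    def execute(cursor, *args):
        cursor.execute(sql, *args)

    return execute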
def f():
    # `name` and `duration` are free variables supplied by an enclosing
    # factory (cf. task(name, duration) in test_store_copy_from).
    print("{} start".format(name))

    with closing(connect()) as conn:
        with closing(conn.cursor()) as cursor:
            timestamp = now(cursor)

            print("{} timestamp: {}".format(name, timestamp))

            sleep(duration)

            update_timestamp(cursor, timestamp)

        conn.commit()

    print("{} commit".format(name))
def main():
    with closing(connect()) as conn:
        run(conn)
def main(): """Script entry point.""" with closing(connect()) as conn: jobsource_id = add_dummy_jobsource(conn) run(conn, jobsource_id)