def test_update_table_column():
    """A second CSV pushed through _updateDataIntoTableColumn upserts rows
    keyed on ProposalID: overlapping ids are replaced, the rest survive."""
    ctx = reload.context()
    path_a = "/tmp/ssd1.csv"
    path_b = "/tmp/ssd2.csv"
    rows_a = [[i, i] for i in range(10)]
    rows_b = [[i, i + 1] for i in range(1, 11)]
    write_csv(path_a, ["ProposalID", "siteNumber"], rows_a)
    write_csv(path_b, ["ProposalID", "siteNumber"], rows_b)

    def as_dicts(rows):
        # Table rows come back stringified, so mirror that here.
        return [{"siteNumber": str(r[1]), "ProposalID": str(r[0])} for r in rows]

    try:
        reload._updateDataIntoTableColumn(ctx, "SiteInformation", "ProposalID", path_a, {})
        stored = reload.readDataFromTable(ctx, "SiteInformation")
        assert bag_contains(stored, as_dicts(rows_a))

        reload._updateDataIntoTableColumn(ctx, "SiteInformation", "ProposalID", path_b, {})
        stored = reload.readDataFromTable(ctx, "SiteInformation")
        reposted_ids = [r[0] for r in rows_b]
        survivors = [r for r in rows_a if r[0] not in reposted_ids]
        assert bag_contains(stored, as_dicts(survivors) + as_dicts(rows_b))
    finally:
        # Always reset the database and remove the temp CSVs.
        reload.clearDatabase(ctx)
        reload.createTables(ctx)
        os.unlink(path_a)
        os.unlink(path_b)
def do_test_post_table(verb1, verb2, src, cnttype, tablename, kvp1, kvp2, content1, content2, has_comments=False):
    """Upload `src` twice (via verb1 then verb2) against /table/<tablename>
    and verify the stored rows match content1 and then content2."""
    print("cwd =", os.getcwd())
    ctx = reload.context()
    server_proc = Process(target=server.server, args=[ctx], kwargs={})
    server_proc.start()
    time.sleep(WAIT_PERIOD)
    worker_proc = Process(target=reload.startWorker)
    worker_proc.start()
    time.sleep(WAIT_PERIOD)
    table_url = "http://localhost:5000/table/" + tablename
    try:
        # The table must start out empty.
        print("get " + tablename)
        reply = requests.get(table_url)
        assert reply.json() == []

        # First upload: expect a task id back, then wait for the worker.
        print("post " + tablename)
        reply = do_request_table(verb1, tablename, kvp1, src, cnttype, has_comments=has_comments)
        print(reply.text)
        assert reply.status_code == 200
        task = reply.json()
        assert isinstance(task, str)
        wait_for_task_to_finish(task)

        print("get " + tablename)
        reply = requests.get(table_url)
        assert bag_contains(reply.json(), content1)

        # Second upload of the same source.
        print("post " + tablename)
        reply = do_request_table(verb2, tablename, kvp2, src, cnttype, has_comments=has_comments)
        assert reply.status_code == 200
        task = reply.json()
        assert isinstance(task, str)
        wait_for_task_to_finish(task)

        print("get " + tablename)
        reply = requests.get(table_url)
        assert bag_contains(reply.json(), content2)
    finally:
        # Tear down the background processes and reset shared state.
        worker_proc.terminate()
        server_proc.terminate()
        reload.clearTasks()
        reload.clearDatabase(ctx)
        reload.createTables(ctx)
def test_clear_database():
    """clearDatabase should drop every table in the 'public' schema.

    Fixes two cleanup issues in the original: the connection was only
    closed after the assertion (leaking it on failure), and createTables
    was skipped on failure, leaving the database unusable for later tests.
    """
    ctx = reload.context()
    reload.clearDatabase(ctx)
    url = (
        "postgresql+psycopg2://" + ctx["dbuser"] + ":" + ctx["dbpass"]
        + "@" + ctx["dbhost"] + ":" + ctx["dbport"] + "/" + ctx["dbname"]
    )
    engine = create_engine(url)
    try:
        # Context manager guarantees the connection is released even if
        # the assertion below raises.
        with engine.connect() as conn:
            rs = conn.execute(
                "SELECT table_schema,table_name FROM information_schema.tables "
                "WHERE table_schema = 'public' ORDER BY table_schema,table_name"
            ).fetchall()
        assert len(rs) == 0
    finally:
        # Always leave the schema recreated for subsequent tests.
        reload.createTables(ctx)
def test_get_all_tasks():
    """GET /task should report a freshly started sync job in the 'started'
    bucket while queued/finished/failed/deferred all stay empty."""
    ctx = reload.context()
    # Bring up the HTTP server first, then reset all shared state, then the
    # worker — the order matters so the worker sees a clean task queue.
    pServer = Process(target = server.server, args=[ctx], kwargs={})
    print("starting server ctx = " + str(ctx))
    pServer.start()
    print("server started, waiting for " + str(WAIT_PERIOD))
    time.sleep(WAIT_PERIOD)
    print("clearing tasks")
    reload.clearTasks()
    print("clearing database")
    reload.clearDatabase(ctx)
    print("creating tables")
    reload.createTables(ctx)
    print("starting worker")
    pWorker = Process(target = reload.startWorker)
    pWorker.start()
    print("worker started, waiting for " + str(WAIT_PERIOD))
    time.sleep(WAIT_PERIOD)
    print("set up")
    try:
        # Nothing should be queued before we submit anything.
        resp0 = requests.get("http://localhost:5000/task")
        assert len(resp0.json()["queued"]) == 0
        # Kick off a sync job and block until the worker picks it up.
        resp1 = requests.post("http://localhost:5000/sync")
        task_id = resp1.json()
        wait_for_task_to_start(task_id)
        # While the job runs it must appear only under "started".
        resp2 = requests.get("http://localhost:5000/task")
        assert resp2.json() == {
            "queued": [],
            "started": {
                "job_ids": [task_id],
                "expired_job_ids": []
            },
            "finished": {
                "job_ids": [],
                "expired_job_ids": []
            },
            "failed": {
                "job_ids": [],
                "expired_job_ids": []
            },
            "deferred": {
                "job_ids": [],
                "expired_job_ids": []
            }
        }
    finally:
        # Tear down background processes and reset shared state.
        pWorker.terminate()
        pServer.terminate()
        reload.clearTasks()
        reload.clearDatabase(ctx)
        reload.createTables(ctx)
def test_get_column_data_type_twice2():
    """ProposalID should report type 'bigint' both before and after a CSV load."""
    ctx = reload.context()
    csv_path = "/tmp/ssd1.csv"
    records = [[i, i] for i in range(10)]
    write_csv(csv_path, ["ProposalID", "siteNumber"], records)
    try:
        assert reload.getColumnDataType(ctx, "SiteInformation", "ProposalID") == "bigint"
        reload._updateDataIntoTable(ctx, "SiteInformation", csv_path, {})
        # Loading data must not alter the declared column type.
        assert reload.getColumnDataType(ctx, "SiteInformation", "ProposalID") == "bigint"
    finally:
        reload.clearDatabase(ctx)
        reload.createTables(ctx)
        os.unlink(csv_path)
def do_test_insert_table(src, kvp, has_comments=False):
    """Insert the same source twice: rows accumulate (no dedup on insert)."""
    ctx = reload.context()
    # Comment line (when present) does not count as a data row.
    row_count = countrows(src, "text/csv") - (1 if has_comments else 0)

    def expected(copies):
        # Built lazily so it always reflects the current kvp contents.
        return [{"siteNumber": str(i), **kvp} for i in range(1, row_count + 1)] * copies

    try:
        reload.insertDataIntoTable(ctx, "SiteInformation", src, kvp)
        assert bag_contains(reload.readDataFromTable(ctx, "SiteInformation"), expected(1))
        reload.insertDataIntoTable(ctx, "SiteInformation", src, kvp)
        assert bag_contains(reload.readDataFromTable(ctx, "SiteInformation"), expected(2))
    finally:
        reload.clearDatabase(ctx)
        reload.createTables(ctx)
def do_test_post_error(verb1, src, cnttype, tablename, kvp1, status_code, resp_text):
    """Submit a bad upload and check the HTTP status and error-text pattern."""
    ctx = reload.context()
    server_proc = Process(target=server.server, args=[ctx], kwargs={})
    server_proc.start()
    time.sleep(WAIT_PERIOD)
    worker_proc = Process(target=reload.startWorker)
    worker_proc.start()
    time.sleep(WAIT_PERIOD)
    try:
        reply = do_request_table(verb1, tablename, kvp1, src, cnttype)
        assert reply.status_code == status_code
        # On error the body carries a message matching resp_text, not a task id.
        assert re.match(resp_text, reply.text)
    finally:
        worker_proc.terminate()
        server_proc.terminate()
        reload.clearTasks()
        reload.clearDatabase(ctx)
        reload.createTables(ctx)
def test_post_table_column():
    """POSTing two CSVs through the table/column endpoint upserts rows keyed
    on ProposalID: the second file overwrites overlapping ids and the rest
    of the first file's rows survive."""
    ctx = reload.context()
    fn = "/tmp/ssd1.csv"
    fn2 = "/tmp/ssd2.csv"
    # csv2's ProposalIDs 1..9 overlap csv1's; id 0 survives, id 10 is new.
    csv1 = [[i, i] for i in range(10)]
    csv2 = [[i, i + 1] for i in range(1, 11)]
    n = len(csv1)
    n2 = len(csv2)
    write_csv(fn, ["ProposalID", "siteNumber"], csv1)
    write_csv(fn2, ["ProposalID", "siteNumber"], csv2)
    tablename = "SiteInformation"
    column = "ProposalID"
    kvp1 = kvp2 = {}
    cnttype = "text/csv"
    verb1 = verb2 = requests.post
    # Expected contents after the first upload: csv1 verbatim (stringified).
    content1 = [{
        "siteNumber": str(row[1]),
        "ProposalID": str(row[0])
    } for row in csv1]
    # After the second upload: csv1 rows whose ProposalID was not re-posted,
    # plus all of csv2.
    content2 = [{
        "siteNumber": str(row[1]),
        "ProposalID": str(row[0])
    } for row in csv1 if row[0] not in list(map(lambda x: x[0], csv2))] + [{
        "siteNumber": str(row[1]),
        "ProposalID": str(row[0])
    } for row in csv2]
    pServer = Process(target=server.server, args=[ctx], kwargs={})
    pServer.start()
    time.sleep(WAIT_PERIOD)
    pWorker = Process(target=reload.startWorker)
    pWorker.start()
    time.sleep(WAIT_PERIOD)
    try:
        # First upload and wait for the background task to complete.
        resp = do_request_table_column(verb1, tablename, column, kvp1, fn, cnttype)
        assert resp.status_code == 200
        taskid = resp.json()
        assert isinstance(taskid, str)
        wait_for_task_to_finish(taskid)
        print("get " + tablename)
        resp = requests.get("http://localhost:5000/table/" + tablename)
        respjson = resp.json()
        assert (bag_contains(respjson, content1))
        # Second upload (overlapping ids) and re-check.
        print("post " + tablename)
        resp = do_request_table_column(verb2, tablename, column, kvp2, fn2, cnttype)
        assert resp.status_code == 200
        taskid = resp.json()
        assert isinstance(taskid, str)
        wait_for_task_to_finish(taskid)
        print("get " + tablename)
        resp = requests.get("http://localhost:5000/table/" + tablename)
        respjson = resp.json()
        assert (bag_contains(respjson, content2))
    finally:
        # NOTE(review): fn/fn2 are never os.unlink'ed here, unlike the other
        # tests in this file — consider removing them in this finally block.
        pWorker.terminate()
        pServer.terminate()
        reload.clearTasks()
        reload.clearDatabase(ctx)
        reload.createTables(ctx)
def database(ctx, cleanup=True):
    """Generator-based fixture (presumably wrapped by contextlib.contextmanager
    or pytest at the call site — TODO confirm): yield control to the caller,
    then reset the database.

    Fix: the `cleanup` parameter was accepted but never consulted, so
    `cleanup=False` still wiped the database. It is now honored; the
    default (True) preserves the original behavior.

    :param ctx: reload context passed to clearDatabase/createTables.
    :param cleanup: when True (default), clear and recreate tables on exit.
    """
    try:
        yield
    finally:
        if cleanup:
            reload.clearDatabase(ctx)
            reload.createTables(ctx)