def test_07_edit_bank_value(self):
    aclif.add_bank(acct_conn, bank="root", shares=100)
    aclif.edit_bank(acct_conn, bank="root", shares=50)

    cursor = acct_conn.cursor()
    cursor.execute("SELECT shares FROM bank_table WHERE bank='root'")
    self.assertEqual(cursor.fetchone()[0], 50)
def test_04_add_subaccounts(self):
    aclif.add_bank(acct_conn, bank="sub_account_1", parent_bank="root", shares=50)
    select_stmt = "SELECT * FROM bank_table WHERE bank='sub_account_1'"
    dataframe = pd.read_sql_query(select_stmt, acct_conn)
    self.assertEqual(len(dataframe.index), 1)

    aclif.add_bank(acct_conn, bank="sub_account_2", parent_bank="root", shares=50)
    select_stmt = "SELECT * FROM bank_table WHERE bank='sub_account_2'"
    dataframe = pd.read_sql_query(select_stmt, acct_conn)
    self.assertEqual(len(dataframe.index), 1)
def test_03_add_with_invalid_parent_bank(self):
    with self.assertRaises(Exception) as context:
        aclif.add_bank(
            acct_conn,
            bank="bad_subaccount",
            parent_bank="bad_parentaccount",
            shares=1,
        )

    self.assertTrue("Parent bank not found in bank table" in str(context.exception))
def select_accounting_function(args, conn, output_file, parser):
    if args.func == "view_user":
        aclif.view_user(conn, args.username)
    elif args.func == "add_user":
        aclif.add_user(
            conn,
            args.username,
            args.bank,
            args.admin_level,
            args.shares,
            args.max_jobs,
            args.max_wall_pj,
        )
    elif args.func == "delete_user":
        aclif.delete_user(conn, args.username, args.bank)
    elif args.func == "edit_user":
        aclif.edit_user(conn, args.username, args.field, args.new_value)
    elif args.func == "view_job_records":
        jobs.view_job_records(
            conn,
            output_file,
            jobid=args.jobid,
            user=args.user,
            before_end_time=args.before_end_time,
            after_start_time=args.after_start_time,
        )
    elif args.func == "add_bank":
        aclif.add_bank(conn, args.bank, args.shares, args.parent_bank)
    elif args.func == "view_bank":
        aclif.view_bank(conn, args.bank)
    elif args.func == "delete_bank":
        aclif.delete_bank(conn, args.bank)
    elif args.func == "edit_bank":
        aclif.edit_bank(conn, args.bank, args.shares)
    elif args.func == "update_usage":
        jobs_conn = establish_sqlite_connection(args.job_archive_db_path)
        jobs.update_job_usage(conn, jobs_conn, args.priority_decay_half_life)
    else:
        # print_usage() writes directly to stdout and returns None, so don't
        # wrap it in another print()
        parser.print_usage()
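# Usage sketch (hypothetical wiring; the real CLI builds ``args`` from argparse
# sub-commands elsewhere). The dispatcher only looks at ``args.func`` plus the
# attributes the selected sub-command needs, e.g. ``args.bank`` for "view_bank":
#
#   import argparse
#   import sqlite3
#
#   parser = argparse.ArgumentParser()
#   conn = sqlite3.connect("FluxAccountingUsers.db")
#   args = argparse.Namespace(func="view_bank", bank="root")
#   select_accounting_function(args, conn, "job_records.csv", parser)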
def test_06_delete_parent_bank(self):
    aclif.delete_bank(acct_conn, bank="root")
    aclif.delete_bank(acct_conn, bank="sub_account_2")

    # build a small hierarchy: A -> (B -> (D, E), C -> (F, G))
    aclif.add_bank(acct_conn, bank="A", shares=1)
    aclif.add_bank(acct_conn, bank="B", parent_bank="A", shares=1)
    aclif.add_bank(acct_conn, bank="D", parent_bank="B", shares=1)
    aclif.add_bank(acct_conn, bank="E", parent_bank="B", shares=1)
    aclif.add_bank(acct_conn, bank="C", parent_bank="A", shares=1)
    aclif.add_bank(acct_conn, bank="F", parent_bank="C", shares=1)
    aclif.add_bank(acct_conn, bank="G", parent_bank="C", shares=1)

    # deleting the top-level bank should also remove all of its sub banks
    aclif.delete_bank(acct_conn, bank="A")

    select_stmt = "SELECT * FROM bank_table"
    dataframe = pd.read_sql_query(select_stmt, acct_conn)
    self.assertEqual(len(dataframe), 0)
def test_02_add_dup_bank(self):
    aclif.add_bank(acct_conn, bank="root", shares=100)
    self.assertRaises(sqlite3.IntegrityError)
def test_01_add_bank_success(self):
    aclif.add_bank(acct_conn, bank="root", shares=100)
    select_stmt = "SELECT * FROM bank_table WHERE bank='root'"
    dataframe = pd.read_sql_query(select_stmt, acct_conn)
    self.assertEqual(len(dataframe.index), 1)
def test_08_edit_bank_value_fail(self):
    with self.assertRaises(Exception) as context:
        aclif.add_bank(acct_conn, bank="bad_bank", shares=10)
        aclif.edit_bank(acct_conn, bank="bad_bank", shares=-1)

    self.assertTrue("New shares amount must be >= 0" in str(context.exception))
@classmethod
def setUpClass(self):
    # create example job-archive database and output file
    global jobs_conn
    global op
    op = "job_records.csv"
    jobs_conn = sqlite3.connect("file:jobs.db?mode=rwc", uri=True)
    jobs_conn.execute(
        """
        CREATE TABLE IF NOT EXISTS jobs (
            id         int  NOT NULL,
            userid     int  NOT NULL,
            username   text NOT NULL,
            ranks      text NOT NULL,
            t_submit   real NOT NULL,
            t_sched    real NOT NULL,
            t_run      real NOT NULL,
            t_cleanup  real NOT NULL,
            t_inactive real NOT NULL,
            eventlog   text NOT NULL,
            jobspec    text NOT NULL,
            R          text NOT NULL,
            PRIMARY KEY (id)
        );"""
    )

    # create the flux-accounting database
    c.create_db("FluxAccountingUsers.db")
    global acct_conn
    acct_conn = sqlite3.connect("FluxAccountingUsers.db")

    # simulate end of half-life period in the flux-accounting database
    update_stmt = """
        UPDATE t_half_life_period_table
        SET end_half_life_period=?
        WHERE cluster='cluster'
        """
    acct_conn.execute(update_stmt, ("10000000",))
    acct_conn.commit()

    # add bank hierarchy
    aclif.add_bank(acct_conn, bank="A", shares=1)
    aclif.add_bank(acct_conn, bank="B", parent_bank="A", shares=1)
    aclif.add_bank(acct_conn, bank="C", parent_bank="B", shares=1)
    aclif.add_bank(acct_conn, bank="D", parent_bank="B", shares=1)

    # add users
    aclif.add_user(acct_conn, username="******", bank="C")
    aclif.add_user(acct_conn, username="******", bank="C")
    aclif.add_user(acct_conn, username="******", bank="D")
    aclif.add_user(acct_conn, username="******", bank="D")

    jobid = 100
    interval = 0

    # stagger job timestamps to diversify job-archive records
    @mock.patch("time.time", mock.MagicMock(return_value=9000000))
    def populate_job_archive_db(jobs_conn, userid, username, ranks, num_entries):
        nonlocal jobid
        nonlocal interval
        t_inactive_delta = 2000

        for i in range(num_entries):
            try:
                jobs_conn.execute(
                    """
                    INSERT INTO jobs (
                        id, userid, username, ranks, t_submit, t_sched,
                        t_run, t_cleanup, t_inactive, eventlog, jobspec, R
                    )
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        jobid,
                        userid,
                        username,
                        ranks,
                        (time.time() + interval) - 2000,
                        (time.time() + interval) - 1000,
                        (time.time() + interval),
                        (time.time() + interval) + 1000,
                        (time.time() + interval) + t_inactive_delta,
                        "eventlog",
                        "jobspec",
                        '{"version":1,"execution": {"R_lite":[{"rank":"0","children": {"core": "0"}}]}}',
                    ),
                )
                # commit changes
                jobs_conn.commit()
            # make sure the entry is unique
            except sqlite3.IntegrityError as integrity_error:
                print(integrity_error)

            jobid += 1
            interval += 10000
            t_inactive_delta += 100

    # populate the job-archive DB with fake job entries
    populate_job_archive_db(jobs_conn, 1001, "1001", "0", 2)
    populate_job_archive_db(jobs_conn, 1002, "1002", "0-1", 3)
    populate_job_archive_db(jobs_conn, 1002, "1002", "0", 2)
    populate_job_archive_db(jobs_conn, 1003, "1003", "0-2", 3)
    populate_job_archive_db(jobs_conn, 1004, "1004", "0-3", 4)
    populate_job_archive_db(jobs_conn, 1004, "1004", "0", 4)