Example #1
 def test_07_job_usage_factor_table_configurable(self):
     c.create_db(
         "flux_accounting_test_2.db",
         priority_usage_reset_period=10,
         priority_decay_half_life=1,
     )
     columns_query = "PRAGMA table_info(job_usage_factor_table)"
     test_conn = sqlite3.connect("flux_accounting_test_2.db")
     expected = [
         "usage_factor_period_0",
         "usage_factor_period_1",
         "usage_factor_period_2",
         "usage_factor_period_3",
         "usage_factor_period_4",
         "usage_factor_period_5",
         "usage_factor_period_6",
         "usage_factor_period_7",
         "usage_factor_period_8",
         "usage_factor_period_9",
     ]
     test = []
     cursor = test_conn.cursor()
     for row in cursor.execute(columns_query):
         if "usage_factor" in row[1]:
             test.append(row[1])
     self.assertEqual(test, expected)
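Note: the column check above depends on the row layout of PRAGMA table_info, where index 1 holds the column name. A small standalone illustration (plain sqlite3 on an in-memory database, not the flux-accounting schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE demo (id INTEGER PRIMARY KEY, usage_factor_period_0 REAL)")
# each table_info row is (cid, name, type, notnull, dflt_value, pk), so row[1] is the column name
for row in conn.execute("PRAGMA table_info(demo)"):
    print(row[1])
conn.close()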
Example #2
def main():

    parser = argparse.ArgumentParser(description="""
        Description: Translate command line arguments into
        SQLite instructions for the Flux Accounting Database.
        """)
    subparsers = parser.add_subparsers(help="sub-command help",
                                       dest="subcommand")
    subparsers.required = True

    add_arguments_to_parser(parser, subparsers)
    args = parser.parse_args()

    path = set_db_location(args)

    # if we are creating the DB for the first time, we need
    # to ONLY create the DB and then exit out successfully
    if args.func == "create_db":
        c.create_db(args.dbpath, args.priority_usage_reset_period,
                    args.priority_decay_half_life)
        sys.exit(0)

    conn = establish_sqlite_connection(path)

    output_file = set_output_file(args)

    try:
        select_accounting_function(args, conn, output_file, parser)
    finally:
        conn.close()
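For context, establish_sqlite_connection is not shown above; below is a minimal sketch of what such a helper might do. The function name comes from the snippet, but the body is an assumption rather than the project's actual implementation. Opening with mode=rw makes the connection fail when the file does not exist, which is consistent with the create_db branch exiting before this call is reached.

import sqlite3
import sys


def establish_sqlite_connection(path):
    # hypothetical sketch: open an existing database read-write and fail
    # loudly if it is missing, instead of letting sqlite3 create an empty file
    try:
        return sqlite3.connect("file:" + path + "?mode=rw", uri=True)
    except sqlite3.OperationalError as exc:
        print(f"Unable to open database file: {path} ({exc})", file=sys.stderr)
        sys.exit(1)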
Example #3
 def test_06_job_usage_factor_table_default(self):
     c.create_db("flux_accounting_test_1.db")
     columns_query = "PRAGMA table_info(job_usage_factor_table)"
     test_conn = sqlite3.connect("flux_accounting_test_1.db")
     expected = [
         "usage_factor_period_0",
         "usage_factor_period_1",
         "usage_factor_period_2",
         "usage_factor_period_3",
     ]
     test = []
     cursor = test_conn.cursor()
     for row in cursor.execute(columns_query):
         if "usage_factor" in row[1]:
             test.append(row[1])
     self.assertEqual(test, expected)
Example #4
 @classmethod
 def setUpClass(self):
     # create example accounting database
     c.create_db("FluxAccounting.db")
     global conn
     conn = sqlite3.connect("FluxAccounting.db")
Example #5
 @classmethod
 def setUpClass(self):
     # create example accounting database
     c.create_db("TestAcctingSubcommands.db")
     global acct_conn
     global jobs_conn
     acct_conn = sqlite3.connect("TestAcctingSubcommands.db")
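Neither setUpClass snippet shows cleanup. A hedged sketch of a matching tearDownClass (an assumption; the real test suites may handle cleanup differently, and this assumes import os at the top of the module and placement inside the same unittest.TestCase class as the Example #4 setUpClass):

 @classmethod
 def tearDownClass(self):
     # close the shared connection and delete the test database file
     # so repeated runs start from a clean state
     conn.close()
     os.remove("FluxAccounting.db")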
Example #6
    @classmethod
    def setUpClass(self):
        global jobs_conn

        # create example job-archive database, output file
        global op
        op = "job_records.csv"

        jobs_conn = sqlite3.connect("file:jobs.db?mode=rwc", uri=True)
        jobs_conn.execute(
            """
                CREATE TABLE IF NOT EXISTS jobs (
                    id            int       NOT NULL,
                    userid        int       NOT NULL,
                    username      text      NOT NULL,
                    ranks         text      NOT NULL,
                    t_submit      real      NOT NULL,
                    t_sched       real      NOT NULL,
                    t_run         real      NOT NULL,
                    t_cleanup     real      NOT NULL,
                    t_inactive    real      NOT NULL,
                    eventlog      text      NOT NULL,
                    jobspec       text      NOT NULL,
                    R             text      NOT NULL,
                    PRIMARY KEY   (id)
            );"""
        )

        c.create_db("FluxAccountingUsers.db")
        global acct_conn
        acct_conn = sqlite3.connect("FluxAccountingUsers.db")

        # simulate end of half life period in FluxAccounting database
        update_stmt = """
            UPDATE t_half_life_period_table SET end_half_life_period=?
            WHERE cluster='cluster'
            """
        acct_conn.execute(update_stmt, ("10000000",))
        acct_conn.commit()

        # add bank hierarchy
        aclif.add_bank(acct_conn, bank="A", shares=1)
        aclif.add_bank(acct_conn, bank="B", parent_bank="A", shares=1)
        aclif.add_bank(acct_conn, bank="C", parent_bank="B", shares=1)
        aclif.add_bank(acct_conn, bank="D", parent_bank="B", shares=1)

        # add users
        aclif.add_user(acct_conn, username="******", bank="C")
        aclif.add_user(acct_conn, username="******", bank="C")
        aclif.add_user(acct_conn, username="******", bank="D")
        aclif.add_user(acct_conn, username="******", bank="D")

        jobid = 100
        interval = 0  # add to job timestamps to diversify job-archive records

        @mock.patch("time.time", mock.MagicMock(return_value=9000000))
        def populate_job_archive_db(jobs_conn, userid, username, ranks, num_entries):
            nonlocal jobid
            nonlocal interval
            t_inactive_delta = 2000

            for i in range(num_entries):
                try:
                    jobs_conn.execute(
                        """
                        INSERT INTO jobs (
                            id,
                            userid,
                            username,
                            ranks,
                            t_submit,
                            t_sched,
                            t_run,
                            t_cleanup,
                            t_inactive,
                            eventlog,
                            jobspec,
                            R
                        )
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                        """,
                        (
                            jobid,
                            userid,
                            username,
                            ranks,
                            (time.time() + interval) - 2000,
                            (time.time() + interval) - 1000,
                            (time.time() + interval),
                            (time.time() + interval) + 1000,
                            (time.time() + interval) + t_inactive_delta,
                            "eventlog",
                            "jobspec",
                            '{"version":1,"execution": {"R_lite":[{"rank":"0","children": {"core": "0"}}]}}',
                        ),
                    )
                    # commit changes
                    jobs_conn.commit()
                # make sure entry is unique
                except sqlite3.IntegrityError as integrity_error:
                    print(integrity_error)

                jobid += 1
                interval += 10000
                t_inactive_delta += 100

        # populate the job-archive DB with fake job entries
        populate_job_archive_db(jobs_conn, 1001, "1001", "0", 2)

        populate_job_archive_db(jobs_conn, 1002, "1002", "0-1", 3)
        populate_job_archive_db(jobs_conn, 1002, "1002", "0", 2)

        populate_job_archive_db(jobs_conn, 1003, "1003", "0-2", 3)

        populate_job_archive_db(jobs_conn, 1004, "1004", "0-3", 4)
        populate_job_archive_db(jobs_conn, 1004, "1004", "0", 4)
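The calls above insert 2 + 5 + 3 + 8 = 18 synthetic records in total. A quick sanity query against the resulting job-archive (plain sqlite3, assuming jobs.db exists in the current directory after setUpClass has run):

import sqlite3

conn = sqlite3.connect("file:jobs.db?mode=rw", uri=True)
for userid, num_jobs in conn.execute(
    "SELECT userid, COUNT(*) FROM jobs GROUP BY userid ORDER BY userid"
):
    print(userid, num_jobs)
# expected per the populate calls: 1001 -> 2, 1002 -> 5, 1003 -> 3, 1004 -> 8
conn.close()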