Beispiel #1
0
    def new_db(self):
        """Build, bind and return a fresh HPCStatsDB handle.

        The connection uses the test configuration stored on ``self.conf``;
        psycopg2 is expected to be mocked by the caller.
        """
        database = HPCStatsDB(self.conf)
        database.bind()
        return database
class TestsProjectImporterCSVUpdate(HPCStatsTestCase):
    """Tests covering ProjectImporterCSV.update()."""

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        # Build a fake configuration bound to the mocked PostgreSQL layer,
        # then instantiate the importer under test.
        self.filename = 'fake'
        self.cluster = 'testcluster'
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG
        self.app = None
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.importer = ProjectImporterCSV(self.app, self.db, self.conf)

    def test_update(self):
        """ProjectImporterCSV.update() works with simple data
        """
        domain1 = Domain('dom1', 'domain name 1')
        project1 = Project(domain1, 'code1', 'project description 1')

        # Associate the expected INSERT parameters with a fake returned id.
        MockPg2.PG_REQS['save_project'].set_assoc(
            params=(project1.code, project1.description, domain1.key),
            result=[[1]],
        )
        self.importer.projects = [project1]
        self.importer.domains = [domain1]

        self.importer.update()
class TestsProjectImporterCSVUpdate(HPCStatsTestCase):
    """Tests covering ProjectImporterCSV.update()."""

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        # Fake configuration bound to the mocked PostgreSQL layer.
        self.filename = 'fake'
        self.cluster = 'testcluster'
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG
        self.app = None
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.importer = ProjectImporterCSV(self.app, self.db, self.conf)

    def test_update(self):
        """ProjectImporterCSV.update() works with simple data
        """

        domain1 = Domain('dom1', 'domain name 1')
        project1 = Project(domain1, 'code1', 'project description 1')

        # Associate the expected INSERT parameters with a fake returned id.
        MockPg2.PG_REQS['save_project'].set_assoc(params=(project1.code,
                                                          project1.description,
                                                          domain1.key),
                                                  result=[[1]])
        self.importer.projects = [project1]
        self.importer.domains = [domain1]

        self.importer.update()
Beispiel #4
0
class TestsBusinessCodeImporterCSVUpdate(HPCStatsTestCase):
    """Tests covering BusinessCodeImporterCSV.update()."""

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        # Fake configuration bound to the mocked PostgreSQL layer.
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster.name)
        self.conf.conf = CONFIG
        self.app = None
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.importer = BusinessCodeImporterCSV(self.app, self.db, self.conf)

    def test_update_not_exists(self):
        """BusinessCodeImporterCSV.update() works when business code does not
           exist
        """
        business1 = Business('code1', 'business description 1')
        self.importer.businesses = [business1]
        self.importer.update()

    @mock.patch("%s.Business.save" % (module))
    def test_update_not_exists_with_mock(self, mock_save):
        """BusinessCodeImporterCSV.update() calls Business.save() when
           business code does not exist
        """

        business1 = Business('code1', 'business description 1')

        # Empty result: business code is not found in DB.
        MockPg2.PG_REQS['existing_business'].set_assoc(
            params=(business1.code, ), result=[])

        self.importer.businesses = [business1]
        self.importer.update()
        mock_save.assert_called_with(self.db)

    def test_update_exists(self):
        """BusinessCodeImporterCSV.update() works when business code exists
        """

        business1 = Business('code1', 'business description 1')

        MockPg2.PG_REQS['existing_business'].set_assoc(
            params=(business1.code, ), result=[['code1']])

        self.importer.businesses = [business1]
        self.importer.update()

    @mock.patch("%s.Business.update" % (module))
    def test_update_exists_with_mock(self, mock_update):
        """BusinessCodeImporterCSV.update() calls Business.update() when
           business code exists
        """
        business1 = Business('code1', 'business description 1')

        MockPg2.PG_REQS['existing_business'].set_assoc(
            params=(business1.code, ), result=[['code1']])

        self.importer.businesses = [business1]
        self.importer.update()
        mock_update.assert_called_with(self.db)
class TestsArchitectureImporterArchfileUpdate(HPCStatsTestCase):
    """Tests covering ArchitectureImporterArchfile.update()."""

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        # Fake configuration bound to the mocked PostgreSQL layer.
        self.filename = 'fake'
        self.cluster = 'test_cluster'
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG.copy()
        self.app = None
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.importer = ArchitectureImporterArchfile(self.app,
                                                     self.db,
                                                     self.conf,
                                                     self.cluster)
        init_reqs()

    def test_update(self):
        """ArchitectureImporterArchfile.update() creates cluster and node if
           not existing
        """

        cluster1 = Cluster('cluster1')
        node1 = Node('node1', cluster1, 'model1', 'test_partition', 12, 6 * 1024 ** 3, 1)

        # BUG FIX: params must be a 1-tuple. The previous ( cluster1.name )
        # without trailing comma is just a bare string, so the mocked request
        # parameters could not match the tuple passed to cursor.execute().
        MockPg2.PG_REQS['save_cluster'].set_assoc(
          params=( cluster1.name, ),
          result=[ [ 1 ] ]
        )
        MockPg2.PG_REQS['save_node'].set_assoc(
          params=( node1.name, cluster1.cluster_id, node1.partition,
                   node1.cpu, node1.memory, node1.flops ),
          result=[ [ 1 ] ]
        )
        self.importer.cluster = cluster1
        self.importer.nodes = [ node1 ]

        self.importer.update()

    def test_update_2(self):
        """ArchitectureImporterArchfile.update() detects existing cluster and
           node
        """

        cluster1 = Cluster('cluster1')
        node1 = Node('node1', cluster1, 'model1', 'test_partition', 12, 6 * 1024 ** 3, 1)

        # Non-empty results: cluster and node are found in DB, so update()
        # must not try to save them again.
        MockPg2.PG_REQS['find_cluster'].set_assoc(
          params=( cluster1.name, ),
          result=[ [ 1 ] ]
        )
        MockPg2.PG_REQS['find_node'].set_assoc(
          params=( node1.name, cluster1.cluster_id, ),
          result=[ [ 1 ] ]
        )
        self.importer.cluster = cluster1
        self.importer.nodes = [ node1 ]

        self.importer.update()
Beispiel #6
0
class TestsUserImporterLdapSlurm(HPCStatsTestCase):
    """Tests covering UserImporterLdapSlurm.load() and update()."""

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        # Fake configuration, mocked DB and app, importer under test.
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        self.cluster.cluster_id = 0
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG.copy()
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.app = MockApp(self.db, self.conf, self.cluster)
        self.importer = UserImporterLdapSlurm(self.app,
                                              self.db,
                                              self.conf,
                                              self.cluster)
        # Disable strict_user_membership to avoid exception when user found
        # in Slurm and not in LDAP then.
        self.importer.strict_user_membership = False
        init_reqs()

    def test_init(self):
        """UserImporterLdapSlurm.__init__() initializes w/o error
        """
        pass

    @mock.patch("%s.ldap" % (p_module), mock_ldap())
    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_user_no_ldap(self):
        """UserImporterLdapSlurm.load() should not load user from Slurm if not
           found in LDAP.
        """

        users = [ ]
        users_no_group = [ ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users, users_no_group)

        # Slurm knows login1 but LDAP does not: user must be discarded.
        MockMySQLdb.MY_REQS['get_users']['res'] = \
        [ [ 'login1' ] ]

        self.importer.load()
        self.assertEquals(len(self.importer.users), 0)

    @mock.patch("%s.ldap" % (p_module), mock_ldap())
    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_user_ok_ldap(self):
        """UserImporterLdapSlurm.load() should load user from Slurm if also
           found in LDAP.
        """

        users = [ ]
        users_no_group = [ 'login1' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users, users_no_group)

        MockMySQLdb.MY_REQS['get_users']['res'] = \
        [ [ 'login1' ] ]

        self.importer.load()
        self.assertEquals(len(self.importer.users), 1)
        self.assertEquals(len(self.importer.users_acct_ldap), 0)
        self.assertEquals(len(self.importer.users_acct_slurm), 1)

    @mock.patch("%s.ldap" % (p_module), mock_ldap())
    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_user_in_group(self):
        """UserImporterLdapSlurm.load() should not load user from Slurm if
           already loaded because it is member of cluster group.
        """

        users = [ 'login1' ]
        users_no_group = [ ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users, users_no_group)

        MockMySQLdb.MY_REQS['get_users']['res'] = \
        [ [ 'login1' ] ]

        self.importer.load()
        self.assertEquals(len(self.importer.users), 1)
        self.assertEquals(len(self.importer.users_acct_ldap), 1)
        self.assertEquals(len(self.importer.users_acct_slurm), 0)


    @mock.patch("%s.ldap" % (p_module), mock_ldap())
    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_no_redundancy(self):
        """UserImporterLdapSlurm.load() should manage redundancy with LDAP.
        """

        users = [ 'login1', 'login2', 'login3' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        # login1/login2 overlap with LDAP, login4 is Slurm-only.
        MockMySQLdb.MY_REQS['get_users']['res'] = \
        [ [ 'login1' ], [ 'login2' ], [ 'login4' ] ]

        self.importer.load()
        self.assertEquals(len(self.importer.users), 3)
        self.assertEquals(len(self.importer.accounts), 3)

    @mock.patch("%s.ldap" % (p_module), mock_ldap())
    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    @mock.patch("%s.User.save" % (module))
    @mock.patch("%s.Account.save" % (module))
    def test_update_new_user(self, m_account_save, m_user_save):
        """UserImporterLdapSlurm.update() should save the user and the account
           if not existing in DB
        """

        users = [ ]
        users_no_group = [ 'login1' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users, users_no_group)

        MockMySQLdb.MY_REQS['get_users']['res'] = \
        [ [ 'login1' ] ]

        user1_id = 1

        # Empty results: neither the user nor the account exists in DB.
        MockPg2.PG_REQS['find_user'].set_assoc(
          params=( 'login1', ),
          result=[ ]
          )
        MockPg2.PG_REQS['existing_account'].set_assoc(
          params=( user1_id, self.cluster.cluster_id, ),
          result=[ ]
          )

        self.importer.load()
        self.importer.update()
        self.assertEquals(self.importer.accounts[0].creation_date,
                          date(1970, 1, 1))
        self.assertEquals(self.importer.accounts[0].deletion_date,
                          date(1970, 1, 1))
        m_user_save.assert_called_with(self.db)
        m_account_save.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (p_module), mock_ldap())
    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    @mock.patch("%s.User.update" % (module))
    @mock.patch("%s.Account.update" % (module))
    def test_update_user_account_exist(self, m_account_update, m_user_update):
        """UserImporterLdapSlurm.update() should update the user and do not
           touch the account if they already exist in DB
        """

        users = [ ]
        users_no_group = [ 'login1' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users, users_no_group)

        MockMySQLdb.MY_REQS['get_users']['res'] = \
        [ [ 'login1' ] ]

        user1_id = 1

        # Non-empty results: user and account already exist in DB.
        MockPg2.PG_REQS['find_user'].set_assoc(
          params=( 'login1', ),
          result=[ [ user1_id ] ]
          )
        MockPg2.PG_REQS['existing_account'].set_assoc(
          params=( user1_id, self.cluster.cluster_id, ),
          result=[ [ 0 ] ]
          )

        self.importer.load()
        self.importer.update()
        m_user_update.assert_called_with(self.db)
        # ensure Account.update() is not called
        self.assertRaises(AssertionError,
                          m_account_update.assert_called_with,
                          self.db,
                          None)
Beispiel #7
0
class TestsUserImporterLdap(HPCStatsTestCase):

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        self.cluster.cluster_id = 0
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG.copy()
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.app = MockApp(self.db, self.conf, self.cluster)
        self.importer = UserImporterLdap(self.app,
                                         self.db,
                                         self.conf,
                                         self.cluster)
        init_reqs()

    def test_init(self):
        """UserImporterLdap.__init__() initializes w/o error
        """
        pass

    @mock.patch("%s.ldap" % (module), mock_ldap())
    def test_load_simple(self):
        """UserImporterLdap.load() should work with simple data from LDAP.
        """

        users = [ 'login1', 'login2', 'login3' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc (
          params=( 0, ),
          result=[ ]
        )

        self.importer.load()
        self.assertEquals(len(self.importer.users), 3)
        self.assertEquals(len(self.importer.accounts), 3)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    def test_load_simple2(self):
        """UserImporterLdap.load() should work with simple data from LDAP and
           HPCStatsDB.
        """

        users = [ 'login1', 'login2', 'login3' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        creation_user4 = datetime(2015, 3, 2, 16, 0, 1)
        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc(
          params=( 0, ),
          result=[ [ 0, 'login4', 'name_user4', 'firstname_user4',
                     'department_user4', 0, 0, creation_user4 ] ]
          )

        self.importer.load()
        self.assertEquals(len(self.importer.users), 4)
        self.assertEquals(len(self.importer.accounts), 4)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.save" % (module))
    @mock.patch("%s.Account.save" % (module))
    def test_load_update_new_user_other_accounts(self, m_account_save, m_user_save):
        """UserImporterLdap.update() create new user/account found in LDAP
           and not found in HPCStatsDB with a creation date equals to today
           because there are already existing accounts.
        """

        users = [ 'login1' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ ]
          )
        MockPg2.PG_REQS['nb_existing_accounts'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ [ 0 ], [ 1 ] ]
          )

        self.importer.load()
        self.importer.update()
        self.assertEquals(self.importer.accounts[0].creation_date, date.today())
        m_user_save.assert_called_with(self.db)
        m_account_save.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.save" % (module))
    @mock.patch("%s.Account.save" % (module))
    def test_load_update_new_user_no_account(self, m_account_save, m_user_save):
        """UserImporterLdap.update() save new user/account found in LDAP
           and not found in HPCStatsDB with a creation date equals to epoch
           because there is none existing accounts
        """

        users = [ 'login1' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        self.importer.load()
        self.importer.update()
        self.assertEquals(self.importer.accounts[0].creation_date, date(1970, 1, 1))
        m_user_save.assert_called_with(self.db)
        m_account_save.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.Account.update" % (module))
    def test_load_update_close_account(self, m_account_update):
        """UserImporterLdap.update() close account found as unclosed in
           HPCStatsDB and not found in LDAP.
        """

        users = [ ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        creation_user2 = datetime(2015, 3, 2, 16, 0, 1)

        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ [ 2, 'login2', 'name_user2', 'firstname_user2',
                     'department_user2', 0, 0, creation_user2 ] ]
          )

        self.importer.load()
        self.importer.update()
        self.assertEquals(self.importer.accounts[0].deletion_date, date.today())
        m_account_update.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.update" % (module))
    @mock.patch("%s.Account.save" % (module))
    def test_load_update_user_wo_account(self, m_account_save, m_user_update):
        """UserImporterLdap.update() create account and update user found in
           LDAP and in HPCStatsDB but w/o account on the cluster.
        """

        users = [ 'login3' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        user3_id = 3

        MockPg2.PG_REQS['find_user'].set_assoc(
          params=( 'login3', ),
          result=[ [ user3_id ] ]
          )
        MockPg2.PG_REQS['existing_account'].set_assoc(
          params=( user3_id, self.cluster.cluster_id, ),
          result=[ ]
          )

        self.importer.load()
        self.importer.update()
        m_user_update.assert_called_with(self.db)
        m_account_save.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.update" % (module))
    @mock.patch("%s.Account.update" % (module))
    def test_load_update_user_w_account(self, m_account_update, m_user_update):
        """UserImporterLdap.update() update user found in LDAP and in HPCStatsDB
           with an unclosed account on the cluster.
        """

        users = [ 'login4' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        creation_user4 = datetime(2015, 3, 2, 16, 0, 1)
        user4_id = 4
        MockPg2.PG_REQS['find_user'].set_assoc(
          params=( 'login4', ),
          result=[ [ user4_id ] ]
          )
        MockPg2.PG_REQS['existing_account'].set_assoc(
          params=( user4_id, self.cluster.cluster_id, ),
          result=[ [ 0 ] ]
          )
        MockPg2.PG_REQS['load_account'].set_assoc(
          params=( user4_id, self.cluster.cluster_id, ),
          result=[ [ 0, 0, creation_user4, None ] ]
        )

        self.importer.load()
        self.importer.update()
        m_user_update.assert_called_with(self.db)
        # ensure Account.update() is not called
        self.assertRaises(AssertionError,
                          m_account_update.assert_called_with,
                          self.db,
                          None)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.update" % (module))
    @mock.patch("%s.Account.update" % (module))
    def test_load_update_user_closed_account(self, m_account_update, m_user_update):
        """UserImporterLdap.update() update user found in LDAP and in HPCStatsDB
           with a closed account on the cluster.
        """
        users = [ 'login5' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        user5_creation = datetime(2015, 3, 2, 16, 0, 1)
        user5_deletion = datetime(2015, 3, 2, 16, 0, 1)
        user5_id = 5
        MockPg2.PG_REQS['find_user'].set_assoc(
          params=( 'login5', ),
          result=[ [ user5_id ] ]
          )
        MockPg2.PG_REQS['existing_account'].set_assoc(
          params=( user5_id, self.cluster.cluster_id, ),
          result=[ [ 0 ] ]
          )
        MockPg2.PG_REQS['load_account'].set_assoc(
          params=( user5_id, self.cluster.cluster_id, ),
          result=[ [ 0, 0, user5_creation, user5_deletion ] ]
        )

        self.importer.load()
        self.importer.update()
        self.assertEquals(self.importer.accounts[0].deletion_date, None)
        m_user_update.assert_called_with(self.db)
        m_account_update.assert_called_with(self.db)
Beispiel #8
0
class TestsJobImporterSlurm(HPCStatsTestCase):
    """Tests covering JobImporterSlurm load() and helpers."""

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        # Fake configuration, mocked DB and app, importer under test.
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG.copy()
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.app = MockApp(self.db, self.conf, self.cluster)
        self.importer = JobImporterSlurm(self.app,
                                         self.db,
                                         self.conf,
                                         self.cluster)
        init_reqs()

    def test_init(self):
        """JobImporterSlurm.__init__() initializes object with attributes
        """
        self.assertEquals(self.importer._dbhost,
                          self.conf.conf[self.cluster.name + '/slurm']['host'])

    def load_app(self):
        """Load App objects for JobImporterSlurm.load() normal operation."""
        j1_submit = datetime(2015, 3, 2, 16, 0, 1)
        j1_start = datetime(2015, 3, 2, 16, 0, 2)
        j1_end = datetime(2015, 3, 2, 16, 0, 3)
        j1_submit_ts = time.mktime(j1_submit.timetuple())
        j1_start_ts = time.mktime(j1_start.timetuple())
        j1_end_ts = time.mktime(j1_end.timetuple())

        node1 = Node('node1', self.cluster, 'model1', 'partition1', 4, 4, 0)
        node2 = Node('node2', self.cluster, 'model1', 'partition1', 4, 4, 0)

        a1_create = datetime(2010, 1, 1, 12, 0, 0)
        user1 = User('user1', 'firstname1', 'lastname1', 'department1')
        account1 = Account(user1, self.cluster, 1000, 1000, a1_create, None)

        domain1 = Domain('domain1', 'domain 1')
        project1 = Project(domain1, 'project1', 'description project 1')

        business1 = Business('business1', 'business description 1')

        self.app.arch.nodes = [ node1, node2 ]
        self.app.users.accounts = [ account1 ]
        self.app.projects.projects = [ project1 ]
        self.app.business.businesses = [ business1 ]

        # One Slurm job row: TRES '1=4' means 4 allocated cpus, wckey is
        # 'project:business'.
        MockMySQLdb.MY_REQS['get_jobs_after_batchid']['res'] = \
          [
            [ 0, 0, 1000, 1000, j1_submit_ts, j1_start_ts, j1_end_ts,
              2, '1=4', 'partition1', 'qos1', 1, 'node[1-2]', 'user1',
              'job1', 'project1:business1' ],
          ]

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_is_old_schema(self):
        """JobImporterSlurm._is_old_schema() should return True if SlurmDBD
           <15.08 is detected, False otherwise."""

        self.load_app()
        self.importer.connect_db()
        # Old schema exposes a cpus_alloc column in the job table.
        MockMySQLdb.MY_REQS['job_table_cols']['res'] = \
          [ [ 'cpus_alloc', ] , ]
        self.assertEquals(self.importer._is_old_schema(), True)

        MockMySQLdb.MY_REQS['job_table_cols']['res'] = []
        self.assertEquals(self.importer._is_old_schema(), False)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load(self):
        """JobImporterSlurm.load() works with simple data."""

        self.load_app()
        # make sure new-schema is used here
        MockMySQLdb.MY_REQS['job_table_cols']['res'] = [ ]

        self.importer.load()

        self.assertEquals(len(self.importer.jobs), 1)
        self.assertEquals(len(self.importer.runs), 2)

        job = self.importer.jobs[0]

        self.assertEquals(job.nbcpu, 4)
        self.assertEquals(job.state, 'RUNNING')
        self.assertEquals(job.name, 'job1')
        self.assertEquals(job.queue, 'partition1-qos1')
        self.assertEquals(job.account, self.app.users.accounts[0])
        self.assertEquals(job.project, self.app.projects.projects[0])
        self.assertEquals(job.business, self.app.business.businesses[0])

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_old_schema(self):
        """JobImporterSlurm.load() works with simple data from old SlurmDBD
           <15.08 schema."""

        self.load_app()

        MockMySQLdb.MY_REQS['job_table_cols']['res'] = \
          [ [ 'cpus_alloc', ] , ]
        # replace TRES '1=4' by cpus_alloc 4
        MockMySQLdb.MY_REQS['get_jobs_after_batchid']['res'][0][8] = 4

        self.importer.load()

        self.assertEquals(len(self.importer.jobs), 1)
        self.assertEquals(len(self.importer.runs), 2)

        job = self.importer.jobs[0]

        self.assertEquals(job.nbcpu, 4)
        self.assertEquals(job.state, 'RUNNING')
        self.assertEquals(job.name, 'job1')
        self.assertEquals(job.queue, 'partition1-qos1')
        self.assertEquals(job.account, self.app.users.accounts[0])
        self.assertEquals(job.project, self.app.projects.projects[0])
        self.assertEquals(job.business, self.app.business.businesses[0])

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    @mock.patch("%s.JobImporterSlurm.get_jobs_after_batchid" % (module))
    def test_load_search_batchid(self, mock_get_jobs):
        """JobImporterSlurm.load() must search jobs after correct batch_id."""

        # BUG FIX: all params below must be 1-tuples. The previous
        # ( self.cluster.cluster_id ) without trailing comma is a bare int,
        # so the mocked request parameters could not match the tuple passed
        # to cursor.execute().
        MockPg2.PG_REQS['get_batchid_oldest_unfinished'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ [ 2 ] ]
        )
        MockPg2.PG_REQS['get_batchid_last'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ [ 3 ] ]
        )

        self.importer.load()
        mock_get_jobs.assert_called_with(2)

        # None unfinished job, search must be done with batch_id of last job.
        MockPg2.PG_REQS['get_batchid_oldest_unfinished'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ ]
        )
        MockPg2.PG_REQS['get_batchid_last'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ [ 4 ] ]
        )

        self.importer.load()
        mock_get_jobs.assert_called_with(4)

        # No job in DB: search starting -1.
        MockPg2.PG_REQS['get_batchid_oldest_unfinished'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ ]
        )
        MockPg2.PG_REQS['get_batchid_last'].set_assoc(
          params=( self.cluster.cluster_id, ),
          result=[ ]
        )

        self.importer.load()
        mock_get_jobs.assert_called_with(-1)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_account_not_found(self):
        """JobImporterSlurm.load() raises exception when account not found"""

        self.load_app()
        self.app.users.accounts = [ ]

        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "account user1 not found in loaded account",
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_invalid_tres(self):
        """JobImporterSlurm.load() raises exception if invalid tres for a job
           is found"""

        self.load_app()

        # '0=0' has no cpu TRES id, cpus_alloc cannot be extracted.
        MockMySQLdb.MY_REQS['get_jobs_after_batchid']['res'][0][8] = '0=0'
        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "unable to extract cpus_alloc from job tres",
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_invalid_wckey(self):
        """JobImporterSlurm.load() raises exception when format of wckey is
           invalid.
        """

        self.load_app()

        # wckey must follow the 'project:business' format.
        MockMySQLdb.MY_REQS['get_jobs_after_batchid']['res'][0][15] = 'fail'

        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "format of wckey fail is not valid",
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_project_not_found(self):
        """JobImporterSlurm.load() raises exception when project not found."""

        self.load_app()
        self.app.projects.projects = [ ]

        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "project project1 not found in loaded projects",
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_business_not_found(self):
        """JobImporterSlurm.load() raises exception when business not found."""

        self.load_app()
        self.app.business.businesses = [ ]

        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "business code business1 not found in loaded business codes",
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_invalid_nodelist(self):
        """JobImporterSlurm.load() raises exception when format of nodelist
           is invalid.
        """

        self.load_app()

        # Reversed range [5-4] is not a parseable nodeset.
        MockMySQLdb.MY_REQS['get_jobs_after_batchid']['res'][0][12] = \
          'nodelistfail[5-4]'

        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "could not parse nodeset nodelistfail\[5\-4\] for job 0",
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_node_not_found(self):
        """JobImporterSlurm.load() raises exception when node not found."""

        self.load_app()
        self.app.arch.nodes = [ ]

        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "unable to find node node1 for job 0 in loaded nodes",
               self.importer.load)

    def test_job_partition(self):
        """JobImporterSlurm.job_partition() must return the correct partition
           based on job partition list and its nodelist.
        """

        # Only one element in job partition list: it must be returned whatever
        # the nodelist and ArchitectureImporter job partitions
        self.app.arch.partitions = { }
        result = self.importer.job_partition(0, 'partition2', 'node[1-100]')
        self.assertEquals(result, 'partition2')

        # Multiple elements but None nodelist: it must return arbitrary the
        # first partition
        self.app.arch.partitions = { }
        result = self.importer.job_partition(0, 'partition1,partition2', None)
        self.assertEquals(result, 'partition1')

        # Multiple elements in partition and defined nodelist: it must return
        # a corresponding partition loaded by ArchitectureImporter and
        # associated to a nodelist that fully intersects
        self.app.arch.partitions = { 'node[1-100]': [ 'partitionX', 'partition2'] }
        result = self.importer.job_partition(0, 'partition1,partition2', 'node[1-100]')
        self.assertEquals(result, 'partition2')

        self.app.arch.partitions = { 'node[1-99]': [ 'partition1' ],
                                     'node[1-100],bm[1-10]': [ 'partitionX', 'partition2' ] }
        result = self.importer.job_partition(0, 'partition1,partition2', 'node[1-100]')
        self.assertEquals(result, 'partition2')
class TestsEventImporterSlurm(HPCStatsTestCase):

    """Tests for EventImporterSlurm against mocked SlurmDBD (MySQL) and
       HPCStats DB (psycopg2) back-ends.

       NOTE(review): tests drive the mocks through the module-level
       MockMySQLdb.MY_REQS and MockPg2.PG_REQS dicts, so statement order
       inside each test method is significant.
    """

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        # setup conf
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        # Rebase HPCStatsConf on the mock parser so the CONFIG dict is read
        # instead of a real INI file.
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG.copy()
        # setup importer
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.app = MockApp(self.db, self.conf, self.cluster)
        self.importer = EventImporterSlurm(self.app, self.db, self.conf,
                                           self.cluster)
        init_reqs()
        # setup logger
        logging.setLoggerClass(HPCStatsLogger)
        self.logger = logging.getLogger(__name__)
        self.handler = MockLoggingHandler()
        self.logger.addHandler(self.handler)
        self.handler.reset()
        HPCStatsLogger.set_error_mgr(HPCStatsErrorMgr(self.conf))

    def test_init(self):
        """EventImporterSlurm.__init__() initializes w/o error
        """
        # Actual instantiation happens in setUp().
        pass

    def init_load_data(self):
        """Utility method to initialize mock data so that load() succeeds."""

        self.e1_start = datetime(2015, 3, 2, 15, 59, 59)
        self.e1_end = datetime(2015, 3, 2, 16, 0, 0)
        self.node_name = 'node1'
        e1_start_ts = time.mktime(self.e1_start.timetuple())
        e1_end_ts = time.mktime(self.e1_end.timetuple())

        # One event row: tres string '1=16' carries a 16-CPU count and 35 is
        # the raw node state bitmap (decoded as ALLOCATED+RES by
        # test_load_simple below).
        MockMySQLdb.MY_REQS['get_events']['res'] = \
          [
            [ e1_start_ts, e1_end_ts, self.node_name, '1=16', 35, 'reason1' ],
          ]
        # No cpu_count column in the event table: new (>=15.08) schema.
        MockMySQLdb.MY_REQS['event_table_cols']['res'] = []

        self.app.arch.nodes = [
            Node(self.node_name, self.cluster, 'model1', 'partition1', 16, 8,
                 0),
        ]

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_is_old_schema(self):
        """EventImporterSlurm._is_old_schema() should return True if SlurmDBD
           <15.08 is detected, False otherwise."""

        self.importer.connect_db()
        # Presence of a cpu_count column means the old schema.
        MockMySQLdb.MY_REQS['event_table_cols']['res'] = \
          [ [ 'cpu_count', ] , ]
        self.assertEquals(self.importer._is_old_schema(), True)

        MockMySQLdb.MY_REQS['event_table_cols']['res'] = []
        self.assertEquals(self.importer._is_old_schema(), False)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_simple(self):
        """EventImporterSlurm.load() works with simple data."""

        self.init_load_data()
        self.importer.load()
        self.assertEquals(1, len(self.importer.events))
        event = self.importer.events[0]
        self.assertEquals(event.start_datetime, self.e1_start)
        self.assertEquals(event.end_datetime, self.e1_end)
        self.assertEquals(event.nb_cpu, 16)
        self.assertEquals(event.event_type, 'ALLOCATED+RES')
        self.assertEquals(event.reason, 'reason1')

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_old_schema(self):
        """EventImporterSlurm.load() works with simple data from old SlurmDBD
           <15.08 schema."""

        self.init_load_data()
        MockMySQLdb.MY_REQS['event_table_cols']['res'] = \
          [ [ 'cpu_count', ] , ]
        # In old schema, column 3 holds a plain integer CPU count, not a
        # tres string.
        MockMySQLdb.MY_REQS['get_events']['res'][0][3] = 16
        self.importer.load()
        event = self.importer.events[0]
        self.assertEquals(event.nb_cpu, 16)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    @mock.patch("%s.EventImporterSlurm.get_new_events" % (module))
    def test_load_search_datetime(self, mock_new_events):
        """EventImporterSlurm.load() must search new events starting from
           correct datetime."""

        # Both datetimes are defined, search must be done with start datetime
        # of oldest unfinished event.
        d1 = datetime(2015, 3, 2, 15, 59, 59)
        d2 = datetime(2015, 3, 2, 16, 0, 0)
        d1_ts = time.mktime(d1.timetuple())
        d2_ts = time.mktime(d2.timetuple())

        # NOTE(review): params=(x) below is the bare value, not a 1-tuple
        # (missing trailing comma); presumably MockPg2 matching still works
        # this way — confirm intended.
        MockPg2.PG_REQS['get_end_last_event'].set_assoc(
            params=(self.cluster.cluster_id), result=[[d1_ts]])
        MockPg2.PG_REQS['get_start_oldest_unfinised_event'].set_assoc(
            params=(self.cluster.cluster_id), result=[[d2_ts]])

        self.importer.load()
        mock_new_events.assert_called_with(d2_ts)

        # None unfinished event, search must be done with end datetime of last
        # event.
        MockPg2.PG_REQS['get_end_last_event'].set_assoc(
            params=(self.cluster.cluster_id), result=[[d1_ts]])
        MockPg2.PG_REQS['get_start_oldest_unfinised_event'].set_assoc(
            params=(self.cluster.cluster_id), result=[])

        self.importer.load()
        mock_new_events.assert_called_with(d1_ts)

        default_datetime = datetime(1970, 1, 1, 0, 0)

        # No event in DB: search starting from epoch.
        MockPg2.PG_REQS['get_end_last_event'].set_assoc(
            params=(self.cluster.cluster_id), result=[])
        MockPg2.PG_REQS['get_start_oldest_unfinised_event'].set_assoc(
            params=(self.cluster.cluster_id), result=[])

        self.importer.load()
        mock_new_events.assert_called_with(default_datetime)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_unfound_node(self):
        """EventImporterSlurm.load() logs a warning and ignores the event
           when the event node was not loaded by ArchitectureImporter."""

        self.init_load_data()
        self.app.arch.nodes = []
        self.importer.log = self.logger
        self.importer.load()
        self.assertIn(
            "EventImporterSlurm: ERROR E_E0001: event node %s is "
            "unknown in cluster %s architecture, ignoring this event" %
            (self.node_name, self.cluster.name),
            self.handler.messages['warning'])

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_invalid_tres(self):
        """EventImporterSlurm.load() raises exception if invalid tres for an
           event is found"""

        self.init_load_data()
        # tres id 0 is not a CPU count.
        MockMySQLdb.MY_REQS['get_events']['res'][0][3] = '0=0'
        self.assertRaisesRegexp(HPCStatsSourceError,
                                "unable to extract cpu_count from event tres",
                                self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_merge_successive_events(self):
        """EventImporterSlurm.merge_successive_events() should merge successive
           events in the list if they are on the same node w/ same type.
        """

        # Three contiguous 10-minute windows.
        e1_start = datetime(2015, 3, 2, 16, 0, 0)
        e1_end = datetime(2015, 3, 2, 16, 10, 0)
        e2_start = datetime(2015, 3, 2, 16, 10, 0)
        e2_end = datetime(2015, 3, 2, 16, 20, 0)
        e3_start = datetime(2015, 3, 2, 16, 20, 0)
        e3_end = datetime(2015, 3, 2, 16, 30, 0)

        node1 = [
            Node('node1', self.cluster, 'model1', 'partition1', 16, 8, 0),
        ]
        node2 = [
            Node('node2', self.cluster, 'model1', 'partition1', 16, 8, 0),
        ]

        # 3 successive events on one node with same type, they must be merged
        # into one event.
        events = [
            Event(self.cluster, node1, 4, e1_start, e1_end, 'type1',
                  'reason1'),
            Event(self.cluster, node1, 4, e2_start, e2_end, 'type1',
                  'reason1'),
            Event(self.cluster, node1, 4, e3_start, e3_end, 'type1',
                  'reason1'),
        ]
        merged = self.importer.merge_successive_events(events)
        self.assertEquals(1, len(merged))
        self.assertEquals(merged[0].start_datetime, e1_start)
        self.assertEquals(merged[0].end_datetime, e3_end)
        self.assertEquals(merged[0].event_type, 'type1')
        self.assertEquals(merged[0].reason, 'reason1')

        # 3 successive events on one node node1 with same type, with one event
        # on another node node2 in the middle: all events on node1 must be
        # merged while the other event on node2 must stay as is.
        events = [
            Event(self.cluster, node1, 4, e1_start, e1_end, 'type1',
                  'reason1'),
            Event(self.cluster, node2, 4, e2_start, e2_end, 'type1',
                  'reason1'),
            Event(self.cluster, node1, 4, e2_start, e2_end, 'type1',
                  'reason1'),
            Event(self.cluster, node1, 4, e3_start, e3_end, 'type1',
                  'reason1'),
        ]
        merged = self.importer.merge_successive_events(events)
        self.assertEquals(2, len(merged))
        self.assertEquals(merged[0].start_datetime, e1_start)
        self.assertEquals(merged[0].end_datetime, e3_end)
        self.assertEquals(merged[1].end_datetime, e2_end)
        self.assertEquals(merged[0].node, node1)
        self.assertEquals(merged[1].node, node2)

        # 3 successive events on node1 but with different types, they must not
        # be merged.
        events = [
            Event(self.cluster, node1, 4, e1_start, e1_end, 'type1',
                  'reason1'),
            Event(self.cluster, node1, 4, e2_start, e2_end, 'type2',
                  'reason1'),
            Event(self.cluster, node1, 4, e3_start, e3_end, 'type1',
                  'reason1'),
        ]
        merged = self.importer.merge_successive_events(events)
        self.assertEquals(3, len(merged))

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_txt_slurm_event_type(self):
        """EventImporterSlurm.txt_slurm_event_type() should give the
           appropriate human readable string represation of an event type
           according to its hex bitmap value.
        """

        tests = [(0x0001, 'DOWN'), (0x0004, 'ERROR'), (0x0012, 'IDLE+NET'),
                 (0x8535, 'MIXED+NET+RES+RESUME+COMPLETING+MAINT')]
        for value, expected in tests:
            txt = EventImporterSlurm.txt_slurm_event_type(value)
            self.assertEquals(txt, expected)
class TestsBusinessCodeImporterCSVUpdate(HPCStatsTestCase):

    """Tests for BusinessCodeImporterCSV.update() against a mocked
       psycopg2 back-end."""

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        # Rebase HPCStatsConf on the mock parser so the CONFIG dict is read
        # instead of a real INI file.
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster.name)
        self.conf.conf = CONFIG
        self.app = None
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.importer = BusinessCodeImporterCSV(self.app, self.db, self.conf)

    def test_update_not_exists(self):
        """BusinessCodeImporterCSV.update() works when business code does not
           exist
        """
        business1 = Business('code1', 'business description 1')
        self.importer.businesses = [ business1 ]
        self.importer.update()

    @mock.patch("%s.Business.save" % (module))
    def test_update_not_exists_with_mock(self, mock_save):
        """BusinessCodeImporterCSV.update() calls Business.save() when
           business code does not exist
        """

        business1 = Business('code1', 'business description 1')

        # Empty result: the business code is unknown in DB.
        MockPg2.PG_REQS['existing_business'].set_assoc(
          params=( business1.code, ),
          result=[ ]
        )

        self.importer.businesses = [ business1 ]
        self.importer.update()
        mock_save.assert_called_with(self.db)

    def test_update_exists(self):
        """BusinessCodeImporterCSV.update() works when business code exists
        """

        business1 = Business('code1', 'business description 1')

        MockPg2.PG_REQS['existing_business'].set_assoc(
          params=( business1.code, ),
          result=[ [ 'code1' ] ]
        )

        self.importer.businesses = [ business1 ]
        self.importer.update()

    @mock.patch("%s.Business.update" % (module))
    def test_update_exists_with_mock(self, mock_update):
        """BusinessCodeImporterCSV.update() calls Business.update() when
           business code exists
        """
        business1 = Business('code1', 'business description 1')

        MockPg2.PG_REQS['existing_business'].set_assoc(
          params=( business1.code, ),
          result=[ [ 'code1' ] ]
        )

        self.importer.businesses = [ business1 ]
        self.importer.update()
        mock_update.assert_called_with(self.db)
# Beispiel #11
# 0
class TestsEventImporterSlurm(HPCStatsTestCase):

    # NOTE(review): this redefines TestsEventImporterSlurm declared earlier
    # in this file; only this later definition is bound at import time.

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        # Rebase HPCStatsConf on the mock parser so the CONFIG dict is read
        # instead of a real INI file.
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG.copy()
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.app = MockApp(self.db, self.conf, self.cluster)
        self.importer = EventImporterSlurm(self.app,
                                           self.db,
                                           self.conf,
                                           self.cluster)
        init_reqs()

    def test_init(self):
        """EventImporterSlurm.__init__() initializes w/o error
        """
        # Actual instantiation happens in setUp().
        pass

    def init_load_data(self):
        """Utility method to initialize mock data so that load() succeeds."""

        self.e1_start = datetime(2015, 3, 2, 15, 59, 59)
        self.e1_end = datetime(2015, 3, 2, 16, 0, 0)
        self.node_name = 'node1'
        e1_start_ts = time.mktime(self.e1_start.timetuple())
        e1_end_ts = time.mktime(self.e1_end.timetuple())

        # One event row: tres string '1=16' carries a 16-CPU count and 35 is
        # the raw node state bitmap (decoded as ALLOCATED+RES by
        # test_load_simple below).
        MockMySQLdb.MY_REQS['get_events']['res'] = \
          [
            [ e1_start_ts, e1_end_ts, self.node_name, '1=16', 35, 'reason1' ],
          ]
        # No cpu_count column in the event table: new (>=15.08) schema.
        MockMySQLdb.MY_REQS['event_table_cols']['res'] = [ ]

        self.app.arch.nodes = [ Node(self.node_name, self.cluster, 'model1', 'partition1', 16, 8, 0), ]

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_is_old_schema(self):
        """EventImporterSlurm._is_old_schema() should return True if SlurmDBD
           <15.08 is detected, False otherwise."""

        self.importer.connect_db()
        # Presence of a cpu_count column means the old schema.
        MockMySQLdb.MY_REQS['event_table_cols']['res'] = \
          [ [ 'cpu_count', ] , ]
        self.assertEquals(self.importer._is_old_schema(), True)

        MockMySQLdb.MY_REQS['event_table_cols']['res'] = []
        self.assertEquals(self.importer._is_old_schema(), False)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_simple(self):
        """EventImporterSlurm.load() works with simple data."""

        self.init_load_data()
        self.importer.load()
        self.assertEquals(1, len(self.importer.events))
        event = self.importer.events[0]
        self.assertEquals(event.start_datetime, self.e1_start)
        self.assertEquals(event.end_datetime, self.e1_end)
        self.assertEquals(event.nb_cpu, 16)
        self.assertEquals(event.event_type, 'ALLOCATED+RES')
        self.assertEquals(event.reason, 'reason1')

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_old_schema(self):
        """EventImporterSlurm.load() works with simple data from old SlurmDBD
           <15.08 schema."""

        self.init_load_data()
        MockMySQLdb.MY_REQS['event_table_cols']['res'] = \
          [ [ 'cpu_count', ] , ]
        # In old schema, column 3 holds a plain integer CPU count, not a
        # tres string.
        MockMySQLdb.MY_REQS['get_events']['res'][0][3] = 16
        self.importer.load()
        event = self.importer.events[0]
        self.assertEquals(event.nb_cpu, 16)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    @mock.patch("%s.EventImporterSlurm.get_new_events" % (module))
    def test_load_search_datetime(self, mock_new_events):
        """EventImporterSlurm.load() must search new events starting from
           correct datetime."""

        # Both datetimes are defined, search must be done with start datetime
        # of oldest unfinished event.
        d1 = datetime(2015, 3, 2, 15, 59, 59)
        d2 = datetime(2015, 3, 2, 16, 0, 0)
        d1_ts = time.mktime(d1.timetuple())
        d2_ts = time.mktime(d2.timetuple())

        # NOTE(review): params=( x ) below is the bare value, not a 1-tuple
        # (missing trailing comma); presumably MockPg2 matching still works
        # this way — confirm intended.
        MockPg2.PG_REQS['get_end_last_event'].set_assoc(
          params=( self.cluster.cluster_id ),
          result=[ [ d1_ts ] ]
        )
        MockPg2.PG_REQS['get_start_oldest_unfinised_event'].set_assoc(
          params= ( self.cluster.cluster_id ),
          result=[ [ d2_ts ] ]
        )

        self.importer.load()
        mock_new_events.assert_called_with(d2_ts)

        # None unfinished event, search must be done with end datetime of last
        # event.
        MockPg2.PG_REQS['get_end_last_event'].set_assoc(
          params=( self.cluster.cluster_id ),
          result=[ [ d1_ts ] ]
        )
        MockPg2.PG_REQS['get_start_oldest_unfinised_event'].set_assoc(
          params= ( self.cluster.cluster_id ),
          result=[ ]
        )

        self.importer.load()
        mock_new_events.assert_called_with(d1_ts)

        # NOTE(review): hour=1 rather than 0 — looks like epoch expressed in
        # a UTC+1 local time (the earlier copy of this test uses hour=0);
        # confirm which one matches the importer's behavior.
        default_datetime = datetime(1970, 1, 1, 1, 0)

        # No event in DB: search starting from epoch.
        MockPg2.PG_REQS['get_end_last_event'].set_assoc(
          params=( self.cluster.cluster_id ),
          result=[ ]
        )
        MockPg2.PG_REQS['get_start_oldest_unfinised_event'].set_assoc(
          params= ( self.cluster.cluster_id ),
          result=[ ]
        )

        self.importer.load()
        mock_new_events.assert_called_with(default_datetime)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_unfound_node(self):
        """EventImporterSlurm.load() raises Exception if one event is linked to
           a node not loaded by ArchitectureImporter."""

        self.init_load_data()
        self.app.arch.nodes = []
        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "event node %s not found in loaded nodes" % (self.node_name),
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_load_invalid_tres(self):
        """EventImporterSlurm.load() raises exception if invalid tres for an
           event is found"""

        self.init_load_data()
        # tres id 0 is not a CPU count.
        MockMySQLdb.MY_REQS['get_events']['res'][0][3] = '0=0'
        self.assertRaisesRegexp(
               HPCStatsSourceError,
               "unable to extract cpu_count from event tres",
               self.importer.load)

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_merge_successive_events(self):
        """EventImporterSlurm.merge_successive_events() should merge successive
           events in the list if they are on the same node w/ same type.
        """

        # Three contiguous 10-minute windows.
        e1_start = datetime(2015, 3, 2, 16,  0, 0)
        e1_end   = datetime(2015, 3, 2, 16, 10, 0)
        e2_start = datetime(2015, 3, 2, 16, 10, 0)
        e2_end   = datetime(2015, 3, 2, 16, 20, 0)
        e3_start = datetime(2015, 3, 2, 16, 20, 0)
        e3_end   = datetime(2015, 3, 2, 16, 30, 0)

        node1 = [ Node('node1', self.cluster, 'model1', 'partition1', 16, 8, 0), ]
        node2 = [ Node('node2', self.cluster, 'model1', 'partition1', 16, 8, 0), ]

        # 3 successive events on one node with same type, they must be merged
        # into one event.
        events = [
          Event(self.cluster, node1, 4, e1_start, e1_end, 'type1', 'reason1'),
          Event(self.cluster, node1, 4, e2_start, e2_end, 'type1', 'reason1'),
          Event(self.cluster, node1, 4, e3_start, e3_end, 'type1', 'reason1'),
        ]
        merged = self.importer.merge_successive_events(events)
        self.assertEquals(1, len(merged))
        self.assertEquals(merged[0].start_datetime, e1_start)
        self.assertEquals(merged[0].end_datetime, e3_end)
        self.assertEquals(merged[0].event_type, 'type1')
        self.assertEquals(merged[0].reason, 'reason1')

        # 3 successive events on one node node1 with same type, with one event
        # on another node node2 in the middle: all events on node1 must be
        # merged while the other event on node2 must stay as is.
        events = [
          Event(self.cluster, node1, 4, e1_start, e1_end, 'type1', 'reason1'),
          Event(self.cluster, node2, 4, e2_start, e2_end, 'type1', 'reason1'),
          Event(self.cluster, node1, 4, e2_start, e2_end, 'type1', 'reason1'),
          Event(self.cluster, node1, 4, e3_start, e3_end, 'type1', 'reason1'),
        ]
        merged = self.importer.merge_successive_events(events)
        self.assertEquals(2, len(merged))
        self.assertEquals(merged[0].start_datetime, e1_start)
        self.assertEquals(merged[0].end_datetime, e3_end)
        self.assertEquals(merged[1].end_datetime, e2_end)
        self.assertEquals(merged[0].node, node1)
        self.assertEquals(merged[1].node, node2)

        # 3 successive events on node1 but with different types, they must not
        # be merged.
        events = [
          Event(self.cluster, node1, 4, e1_start, e1_end, 'type1', 'reason1'),
          Event(self.cluster, node1, 4, e2_start, e2_end, 'type2', 'reason1'),
          Event(self.cluster, node1, 4, e3_start, e3_end, 'type1', 'reason1'),
        ]
        merged = self.importer.merge_successive_events(events)
        self.assertEquals(3, len(merged))

    @mock.patch("%s.MySQLdb" % (module), mock_mysqldb())
    def test_txt_slurm_event_type(self):
        """EventImporterSlurm.txt_slurm_event_type() should give the
           appropriate human readable string represation of an event type
           according to its hex bitmap value.
        """

        tests = [ ( 0x0001, 'DOWN' ),
                  ( 0x0004, 'ERROR' ),
                  ( 0x0012, 'IDLE+NET' ),
                  ( 0x8535, 'MIXED+NET+RES+RESUME+COMPLETING+MAINT' ) ]
        for value, expected in tests:
            txt = EventImporterSlurm.txt_slurm_event_type(value)
            self.assertEquals(txt, expected)
# Beispiel #12
# 0
class TestsUserImporterLdap(HPCStatsTestCase):

    @mock.patch("HPCStats.DB.HPCStatsDB.psycopg2", mock_psycopg2())
    def setUp(self):
        """Build a UserImporterLdap wired to a mocked psycopg2 DB and a mock
           app, then reset the mock request registry.
        """
        self.filename = 'fake'
        self.cluster = Cluster('testcluster')
        self.cluster.cluster_id = 0
        # Rebase HPCStatsConf on the mock parser so the CONFIG dict is read
        # instead of a real INI file.
        HPCStatsConf.__bases__ = (MockConfigParser, object)
        self.conf = HPCStatsConf(self.filename, self.cluster)
        self.conf.conf = CONFIG.copy()
        self.db = HPCStatsDB(self.conf)
        self.db.bind()
        self.app = MockApp(self.db, self.conf, self.cluster)
        self.importer = UserImporterLdap(self.app,
                                         self.db,
                                         self.conf,
                                         self.cluster)
        init_reqs()

    def test_init(self):
        """UserImporterLdap.__init__() initializes w/o error
        """
        # Actual instantiation happens in setUp().
        pass


    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.os.path.isfile" % module)
    def test_check_groups_alias_file(self, m_isfile):
        """check() must raise HPCStatsSourceError when the configured
           groups_alias_file does not exist, and pass silently otherwise.
        """

        # Point the importer at some alias file so check() runs the
        # os.path.isfile() test at all.
        self.importer.groups_alias_file = 'test'

        # Existing file: check() must not raise.
        m_isfile.return_value = True
        self.importer.check()

        # Missing file: check() must raise an explicit source error.
        m_isfile.return_value = False
        with self.assertRaisesRegexp(HPCStatsSourceError,
                                     "Groups alias file test does not exist"):
            self.importer.check()


    def test_load_groups_alias(self):
        """load_groups_alias() parses well-formed alias files and raises
           HPCStatsSourceError on malformed lines.
        """

        self.importer.groups_alias_file = 'test'

        # Well-formed content: two "<long> <short>" alias lines.
        valid = "testAlong testA\n" \
                "testBlong testB\n"
        with mock.patch("%s.open" % (module),
                        mock.mock_open(read_data=valid), create=True):
            self.importer.load_groups_alias()
        self.assertEquals({'testAlong': 'testA', 'testBlong': 'testB'},
                          self.importer.groups_alias)

        # Malformed contents: missing alias, extra field, wrong separators.
        for bad in ("testAlong", "testBlong testB fail\n",
                    "test:fail", "test;epic"):
            with mock.patch("%s.open" % (module),
                            mock.mock_open(read_data=bad), create=True):
                with self.assertRaisesRegexp(
                        HPCStatsSourceError,
                        "Malformed line in alias file test"):
                    self.importer.load_groups_alias()


    @mock.patch("%s.ldap" % (module), mock_ldap())
    def test_load_simple(self):
        """load() populates one user and one account per LDAP entry when the
           HPCStats DB reports no unclosed account.
        """

        logins = [ 'login1', 'login2', 'login3' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], logins)

        # No unclosed account in DB for cluster id 0.
        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc(params=(0,),
                                                           result=[])

        self.importer.load()
        self.assertEquals(3, len(self.importer.users))
        self.assertEquals(3, len(self.importer.accounts))

    @mock.patch("%s.ldap" % (module), mock_ldap())
    def test_load_simple2(self):
        """load() merges LDAP users with an extra user carried by an
           unclosed account in the HPCStats DB.
        """

        logins = [ 'login1', 'login2', 'login3' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], logins)

        # A fourth user, login4, known only from the DB through its unclosed
        # account.
        creation_user4 = datetime(2015, 3, 2, 16, 0, 1)
        row_user4 = [ 0, 'login4', 'name_user4', 'firstname_user4',
                      'department_user4', 0, 0, creation_user4 ]
        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc(params=(0,),
                                                           result=[row_user4])

        self.importer.load()
        self.assertEquals(4, len(self.importer.users))
        self.assertEquals(4, len(self.importer.accounts))


    @mock.patch("%s.ldap" % (module), mock_ldap())
    def test_load_user_no_dp_group(self):
        """UserImporterLdap.load() should work with user having no secondary
           group: the department is computed from the primary group name
           (optionally translated through groups_alias) and the default
           subdir, or set to None without any primary group.
        """

        # BUG FIX: the login was the placeholder '******'; it must be login1
        # to match the secondary_groups_login1 mock request cleared below.
        user = 'login1'
        fill_ldap_users(CONFIG['testcluster/ldap'], [user])
        # remove secondary group result for login1
        MockLdap.LDAP_REQS['secondary_groups_login1']['res'] = []

        # set the cn result of the prim_group LDAP query
        primary_group = 'primgrouptest'
        MockLdap.LDAP_REQS['prim_group_0']['res'][0][1]['cn'][0] = primary_group

        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc (
          params=( 0, ),
          result=[ ]
        )

        # test the user department has been computed based on its primary group
        self.importer.load()
        self.assertEquals(self.importer.users[0].department,
                          primary_group+'-unknown')

        # test with alternative default subdir
        subdir = 'testA'
        self.importer.default_subdir = subdir
        self.importer.load()
        self.assertEquals(self.importer.users[0].department,
                          primary_group+'-'+subdir)

        # test with alias
        primary_group_complicated = 'primgrouptestcomplicated'
        MockLdap.LDAP_REQS['prim_group_0']['res'][0][1]['cn'][0] = \
            primary_group_complicated
        self.importer.groups_alias = \
            { primary_group_complicated: primary_group }
        self.importer.load()
        self.assertEquals(self.importer.users[0].department,
                          primary_group+'-'+subdir)

        # test without primary group -> department must be None
        MockLdap.LDAP_REQS['prim_group_0']['res'] = {}
        self.importer.load()
        self.assertEquals(self.importer.users[0].department, None)

        # test with multiple primary groups -> must raise HPCStatsSourceError
        primary_group_complicated = 'primgrouptestcomplicated'
        MockLdap.LDAP_REQS['prim_group_0']['res'] = [ 'result1', 'result2' ]
        self.assertRaisesRegexp(
            HPCStatsSourceError,
            "too much results .%d. found for user %s primary group %d in " \
            "base %s" % (2, user, 0, self.importer.ldap_dn_groups),
            self.importer.load)


    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.save" % (module))
    @mock.patch("%s.Account.save" % (module))
    def test_load_update_new_user_other_accounts(self, m_account_save, m_user_save):
        """update() creates the user/account found in LDAP and missing from
           HPCStatsDB, dating the account from today because accounts
           already exist on the cluster.
        """

        fill_ldap_users(CONFIG['testcluster/ldap'], ['login1'])

        cluster_key = (self.cluster.cluster_id,)
        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc(params=cluster_key,
                                                           result=[])
        # Non-empty account count: creation date falls back to today.
        MockPg2.PG_REQS['nb_existing_accounts'].set_assoc(params=cluster_key,
                                                          result=[[0], [1]])

        self.importer.load()
        self.importer.update()
        self.assertEquals(date.today(),
                          self.importer.accounts[0].creation_date)
        m_user_save.assert_called_with(self.db)
        m_account_save.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.save" % (module))
    @mock.patch("%s.Account.save" % (module))
    def test_load_update_new_user_no_account(self, m_account_save, m_user_save):
        """update() saves the user/account found only in LDAP, dating the
           account from epoch because no account exists yet on the cluster.
        """

        fill_ldap_users(CONFIG['testcluster/ldap'], ['login1'])

        self.importer.load()
        self.importer.update()
        self.assertEquals(date(1970, 1, 1),
                          self.importer.accounts[0].creation_date)
        m_user_save.assert_called_with(self.db)
        m_account_save.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.Account.update" % (module))
    def test_load_update_close_account(self, m_account_update):
        """update() closes (deletion date set to today) the account reported
           unclosed by HPCStatsDB when its user is gone from LDAP.
        """

        # Empty LDAP directory.
        fill_ldap_users(CONFIG['testcluster/ldap'], [])

        creation_user2 = datetime(2015, 3, 2, 16, 0, 1)
        row_user2 = [ 2, 'login2', 'name_user2', 'firstname_user2',
                      'department_user2', 0, 0, creation_user2 ]
        MockPg2.PG_REQS['get_unclosed_accounts'].set_assoc(
            params=(self.cluster.cluster_id,), result=[row_user2])

        self.importer.load()
        self.importer.update()
        self.assertEquals(date.today(),
                          self.importer.accounts[0].deletion_date)
        m_account_update.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.update" % (module))
    @mock.patch("%s.Account.save" % (module))
    def test_load_update_user_wo_account(self, m_account_save, m_user_update):
        """update() creates the account and updates the user found both in
           LDAP and HPCStatsDB but without an account on the cluster.
        """

        fill_ldap_users(CONFIG['testcluster/ldap'], ['login3'])

        user3_id = 3
        MockPg2.PG_REQS['find_user'].set_assoc(params=('login3',),
                                               result=[[user3_id]])
        # No account on this cluster for this user yet.
        MockPg2.PG_REQS['existing_account'].set_assoc(
            params=(user3_id, self.cluster.cluster_id,), result=[])

        self.importer.load()
        self.importer.update()
        m_user_update.assert_called_with(self.db)
        m_account_save.assert_called_with(self.db)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.update" % (module))
    @mock.patch("%s.Account.update" % (module))
    def test_load_update_user_w_account(self, m_account_update, m_user_update):
        """update() only updates the user when found both in LDAP and
           HPCStatsDB with a still-open account on the cluster.
        """

        fill_ldap_users(CONFIG['testcluster/ldap'], ['login4'])

        creation_user4 = datetime(2015, 3, 2, 16, 0, 1)
        user4_id = 4
        MockPg2.PG_REQS['find_user'].set_assoc(params=('login4',),
                                               result=[[user4_id]])
        MockPg2.PG_REQS['existing_account'].set_assoc(
            params=(user4_id, self.cluster.cluster_id,), result=[[0]])
        # Unclosed account: deletion date is None.
        MockPg2.PG_REQS['load_account'].set_assoc(
            params=(user4_id, self.cluster.cluster_id,),
            result=[[0, 0, creation_user4, None]])

        self.importer.load()
        self.importer.update()
        m_user_update.assert_called_with(self.db)
        # ensure Account.update() is not called
        self.assertRaises(AssertionError,
                          m_account_update.assert_called_with,
                          self.db,
                          None)

    @mock.patch("%s.ldap" % (module), mock_ldap())
    @mock.patch("%s.User.update" % (module))
    @mock.patch("%s.Account.update" % (module))
    def test_load_update_user_closed_account(self, m_account_update, m_user_update):
        """UserImporterLdap.update() update user found in LDAP and in HPCStatsDB
           with a closed account on the cluster.
        """
        users = [ 'login5' ]
        fill_ldap_users(CONFIG['testcluster/ldap'], users)

        user5_creation = datetime(2015, 3, 2, 16, 0, 1)
        user5_deletion = datetime(2015, 3, 2, 16, 0, 1)
        user5_id = 5
        # login5 exists in DB with a closed account (deletion date set).
        MockPg2.PG_REQS['find_user'].set_assoc(
          params=( 'login5', ),
          result=[ [ user5_id ] ]
          )
        MockPg2.PG_REQS['existing_account'].set_assoc(
          params=( user5_id, self.cluster.cluster_id, ),
          result=[ [ 0 ] ]
          )
        MockPg2.PG_REQS['load_account'].set_assoc(
          params=( user5_id, self.cluster.cluster_id, ),
          result=[ [ 0, 0, user5_creation, user5_deletion ] ]
        )

        self.importer.load()
        self.importer.update()
        # The account found in LDAP again is re-opened: deletion date reset.
        # assertEquals() is a deprecated alias (removed in Python 3.12):
        # use assertEqual().
        self.assertEqual(self.importer.accounts[0].deletion_date, None)
        m_user_update.assert_called_with(self.db)
        m_account_update.assert_called_with(self.db)