Example 1
 def create_special_vini_record(self, interface_hrn):
     # special case for vini
     if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
         # create a fake internet2 site first
         i2site = {
             'name': 'Internet2',
             'login_base': 'internet2',
             'site_id': -1
         }
         site_hrn = _get_site_hrn(interface_hrn, i2site)
         # import if the hrn is not in the list of existing hrns, or if the hrn
         # exists but it's not a site record
         if ('authority', site_hrn) not in self.records_by_type_hrn:
             urn = hrn_to_urn(site_hrn, 'authority')
             if not self.auth_hierarchy.auth_exists(urn):
                 self.auth_hierarchy.create_auth(urn)
             auth_info = self.auth_hierarchy.get_auth_info(urn)
             auth_record = RegAuthority(hrn=site_hrn,
                                        gid=auth_info.get_gid_object(),
                                        pointer=i2site['site_id'],
                                        authority=get_authority(site_hrn))
             auth_record.just_created()
             global_dbsession.add(auth_record)
             global_dbsession.commit()
             self.logger.info(
                 "PlImporter: Imported authority (vini site) %s" %
                 auth_record)
             self.remember_record(auth_record)
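
A note on the bookkeeping this method leans on: the membership test against self.records_by_type_hrn and the call to self.remember_record assume an in-memory index of records keyed by (type, hrn). That helper is not reproduced in this listing (Example 21 shows how the dict is built); the class below is only a minimal sketch of the idea, with an illustrative name.

class RecordIndexSketch:
    """Illustrative stand-in for the importer's (type, hrn) bookkeeping."""

    def __init__(self, all_records):
        # index existing records the same way Example 21 does: key by (type, hrn)
        self.records_by_type_hrn = {
            (record.type, record.hrn): record for record in all_records
        }

    def remember_record(self, record):
        # keep the in-memory index in sync with records committed during this run
        self.records_by_type_hrn[(record.type, record.hrn)] = record
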
Example 2
 def create_interface_records(self):
     """
     Create a record for each SFA interface
     """
     # just create certs for all sfa interfaces even if they
     # aren't enabled
     auth_info = self.auth_hierarchy.get_auth_info(
         self.config.SFA_INTERFACE_HRN)
     pkey = auth_info.get_pkey_object()
     hrn = self.config.SFA_INTERFACE_HRN
     for type in [
             'authority+sa',
             'authority+am',
             'authority+sm',
     ]:
         urn = hrn_to_urn(hrn, type)
         gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
         # for now we have to preserve the authority+<> stuff
         if self.record_exists(type, hrn): continue
         interface_record = RegAuthority(type=type,
                                         hrn=hrn,
                                         gid=gid,
                                         authority=get_authority(hrn))
         interface_record.just_created()
         global_dbsession.add(interface_record)
         global_dbsession.commit()
         self.logger.info("SfaImporter: imported authority (%s) %s " %
                          (type, interface_record))
Example 3
    def import_users(self, existing_hrns, existing_records):
        # Get all users
        users = self.shell.auth_manager.users.list()
        users_dict = {}
        keys_filename = self.config.config_path + os.sep + 'person_keys.py'
        old_user_keys = load_keys(keys_filename)
        user_keys = {}
        for user in users:
            auth_hrn = self.config.SFA_INTERFACE_HRN
            if user.tenantId is not None:
                tenant = self.shell.auth_manager.tenants.find(id=user.tenantId)
                auth_hrn = OSXrn(name=tenant.name,
                                 auth=self.config.SFA_INTERFACE_HRN,
                                 type='authority').get_hrn()
            hrn = OSXrn(name=user.name, auth=auth_hrn, type='user').get_hrn()
            users_dict[hrn] = user
            old_keys = old_user_keys.get(hrn, [])
            keyname = OSXrn(xrn=hrn, type='user').get_slicename()
            keys = [
                k.public_key
                for k in self.shell.nova_manager.keypairs.findall(name=keyname)
            ]
            user_keys[hrn] = keys
            update_record = False
            if old_keys != keys:
                update_record = True
            if hrn not in existing_hrns or \
                   (hrn, 'user') not in existing_records or update_record:
                urn = OSXrn(xrn=hrn, type='user').get_urn()

                if keys:
                    try:
                        pkey = convert_public_key(keys[0])
                    except:
                        self.logger.log_exc(
                            'unable to convert public key for %s' % hrn)
                        pkey = Keypair(create=True)
                else:
                    self.logger.warn(
                        "OpenstackImporter: person %s does not have a PL public key"
                        % hrn)
                    pkey = Keypair(create=True)
                user_gid = self.auth_hierarchy.create_gid(urn,
                                                          create_uuid(),
                                                          pkey,
                                                          email=user.email)
                user_record = RegUser()
                user_record.type = 'user'
                user_record.hrn = hrn
                user_record.gid = user_gid
                user_record.authority = get_authority(hrn)
                global_dbsession.add(user_record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported person %s" %
                                 user_record)

        return users_dict, user_keys
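
The re-import condition in the loop above combines three tests: an unknown hrn, a missing (hrn, 'user') record, or a change in the stored keys. The function below is a minimal, self-contained restatement of that decision; its name and argument list are illustrative and not part of the importer.

def user_needs_import(hrn, current_keys, old_user_keys, existing_hrns, existing_records):
    # mirrors the old_keys != keys comparison made inside the loop
    keys_changed = old_user_keys.get(hrn, []) != current_keys
    return (hrn not in existing_hrns
            or (hrn, 'user') not in existing_records
            or keys_changed)
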
Example 4
    def run(self, options):
        """
        Create the special iotlab table, lease_table, in the SFA database.
        Import everything (users, slices, nodes and sites from OAR
        and LDAP) into the SFA database.
        Delete stale records that are no longer in OAR or LDAP.
        :param options:
        :type options:
        """

        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH

        testbed_shell = IotlabShell(config)
        # leases_db = TestbedAdditionalSfaDB(config)
        #Create special slice table for iotlab

        if not self.exists('lease_table'):
            init_tables(engine)
            self.logger.info("IotlabImporter.run:  lease_table table created ")

        # import site and node records in site into the SFA db.
        self.import_sites_and_nodes(testbed_shell)
        #import users and slice into the SFA DB.
        self.import_persons_and_slices(testbed_shell)

        ### remove stale records
        # special records must be preserved
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in self.all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False

        for record in self.all_records:
            if record.type == 'user':
                self.logger.info("IotlabImporter: stale records: hrn %s %s" %
                                 (record.hrn, record.stale))
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("IotlabImporter: deleting stale record: %s" %
                                 (record))

                try:
                    global_dbsession.delete(record)
                    global_dbsession.commit()
                except SQLAlchemyError:
                    self.logger.log_exc("IotlabImporter: failed to delete \
                        stale record %s" % (record))
Example 5
    def run(self, options):
        """
        Create the special iotlab table, lease_table, in the SFA database.
        Import everything (users, slices, nodes and sites from OAR
        and LDAP) into the SFA database.
        Delete stale records that are no longer in OAR or LDAP.
        :param options:
        :type options:
        """

        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH

        testbed_shell = IotlabShell(config)
        # leases_db = TestbedAdditionalSfaDB(config)
        #Create special slice table for iotlab

        if not self.exists('lease_table'):
            init_tables(engine)
            self.logger.info("IotlabImporter.run:  lease_table table created ")

        # import site and node records in site into the SFA db.
        self.import_sites_and_nodes(testbed_shell)
        #import users and slice into the SFA DB.
        self.import_persons_and_slices(testbed_shell)

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth,
                       interface_hrn + '.slicemanager']
        for record in self.all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False

        for record in self.all_records:
            if record.type == 'user':
                self.logger.info("IotlabImporter: stale records: hrn %s %s"
                                 % (record.hrn, record.stale))
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("IotlabImporter: deleting stale record: %s"
                                 % (record))

                try:
                    global_dbsession.delete(record)
                    global_dbsession.commit()
                except SQLAlchemyError:
                    self.logger.log_exc("IotlabImporter: failed to delete \
                        stale record %s" % (record))
Example 6
    def import_sites_and_nodes(self, testbed_shell):
        """

        Gets all the sites and nodes from OAR, processes the information,
        creates hrns and a RegAuthority record for each site, and feeds them to
        the database. For each site, imports the site's nodes into the DB by
        calling import_nodes.

        :param testbed_shell: IotlabDriver object, used to access
            testbed_shell methods and fetch info on sites and nodes.
        :type testbed_shell: IotlabDriver
        """

        sites_listdict = testbed_shell.GetSites()
        nodes_listdict = testbed_shell.GetNodes()
        nodes_by_id = dict([(node['node_id'], node)
                            for node in nodes_listdict])
        for site in sites_listdict:
            site_hrn = site['name']
            site_record = self.find_record_by_type_hrn('authority', site_hrn)
            self.logger.info("IotlabImporter: import_sites_and_nodes \
                                    (site) %s \r\n " % site_record)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)

                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = \
                        RegAuthority(hrn=site_hrn,
                                     gid=auth_info.get_gid_object(),
                                     pointer='-1',
                                     authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported authority \
                                    (site) %s" % site_record)
                    self.update_just_added_records_dict(site_record)
                except SQLAlchemyError:
                    # if the site import fails then there is no point in
                    # trying to import the
                    # site's child records(node, slices, persons), so skip them.
                    self.logger.log_exc("IotlabImporter: failed to import \
                        site. Skipping child records")
                    continue
            else:
                # xxx update the record ...
                pass

            site_record.stale = False
            self.import_nodes(site['node_ids'], nodes_by_id, testbed_shell)

        return
Example 7
    def import_sites_and_nodes(self, testbed_shell):
        """

        Gets all the sites and nodes from OAR, processes the information,
        creates hrns and a RegAuthority record for each site, and feeds them to
        the database. For each site, imports the site's nodes into the DB by
        calling import_nodes.

        :param testbed_shell: IotlabDriver object, used to access
            testbed_shell methods and fetch info on sites and nodes.
        :type testbed_shell: IotlabDriver
        """

        sites_listdict = testbed_shell.GetSites()
        nodes_listdict = testbed_shell.GetNodes()
        nodes_by_id = dict([(node['node_id'], node) for node in nodes_listdict])
        for site in sites_listdict:
            site_hrn = site['name']
            site_record = self.find_record_by_type_hrn('authority', site_hrn)
            self.logger.info("IotlabImporter: import_sites_and_nodes \
                                    (site) %s \r\n " % site_record)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)

                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = \
                        RegAuthority(hrn=site_hrn,
                                     gid=auth_info.get_gid_object(),
                                     pointer='-1',
                                     authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported authority \
                                    (site) %s" % site_record)
                    self.update_just_added_records_dict(site_record)
                except SQLAlchemyError:
                    # if the site import fails then there is no point in
                    # trying to import the
                    # site's child records(node, slices, persons), so skip them.
                    self.logger.log_exc("IotlabImporter: failed to import \
                        site. Skipping child records")
                    continue
            else:
                # xxx update the record ...
                pass

            site_record.stale = False
            self.import_nodes(site['node_ids'], nodes_by_id, testbed_shell)

        return
Example 8
    def import_slice(self, slice_hrn, slice_record, user_record):
        """

        Creates a RegSlice record for the given slice hrn if the slice
        does not exist yet. Creates a relationship with the user record
        associated with the slice.
        Commits the record to the database.


        :param slice_hrn: Human readable name of the slice.
        :type slice_hrn: string
        :param slice_record: record of the slice found in the DB, if any.
        :type slice_record: RegSlice or None
        :param user_record: user record found in the DB if any.
        :type user_record: RegUser

        .. todo:: Update the record if a slice record already exists.
        """
        if not slice_record:
            pkey = Keypair(create=True)
            urn = hrn_to_urn(slice_hrn, 'slice')
            slice_gid = \
                self.auth_hierarchy.create_gid(urn,
                                               create_uuid(), pkey)
            slice_record = RegSlice(hrn=slice_hrn, gid=slice_gid,
                                    pointer='-1',
                                    authority=get_authority(slice_hrn))
            try:
                slice_record.just_created()
                global_dbsession.add(slice_record)
                global_dbsession.commit()


                self.update_just_added_records_dict(slice_record)

            except SQLAlchemyError:
                self.logger.log_exc("IotlabImporter: failed to import slice")

        #No slice update upon import in iotlab
        else:
            # xxx update the record ...
            self.logger.warning("Iotlab Slice update not implemented")

        # record current users affiliated with the slice
        slice_record.reg_researchers = [user_record]
        try:
            global_dbsession.commit()
            slice_record.stale = False
        except SQLAlchemyError:
            self.logger.log_exc("IotlabImporter: failed to update slice")
Example 9
    def import_users(self, existing_hrns, existing_records):
        # Get all users
        users = self.shell.auth_manager.users.list()
        users_dict = {}
        keys_filename = self.config.config_path + os.sep + 'person_keys.py'
        old_user_keys = load_keys(keys_filename)
        user_keys = {}
        for user in users:
            auth_hrn = self.config.SFA_INTERFACE_HRN
            try:
                user_tenantId = user.tenantId
                user_email = user.email
            except AttributeError:
                user_tenantId = None
                user_email = None

            if user_tenantId is not None:
                #import pdb; pdb.set_trace()
                tenant = self.shell.auth_manager.tenants.find(id=user_tenantId)
                if tenant is not None:
                    auth_hrn = OSXrn(name=tenant.name, auth=self.config.SFA_INTERFACE_HRN, type='authority').get_hrn()
            hrn = OSXrn(name=user.name, auth=auth_hrn, type='user').get_hrn()
            users_dict[hrn] = user
            old_keys = old_user_keys.get(hrn, [])
            keyname = OSXrn(xrn=hrn, type='user').get_slicename()
            keys = [k.public_key for k in self.shell.compute_manager.keypairs.findall(name=keyname)]
            user_keys[hrn] = keys
            update_record = False
            if old_keys != keys:
                update_record = True
            if hrn not in existing_hrns or \
                   (hrn, 'user') not in existing_records or update_record:
                urn = OSXrn(xrn=hrn, type='user').get_urn()
                if keys:
                    try:
                        pkey = convert_public_key(keys[0])
                    except:
                        self.logger.log_exc('unable to convert public key for %s' % hrn)
                        pkey = Keypair(create=True)
                else:
                    self.logger.warn("OpenstackImporter: person %s does not have a PL public key"%hrn)
                    pkey = Keypair(create=True)
                user_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey, email=user_email)
                user_record = RegUser(type='user', 
                                      hrn=hrn, 
                                      gid=user_gid, 
                                      authority=get_authority(hrn))
                global_dbsession.add(user_record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported person %s" % user_record)
Example 10
    def import_slice(self, slice_hrn, slice_record, user_record):
        """

        Creates a RegSlice record for the given slice hrn if the slice
        does not exist yet. Creates a relationship with the user record
        associated with the slice.
        Commits the record to the database.


        :param slice_hrn: Human readable name of the slice.
        :type slice_hrn: string
        :param slice_record: record of the slice found in the DB, if any.
        :type slice_record: RegSlice or None
        :param user_record: user record found in the DB if any.
        :type user_record: RegUser

        .. todo:: Update the record if a slice record already exists.
        """
        if not slice_record:
            pkey = Keypair(create=True)
            urn = hrn_to_urn(slice_hrn, 'slice')
            slice_gid = \
                self.auth_hierarchy.create_gid(urn,
                                               create_uuid(), pkey)
            slice_record = RegSlice(hrn=slice_hrn,
                                    gid=slice_gid,
                                    pointer='-1',
                                    authority=get_authority(slice_hrn))
            try:
                slice_record.just_created()
                global_dbsession.add(slice_record)
                global_dbsession.commit()

                self.update_just_added_records_dict(slice_record)

            except SQLAlchemyError:
                self.logger.log_exc("IotlabImporter: failed to import slice")

        #No slice update upon import in iotlab
        else:
            # xxx update the record ...
            self.logger.warning("Iotlab Slice update not implemented")

        # record current users affiliated with the slice
        slice_record.reg_researchers = [user_record]
        try:
            global_dbsession.commit()
            slice_record.stale = False
        except SQLAlchemyError:
            self.logger.log_exc("IotlabImporter: failed to update slice")
Example 11
    def run(self, options):
        # we don't have any options for now
        self.logger.info("OpenstackImporter.run : to do")

        # create dict of all existing sfa records
        existing_records = {}
        existing_hrns = []
        key_ids = []
        for record in global_dbsession.query(RegRecord):
            existing_records[(
                record.hrn,
                record.type,
            )] = record
            existing_hrns.append(record.hrn)

        tenants_dict = self.import_tenants(existing_hrns, existing_records)
        users_dict, user_keys = self.import_users(existing_hrns,
                                                  existing_records)

        # remove stale records
        system_records = [
            self.interface_hrn, self.root_auth,
            self.interface_hrn + '.slicemanager'
        ]
        for (record_hrn, type) in existing_records.keys():
            if record_hrn in system_records:
                continue

            record = existing_records[(record_hrn, type)]
            if record.peer_authority:
                continue

            if type == 'user':
                if record_hrn in users_dict:
                    continue
            elif type in ['slice', 'authority']:
                if record_hrn in tenants_dict:
                    continue
            else:
                continue

            record_object = existing_records[(record_hrn, type)]
            self.logger.info("OpenstackImporter: removing %s " % record)
            global_dbsession.delete(record_object)
            global_dbsession.commit()

        # save pub keys
        self.logger.info('OpenstackImporter: saving current pub keys')
        keys_filename = self.config.config_path + os.sep + 'person_keys.py'
        save_keys(keys_filename, user_keys)
Example 12
    def import_tenants(self, existing_hrns, existing_records):
        # Get all tenants
        # A tenant can represent an organizational group (site) or a
        # slice. If a tenant's authority/parent matches the root authority it is
        # considered a group/site. All other tenants are considered slices.
        tenants = self.shell.auth_manager.tenants.list()
        tenants_dict = {}
        for tenant in tenants:
            hrn = self.config.SFA_INTERFACE_HRN + '.' + tenant.name
            tenants_dict[hrn] = tenant
            authority_hrn = OSXrn(xrn=hrn,
                                  type='authority').get_authority_hrn()

            if hrn in existing_hrns:
                continue

            if authority_hrn == self.config.SFA_INTERFACE_HRN:
                # import group/site
                record = RegAuthority()
                urn = OSXrn(xrn=hrn, type='authority').get_urn()
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                gid = auth_info.get_gid_object()
                record.type = 'authority'
                record.hrn = hrn
                record.gid = gid
                record.authority = get_authority(hrn)
                global_dbsession.add(record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported authority: %s" %
                                 record)

            else:
                record = RegSlice()
                urn = OSXrn(xrn=hrn, type='slice').get_urn()
                pkey = Keypair(create=True)
                gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                record.type = 'slice'
                record.hrn = hrn
                record.gid = gid
                record.authority = get_authority(hrn)
                global_dbsession.add(record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported slice: %s" %
                                 record)

        return tenants_dict
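
The authority-versus-slice decision above depends only on whether the tenant's parent authority equals the interface HRN. The function below is a standalone restatement of that rule; the rsplit is merely a stand-in for OSXrn(xrn=hrn, type='authority').get_authority_hrn(), which is not shown in this listing.

def classify_tenant(tenant_name, interface_hrn):
    hrn = interface_hrn + '.' + tenant_name
    # stand-in for OSXrn(xrn=hrn, type='authority').get_authority_hrn()
    authority_hrn = hrn.rsplit('.', 1)[0]
    # tenants parented directly under the interface are sites; nested ones are slices
    return 'authority' if authority_hrn == interface_hrn else 'slice'
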
Example 13
    def import_tenants(self, existing_hrns, existing_records):
        # Get all tenants
        # A tenant can represent an organizational group (site) or a
        # slice. If a tenant's authority/parent matches the root authority it is
        # considered a group/site. All other tenants are considered slices.
        tenants = self.shell.auth_manager.tenants.list()
        tenants_dict = {}
        for tenant in tenants:
            hrn = self.config.SFA_INTERFACE_HRN + '.' + tenant.name
            tenants_dict[hrn] = tenant
            authority_hrn = OSXrn(xrn=hrn, type='authority').get_authority_hrn()

            if hrn in existing_hrns:
                continue

            if authority_hrn == self.config.SFA_INTERFACE_HRN:
                # import group/site
                record = RegAuthority()
                urn = OSXrn(xrn=hrn, type='authority').get_urn()
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                gid = auth_info.get_gid_object()
                record.type = 'authority'
                record.hrn = hrn
                record.gid = gid
                record.authority = get_authority(hrn)
                global_dbsession.add(record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported authority: %s" % record)

            else:
                record = RegSlice()
                urn = OSXrn(xrn=hrn, type='slice').get_urn()
                pkey = Keypair(create=True)
                gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                record.type = 'slice'
                record.hrn = hrn
                record.gid = gid
                record.authority = get_authority(hrn)
                global_dbsession.add(record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported slice: %s" % record)

        return tenants_dict
Example 14
    def create_sm_client_record(self):
        """
        Create a user record for the Slicemanager service.
        """
        hrn = self.interface_hrn + '.slicemanager'
        urn = hrn_to_urn(hrn, 'user')
        if not self.auth_hierarchy.auth_exists(urn):
            self.logger.info("SfaImporter: creating Slice Manager user")
            self.auth_hierarchy.create_auth(urn)

        if self.record_exists('user', hrn): return
        auth_info = self.auth_hierarchy.get_auth_info(hrn)
        user_record = RegUser(hrn=hrn, gid=auth_info.get_gid_object(),
                              authority=get_authority(hrn))
        user_record.just_created()
        global_dbsession.add(user_record)
        global_dbsession.commit()
        self.logger.info("SfaImporter: importing user (slicemanager) %s " %
                         user_record)
Example 15
    def run(self, options):
        # we don't have any options for now
        self.logger.info("OpenstackImporter.run : to do")

        # create dict of all existing sfa records
        existing_records = {}
        existing_hrns = []
        key_ids = []
        for record in global_dbsession.query(RegRecord):
            existing_records[(record.hrn, record.type)] = record
            existing_hrns.append(record.hrn)

        tenants_dict = self.import_tenants(existing_hrns, existing_records)
        users_dict, user_keys = self.import_users(existing_hrns, existing_records)

        # remove stale records
        system_records = [self.interface_hrn, self.root_auth,
                          self.interface_hrn + '.slicemanager']
        for (record_hrn, type) in existing_records.keys():
            if record_hrn in system_records:
                continue
        
            record = existing_records[(record_hrn, type)]
            if record.peer_authority:
                continue

            if type == 'user':
                if record_hrn in users_dict:
                    continue  
            elif type in ['slice', 'authority']:
                if record_hrn in tenants_dict:
                    continue
            else:
                continue

            record_object = existing_records[(record_hrn, type)]
            self.logger.info("OpenstackImporter: removing %s " % record)
            global_dbsession.delete(record_object)
            global_dbsession.commit()
                                   
        # save pub keys
        self.logger.info('OpenstackImporter: saving current pub keys')
        keys_filename = self.config.config_path + os.sep + 'person_keys.py'
        save_keys(keys_filename, user_keys)                
Example 16
 def create_interface_records(self):
     """
     Create a record for each SFA interface
     """
     # just create certs for all sfa interfaces even if they
     # aren't enabled
     auth_info = self.auth_hierarchy.get_auth_info(self.config.SFA_INTERFACE_HRN)
     pkey = auth_info.get_pkey_object()
     hrn = self.config.SFA_INTERFACE_HRN
     for type in ['authority+sa', 'authority+am', 'authority+sm']:
         urn = hrn_to_urn(hrn, type)
         gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
         # for now we have to preserve the authority+<> stuff
         if self.record_exists(type, hrn): continue
         interface_record = RegAuthority(type=type, hrn=hrn, gid=gid,
                                         authority=get_authority(hrn))
         interface_record.just_created()
         global_dbsession.add(interface_record)
         global_dbsession.commit()
         self.logger.info("SfaImporter: imported authority (%s) %s " %
                          (type, interface_record))
Example 17
    def create_sm_client_record(self):
        """
        Create a user record for the Slicemanager service.
        """
        hrn = self.interface_hrn + '.slicemanager'
        urn = hrn_to_urn(hrn, 'user')
        if not self.auth_hierarchy.auth_exists(urn):
            self.logger.info("SfaImporter: creating Slice Manager user")
            self.auth_hierarchy.create_auth(urn)

        if self.record_exists('user', hrn): return
        auth_info = self.auth_hierarchy.get_auth_info(hrn)
        user_record = RegUser(hrn=hrn,
                              gid=auth_info.get_gid_object(),
                              authority=get_authority(hrn))
        user_record.just_created()
        global_dbsession.add(user_record)
        global_dbsession.commit()
        self.logger.info("SfaImporter: importing user (slicemanager) %s " %
                         user_record)
Example 18
 def create_special_vini_record(self, interface_hrn):
     # special case for vini
     if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
         # create a fake internet2 site first
         i2site = {'name': 'Internet2', 'login_base': 'internet2', 'site_id': -1}
         site_hrn = _get_site_hrn(interface_hrn, i2site)
         # import if the hrn is not in the list of existing hrns, or if the hrn
         # exists but it's not a site record
         if ('authority', site_hrn) not in self.records_by_type_hrn:
             urn = hrn_to_urn(site_hrn, 'authority')
             if not self.auth_hierarchy.auth_exists(urn):
                 self.auth_hierarchy.create_auth(urn)
             auth_info = self.auth_hierarchy.get_auth_info(urn)
             auth_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                        pointer=i2site['site_id'],
                                        authority=get_authority(site_hrn))
             auth_record.just_created()
             global_dbsession.add(auth_record)
             global_dbsession.commit()
             self.logger.info("PlImporter: Imported authority (vini site) %s" % auth_record)
             self.remember_record(auth_record)
Example 19
    def create_top_level_auth_records(self, hrn):
        """
        Create top-level db records (includes root and sub-authorities, local/remote)
        """
        # make sure parent exists
        parent_hrn = get_authority(hrn)
        if not parent_hrn:
            parent_hrn = hrn
        if not parent_hrn == hrn:
            self.create_top_level_auth_records(parent_hrn)

        # ensure key and cert exists:
        self.auth_hierarchy.create_top_level_auth(hrn)
        # create the db record if it doesnt already exist
        if not self.record_exists('authority', hrn):
            auth_info = self.auth_hierarchy.get_auth_info(hrn)
            auth_record = RegAuthority(hrn=hrn, gid=auth_info.get_gid_object(),
                                       authority=get_authority(hrn))
            auth_record.just_created()
            global_dbsession.add(auth_record)
            global_dbsession.commit()
            self.logger.info("SfaImporter: imported authority (parent) %s " %
                             auth_record)
Example 20
    def create_top_level_auth_records(self, hrn):
        """
        Create top-level db records (includes root and sub-authorities, local/remote)
        """
        # make sure parent exists
        parent_hrn = get_authority(hrn)
        if not parent_hrn:
            parent_hrn = hrn
        if not parent_hrn == hrn:
            self.create_top_level_auth_records(parent_hrn)

        # ensure key and cert exists:
        self.auth_hierarchy.create_top_level_auth(hrn)
        # create the db record if it doesnt already exist
        if not self.record_exists('authority', hrn):
            auth_info = self.auth_hierarchy.get_auth_info(hrn)
            auth_record = RegAuthority(hrn=hrn,
                                       gid=auth_info.get_gid_object(),
                                       authority=get_authority(hrn))
            auth_record.just_created()
            global_dbsession.add(auth_record)
            global_dbsession.commit()
            self.logger.info("SfaImporter: imported authority (parent) %s " %
                             auth_record)
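
create_top_level_auth_records recurses on the parent hrn so that every authority is created only after its parents exist. The helper below only sketches the hrn walk that recursion implies; get_authority is approximated here as "everything before the last dot", whereas the importer uses the real helper from SFA's xrn utilities.

def parent_chain(hrn):
    def get_authority(h):
        # illustrative approximation of SFA's get_authority()
        return h.rsplit('.', 1)[0] if '.' in h else ''
    chain = []
    while hrn:
        chain.append(hrn)
        hrn = get_authority(hrn)
    # parents first, e.g. ['top', 'top.site', 'top.site.sub']
    return list(reversed(chain))
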
Example 21
    def run(self, options):
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell(config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn)
        # we essentially use this to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict([((record.type, record.hrn), record) for record in all_records])
        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict([((record.type, record.pointer), record) for record in all_records
                  if record.pointer != -1])

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records:
            record.stale = True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only the required stuff
        sites = shell.GetSites({
            'peer_id': None,
            'enabled': True
        }, [
            'site_id', 'login_base', 'node_ids', 'slice_ids', 'person_ids',
            'name'
        ])
        # create a hash of sites by login_base
        #        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({
            'peer_id': None,
            'enabled': True
        }, ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
        # create a hash of persons by person_id
        persons_by_id = dict([(person['person_id'], person)
                              for person in persons])
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({
            'peer_id': None,
            'enabled': False
        }, ['person_id'])
        disabled_person_ids = [
            person['person_id'] for person in disabled_persons
        ]
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys({
            'peer_id': None,
            'key_id': key_ids,
            'key_type': 'ssh'
        })
        # create a hash of keys by key_id
        keys_by_id = dict([(key['key_id'], key) for key in keys])
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {}
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning(
                        "Could not spot key %d - probably non-ssh" % key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes
        nodes = shell.GetNodes({'peer_id': None},
                               ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict([(
            node['node_id'],
            node,
        ) for node in nodes])
        # Get all plc slices
        slices = shell.GetSlices({'peer_id': None},
                                 ['slice_id', 'name', 'person_ids'])
        # create hash by slice_id
        slices_by_id = dict([(slice['slice_id'], slice) for slice in slices])

        # isolate special vini case in separate method
        self.create_special_vini_record(interface_hrn)

        # start importing
        for site in sites:
            try:
                site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except:
                site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if the hrn is not in the list of existing hrns, or if the hrn
            # exists but it's not a site record
            site_record = self.locate_by_type_hrn('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(
                        hrn=site_hrn,
                        gid=auth_info.get_gid_object(),
                        pointer=site['site_id'],
                        authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info(
                        "PlImporter: imported authority (site) : %s" %
                        site_record)
                    self.remember_record(site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc(
                        "PlImporter: failed to import site %s. Skipping child records"
                        % site_hrn)
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale = False

            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot find node_id %s - ignored" %
                        node_id)
                    continue
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn = hostname_to_hrn(site_auth, site_name,
                                           node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn('node', node_hrn)
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        node_record = RegNode(
                            hrn=node_hrn,
                            gid=node_gid,
                            pointer=node['node_id'],
                            authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" %
                                         node_record)
                        self.remember_record(node_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import node %s" % node_hrn)
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale = False

            site_pis = []
            # import persons
            for person_id in site['person_ids']:
                proceed = False
                if person_id in persons_by_id:
                    person = persons_by_id[person_id]
                    proceed = True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning(
                        "PlImporter: cannot locate person_id %s in site %s - ignored"
                        % (person_id, site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                person_hrn = email_to_hrn(site_hrn, person['email'])
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn('user', person_hrn)

                # returns a tuple (pubkey, pkey): pubkey is a plc key object, pkey a Keypair object
                def init_person_key(person, plc_keys):
                    pubkey = None
                    if person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn(
                                'PlImporter: unable to convert public key for %s'
                                % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn(
                            "PlImporter: person %s does not have a PL public key"
                            % person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'], [])
                    if not user_record:
                        (pubkey, pkey) = init_person_key(person, plc_keys)
                        person_gid = self.auth_hierarchy.create_gid(
                            person_urn,
                            create_uuid(),
                            pkey,
                            email=person['email'])
                        user_record = RegUser(
                            hrn=person_hrn,
                            gid=person_gid,
                            pointer=person['person_id'],
                            authority=get_authority(person_hrn),
                            email=person['email'])
                        if pubkey:
                            user_record.reg_keys = [
                                RegKey(pubkey['key'], pubkey['key_id'])
                            ]
                        else:
                            self.logger.warning("No key found for user %s" %
                                                user_record)
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" %
                                         user_record)
                        self.remember_record(user_record)
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys

                        def sfa_key_in_list(sfa_key, plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key'] == sfa_key.key:
                                    return True
                            return False

                        # are all the SFA keys known to PLC ?
                        new_keys = False
                        if not sfa_keys and plc_keys:
                            new_keys = True
                        else:
                            for sfa_key in sfa_keys:
                                if not sfa_key_in_list(sfa_key, plc_keys):
                                    new_keys = True
                        if new_keys:
                            (pubkey, pkey) = init_person_key(person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(
                                person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys = []
                            else:
                                user_record.reg_keys = [
                                    RegKey(pubkey['key'], pubkey['key_id'])
                                ]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" %
                                             user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale = False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append(user_record)
                except:
                    self.logger.log_exc(
                        "PlImporter: failed to import person %d %s" %
                        (person['person_id'], person['email']))

            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis))
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot locate slice_id %s - ignored" %
                        slice_id)
                    continue
                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_record = self.locate_by_type_hrn('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        slice_record = RegSlice(
                            hrn=slice_hrn,
                            gid=slice_gid,
                            pointer=slice['slice_id'],
                            authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" %
                                         slice_record)
                        self.remember_record(slice_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import slice %s (%s)" %
                            (slice_hrn, slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [self.locate_by_type_pointer('user', user_id)
                     for user_id in slice['person_ids']]
                global_dbsession.commit()
                slice_record.stale = False

        ### remove stale records
        # special records must be preserved
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale = False

        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" %
                                 record)
                global_dbsession.delete(record)
                global_dbsession.commit()
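
The long comment in the person-update branch above boils down to one rule: regenerate the user's GID only when some SFA-side key is no longer known to PLC, or when SFA holds no key while PLC does. The function below is a compact restatement of that test with illustrative names; the actual code keeps the equivalent loop inline.

def needs_new_gid(sfa_keys, plc_keys):
    plc_key_strings = set(plc_key['key'] for plc_key in plc_keys)
    if not sfa_keys and plc_keys:
        return True
    # any SFA key missing on the PLC side triggers a GID refresh
    return any(sfa_key.key not in plc_key_strings for sfa_key in sfa_keys)
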
Example 22
    def import_single_node(self, nodename):
        '''
        Method to import a single node from the testbed database to the SFA Registry.
        The node being imported is specified by name.
        The method is used in the verify_node method (clab_slices.py) when a node
        is automatically created in the testbed database.

        :param nodename: name of the node being imported
        :type nodename: string
        '''
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell(config)

        self.logger.debug("Import Single node: %s" % nodename)

        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn)
        self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                         for record in all_records])
        # create dict keyed by (type,pointer)
        self.records_by_type_pointer = dict([((record.type, record.pointer),
                                              record) for record in all_records
                                             if record.pointer != -1])

        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        site = shell.get_testbed_info()

        # NODES
        node = shell.get_node_by(node_name=nodename)

        # Import records to the SFA registry
        # SITE
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        #site_record=self.locate_by_type_hrn ('authority', site_hrn)

        # NODE
        # Obtain parameters of the node: site_auth, site_name and hrn of the node
        site_auth = get_authority(site_hrn)
        site_name = site['name']
        node_hrn = hostname_to_hrn(site_hrn, node['name'])
        # Reduce hrn up to 64 characters
        if len(node_hrn) > 64: node_hrn = node_hrn[:64]

        # Try to locate the node_hrn in the SFA records
        node_record = self.locate_by_type_hrn('node', node_hrn)
        if not node_record:
            # Create/Import record for the node
            try:
                # Create a keypair for the node
                pkey = Keypair(create=True)
                # Obtain parameters
                urn = hrn_to_urn(node_hrn, 'node')
                node_gid = self.auth_hierarchy.create_gid(
                    urn, create_uuid(), pkey)
                # Create record for the node and add it to the Registry
                node_record = RegNode(hrn=node_hrn,
                                      gid=node_gid,
                                      pointer=node['id'],
                                      authority=get_authority(node_hrn))
                node_record.just_created()
                global_dbsession.add(node_record)
                global_dbsession.commit()
                self.logger.info("CLabImporter: imported node: %s" % node_hrn)
                self.remember_record(node_record)
            except:
                self.logger.log_exc("CLabImporter: failed to import node")
        else:
            # Node record already in the SFA registry. Update?
            pass
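
The docstring above says import_single_node is meant to be called from verify_node (clab_slices.py) right after a node is auto-created in the testbed. A hedged sketch of such a caller is shown below; ClabShell's get_node_by and import_single_node come from the snippet, while ensure_node_registered and its error handling are illustrative assumptions, not the actual verify_node code.

# Hypothetical caller sketch (not the real verify_node): make sure a freshly
# created testbed node also has a matching record in the SFA registry.
def ensure_node_registered(importer, shell, nodename):
    node = shell.get_node_by(node_name=nodename)     # ClabShell API from the snippet
    if node is None:
        raise ValueError("node %s not found in the testbed database" % nodename)
    importer.import_single_node(nodename)            # creates the RegNode if it is missing
    return node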
Esempio n. 23
0
    def import_persons_and_slices(self, testbed_shell):
        """

        Gets user data from LDAP, process the information.
        Creates hrn for the user's slice, the user's gid, creates
        the RegUser record associated with user. Creates the RegKey record
        associated nwith the user's key.
        Saves those records into the SFA DB.
        import the user's slice onto the database as well by calling
        import_slice.

        :param testbed_shell: IotlabDriver object, used to have access to
            testbed_shell attributes.
        :type testbed_shell: IotlabDriver

        .. warning:: does not support multiple keys per user
        """
        ldap_person_listdict = testbed_shell.GetPersons()
        self.logger.info("IOTLABIMPORT \t ldap_person_listdict %s \r\n"
                         % (ldap_person_listdict))

        # import persons
        for person in ldap_person_listdict:

            self.logger.info("IotlabImporter: person : %s" % (person))
            if 'ssh-rsa' not in person['pkey']:
                # people with an invalid ssh key (ssh-dss, empty or malformed keys...)
                # won't be imported
                continue
            person_hrn = person['hrn']
            slice_hrn = self.slicename_to_hrn(person['hrn'])

            # xxx suspicious again
            if len(person_hrn) > 64:
                person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')


            self.logger.info("IotlabImporter: users_rec_by_email %s "
                             % (self.users_rec_by_email))

            #Check if user using person['email'] from LDAP is already registered
            #in SFA. One email = one person. In this case, do not create another
            #record for this person
            #person_hrn returned by GetPerson based on iotlab root auth +
            #uid ldap
            user_record = self.find_record_by_type_hrn('user', person_hrn)

            if not user_record and person['email'] in self.users_rec_by_email:
                user_record = self.users_rec_by_email[person['email']]
                person_hrn = user_record.hrn
                person_urn = hrn_to_urn(person_hrn, 'user')


            slice_record = self.find_record_by_type_hrn('slice', slice_hrn)

            iotlab_key = person['pkey']
            # new person
            if not user_record:
                (pubkey, pkey) = self.init_person_key(person, iotlab_key)
                if pubkey is not None and pkey is not None:
                    person_gid = \
                        self.auth_hierarchy.create_gid(person_urn,
                                                       create_uuid(), pkey)
                    if person['email']:
                        self.logger.debug("IOTLAB IMPORTER \
                            PERSON EMAIL OK email %s " % (person['email']))
                        person_gid.set_email(person['email'])
                        user_record = \
                            RegUser(hrn=person_hrn,
                                    gid=person_gid,
                                    pointer='-1',
                                    authority=get_authority(person_hrn),
                                    email=person['email'])
                    else:
                        user_record = \
                            RegUser(hrn=person_hrn,
                                    gid=person_gid,
                                    pointer='-1',
                                    authority=get_authority(person_hrn))

                    if pubkey:
                        user_record.reg_keys = [RegKey(pubkey)]
                    else:
                        self.logger.warning("No key found for user %s"
                                            % (user_record))

                    try:
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("IotlabImporter: imported person %s"
                                         % (user_record))
                        self.update_just_added_records_dict(user_record)

                    except SQLAlchemyError:
                        self.logger.log_exc("IotlabImporter: "
                                            "failed to import person %s" % (person))
            else:
                # update the record ?
                # if user's primary key has changed then we need to update
                # the users gid by forcing an update here
                sfa_keys = user_record.reg_keys

                new_key = False
                if iotlab_key not in [reg_key.key for reg_key in sfa_keys]:
                    new_key = True
                if new_key:
                    self.logger.info("IotlabImporter: \t \t USER UPDATE \
                        person: %s" % (person['hrn']))
                    (pubkey, pkey) = self.init_person_key(person, iotlab_key)
                    person_gid = \
                        self.auth_hierarchy.create_gid(person_urn,
                                                       create_uuid(), pkey)
                    if not pubkey:
                        user_record.reg_keys = []
                    else:
                        user_record.reg_keys = [RegKey(pubkey)]
                    self.logger.info("IotlabImporter: updated person: %s"
                                     % (user_record))

                if person['email']:
                    user_record.email = person['email']

            try:
                global_dbsession.commit()
                user_record.stale = False
            except SQLAlchemyError:
                self.logger.log_exc("IotlabImporter: \
                failed to update person  %s"% (person))

            self.import_slice(slice_hrn, slice_record, user_record)
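
The docstring's warning ("does not support multiple keys per user") shows up in the update branch above: the importer only regenerates the GID when the single LDAP key is not among the keys already stored in SFA. A small standalone sketch of that check follows; SimpleKey and key_has_changed are illustrative stand-ins, not the SFA RegKey API.

# Sketch of the "has the user's key changed?" check performed in the update branch.
class SimpleKey:
    def __init__(self, key):
        self.key = key

def key_has_changed(ldap_pubkey, sfa_keys):
    """True when the LDAP public key is not among the keys stored in SFA."""
    stored = [reg_key.key for reg_key in sfa_keys]
    return ldap_pubkey not in stored

assert key_has_changed('ssh-rsa AAA... user@host', [])                        # nothing stored yet
assert not key_has_changed('ssh-rsa AAA...', [SimpleKey('ssh-rsa AAA...')])   # same key, no update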
Esempio n. 24
0
    def import_single_slice(self, slicename):
        '''
        Method to import a single slice from the testbed database to the SFA Registry.
        The slice being imported is specified by name.
        The method is used in the verify_slice method (clab_slices.py) when a slice is
        automatically created in the testbed database.

        :param slicename: name of the slice being imported
        :type slicename: string
        '''
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell(config)

        self.logger.debug("Import Single slice: %s" % slicename)

        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn)
        self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                         for record in all_records])
        # create dict keyed by (type,pointer)
        self.records_by_type_pointer = dict([((record.type, record.pointer),
                                              record) for record in all_records
                                             if record.pointer != -1])

        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        site = shell.get_testbed_info()

        # SLICES
        slice = shell.get_slice_by(slice_name=slicename)

        # Import records to the SFA registry
        # SITE
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        #site_record=self.locate_by_type_hrn ('authority', site_hrn)

        # For the current site authority, import child entities/records
        # SLICES
        # Obtain the hrn of the slice
        slice_hrn = slicename_to_hrn(slice['name'], site_hrn)
        # Try to locate the slice_hrn in the SFA records
        slice_record = self.locate_by_type_hrn('slice', slice_hrn)

        if not slice_record:
            # Create/Import record for the slice
            try:
                #Create a keypair for the slice
                pkey = Keypair(create=True)
                # Obtain parameters
                urn = hrn_to_urn(slice_hrn, 'slice')
                slice_gid = self.auth_hierarchy.create_gid(
                    urn, create_uuid(), pkey)
                # Create record for the slice and add it to the Registry
                slice_record = RegSlice(hrn=slice_hrn,
                                        gid=slice_gid,
                                        pointer=slice['id'],
                                        authority=get_authority(slice_hrn))
                slice_record.just_created()
                global_dbsession.add(slice_record)
                global_dbsession.commit()
                self.logger.info("CLabImporter: imported slice: %s" %
                                 slice_hrn)
                self.remember_record(slice_record)
            except:
                self.logger.log_exc("CLabImporter: failed to import slice")
        else:
            # Slice record already in the SFA registry. Update?
            self.logger.warning("Slice already existing in SFA Registry")
            pass

        # Get current users associated with the slice
        users_of_slice = shell.get_users_by_slice(slice)
        # record current users associated with the slice
        slice_record.reg_researchers = \
            [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
        global_dbsession.commit()
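
The last step above rebuilds slice_record.reg_researchers from the testbed's user list via locate_by_type_pointer; if one of those users has not been imported yet, the comprehension silently inserts None into the researcher list. A slightly more defensive variant is sketched below under that assumption; researchers_for_slice and the toy lookup table are illustrative, not SFA code.

# Defensive variant: skip users with no matching 'user' record instead of keeping None.
def researchers_for_slice(locate_by_type_pointer, users_of_slice):
    researchers = []
    for user in users_of_slice:
        record = locate_by_type_pointer('user', user['id'])
        if record is not None:
            researchers.append(record)
    return researchers

# toy usage, with a dict standing in for the importer's (type, pointer) index
table = {('user', 1): 'reg-user-1'}
lookup = lambda type, pointer: table.get((type, pointer))
print(researchers_for_slice(lookup, [{'id': 1}, {'id': 2}]))   # ['reg-user-1']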
Esempio n. 25
0
    def import_nodes(self, site_node_ids, nodes_by_id, testbed_shell):
        """

        Creates appropriate hostnames and RegNode records for each node in
        site_node_ids, based on the information given by the dict nodes_by_id
        that was made from data from OAR. Saves the records to the DB.

        :param site_node_ids: site's node ids
        :type site_node_ids: list of integers
        :param nodes_by_id: dictionary, key is the node id, value is a dict
            with node information.
        :type nodes_by_id: dictionary
        :param testbed_shell: IotlabDriver object, used to have access to
            testbed_shell attributes.
        :type testbed_shell: IotlabDriver

        :returns: None
        :rtype: None

        """

        for node_id in site_node_ids:
            try:
                node = nodes_by_id[node_id]
            except KeyError:
                self.logger.warning("IotlabImporter: cannot find node_id %s \
                        - ignored" % (node_id))
                continue
            escaped_hrn =  \
                self.hostname_to_hrn_escaped(testbed_shell.root_auth,
                                             node['hostname'])
            self.logger.info("IOTLABIMPORTER node %s " % (node))
            hrn = node['hrn']

            # xxx this sounds suspicious
            if len(hrn) > 64:
                hrn = hrn[:64]
            node_record = self.find_record_by_type_hrn('node', hrn)
            if not node_record:
                pkey = Keypair(create=True)
                urn = hrn_to_urn(escaped_hrn, 'node')
                node_gid = \
                    self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)

                def testbed_get_authority(hrn):
                    """ Gets the authority part in the hrn.
                    :param hrn: hrn whose authority we are looking for.
                    :type hrn: string
                    :returns: splits the hrn using the '.' separator and returns
                        the authority part of the hrn.
                    :rtype: string

                    """
                    return hrn.split(".")[0]

                node_record = RegNode(hrn=hrn, gid=node_gid,
                                      pointer='-1',
                                      authority=testbed_get_authority(hrn))
                try:

                    node_record.just_created()
                    global_dbsession.add(node_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported node: %s"
                                     % node_record)
                    self.update_just_added_records_dict(node_record)
                except SQLAlchemyError:
                    self.logger.log_exc("IotlabImporter: failed to import node")
            else:
                #TODO:  xxx update the record ...
                pass
            node_record.stale = False
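
Two small transformations recur in import_nodes: clamping hrns to the registry's 64-character limit and taking the first dotted component of an hrn as its authority (testbed_get_authority above). A minimal sketch of both, with illustrative names:

# Sketch of the hrn clamp and the top-level-authority split used above.
MAX_HRN_LEN = 64

def clamp_hrn(hrn, limit=MAX_HRN_LEN):
    """Truncate an hrn to the registry's maximum length, as the importers do."""
    return hrn if len(hrn) <= limit else hrn[:limit]

def top_authority(hrn):
    """Return the first dotted component of an hrn (e.g. 'iotlab' for 'iotlab.site.node1')."""
    return hrn.split(".")[0]

print(len(clamp_hrn("iotlab." + "x" * 100)))   # 64
print(top_authority("iotlab.site.node1"))      # iotlab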
Esempio n. 26
0
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only required stuff
        sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                               ['site_id','login_base','node_ids','slice_ids','person_ids', 'name', 'hrn'])
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids', 'hrn'])
        # create a hash of persons by person_id
        persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
        disabled_person_ids = [ person['person_id'] for person in disabled_persons ] 
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
                               'key_type': 'ssh'} )
        # create a hash of keys by key_id
        keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] ) 
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {} 
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes  
        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all plc slices
        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids', 'hrn'])
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

        # isolate special vini case in separate method
        self.create_special_vini_record (interface_hrn)

        # Get top authority record
        top_auth_record=self.locate_by_type_hrn ('authority', root_auth)
        admins = []

        # start importing 
        for site in sites:
            try:
                site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except:
                site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            #site_hrn = _get_site_hrn(interface_hrn, site)
            site_hrn = site['hrn']
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but it's not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer=site['site_id'],
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn) 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
                    continue 
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn) 
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False

            site_pis=[]
            # import persons
            for person_id in site['person_ids']:
                proceed=False
                if person_id in persons_by_id:
                    person=persons_by_id[person_id]
                    proceed=True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                #person_hrn = email_to_hrn(site_hrn, person['email'])
                person_hrn = person['hrn']
                if person_hrn is None:
                    self.logger.warn("Person %s has no hrn - skipped"%person['email'])
                    continue
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key (person, plc_keys):
                    pubkey=None
                    if  person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'],[])
                    if not user_record:
                        (pubkey,pkey) = init_person_key (person, plc_keys )
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
                        user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                               pointer=person['person_id'], 
                                               authority=get_authority(person_hrn),
                                               email=person['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys
                        def sfa_key_in_list (sfa_key,plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key']==sfa_key.key:
                                    return True
                            return False
                        # are all the SFA keys known to PLC ?
                        new_keys=False
                        if not sfa_keys and plc_keys:
                            new_keys=True
                        else: 
                            for sfa_key in sfa_keys:
                                if not sfa_key_in_list (sfa_key,plc_keys):
                                    new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_person_key (person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" % user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale=False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append (user_record)

                    # PL Admins need to be marked as PI of the top authority record
                    if 10 in person['role_ids'] and user_record not in top_auth_record.reg_pis:
                        admins.append(user_record)

                except:
                    self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
    
            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis)) 
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
                    continue
                #slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_hrn = slice['hrn']
                if slice_hrn is None:
                    self.logger.warning("Slice %s has no hrn - skipped"%slice['name'])
                    continue
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False

        # Set PL Admins as PI's of the top authority
        if admins:
            top_auth_record.reg_pis = list(set(admins))
            global_dbsession.commit()
            self.logger.info('PlImporter: set PL admins %s as PIs of %s'%(admins,top_auth_record.hrn))

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale=False

        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
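
The long comment in the update branch above explains the key-change policy: since SFA attaches a single key to a GID, the GID is only regenerated when the stored SFA keys are no longer all present among the PLC keys (or when SFA has none but PLC does). Below is a standalone sketch of that decision, using plain stand-ins for the PLC key dicts and SFA RegKey objects rather than the real types.

# Sketch of the "do we need a new GID?" decision described in the comments above.
class StoredKey:
    def __init__(self, key):
        self.key = key

def needs_new_gid(sfa_keys, plc_keys):
    """True when at least one SFA key is unknown to PLC, or SFA has no key but PLC does."""
    if not sfa_keys:
        return bool(plc_keys)
    plc_values = set(k['key'] for k in plc_keys)
    return any(sfa_key.key not in plc_values for sfa_key in sfa_keys)

assert needs_new_gid([], [{'key': 'ssh-rsa AAA'}])                              # no SFA key yet
assert not needs_new_gid([StoredKey('ssh-rsa AAA')], [{'key': 'ssh-rsa AAA'}])  # unchanged
assert needs_new_gid([StoredKey('ssh-rsa OLD')], [{'key': 'ssh-rsa NEW'}])      # key replaced in PLC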
Esempio n. 27
0
    def run(self, options):
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell(config)

        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # Delete all default records
        #for record in all_records:
        #    global_dbsession.delete(record)
        #    global_dbsession.commit()
        #all_records = global_dbsession.query(RegRecord).all()

        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn)
        self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                         for record in all_records])
        # create dict keyed by (type,pointer)
        self.records_by_type_pointer = dict([((record.type, record.pointer),
                                              record) for record in all_records
                                             if record.pointer != -1])

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records:
            record.stale = True

        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        sites = [shell.get_testbed_info()]

        # USERS
        users = shell.get_users({})

        #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
        # KEYS
        # auth_tokens of the users. Dict (user_id:[keys])

        # NODES
        nodes = shell.get_nodes({})

        # SLICES
        slices = shell.get_slices({})

        # Import records to the SFA registry
        # SITE
        for site in sites:
            # Get hrn of the site (authority)
            site_hrn = _get_site_hrn(interface_hrn, site)
            # Try to locate the site_hrn in the SFA records
            site_record = self.locate_by_type_hrn('authority', site_hrn)

            if not site_record:
                # Create/Import record for the site authority
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    # Create record for the site authority and add it to the Registry
                    site_record = RegAuthority(
                        hrn=site_hrn,
                        gid=auth_info.get_gid_object(),
                        pointer=-1,
                        authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info(
                        "CLabImporter: imported authority (site) : %s" %
                        site_hrn)
                    self.remember_record(site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc(
                        "CLabImporter: failed to import site. Skipping child records"
                    )
                    continue
            else:
                # Authority record already in the SFA registry. Update?
                pass

            # Fresh record in SFA Registry
            site_record.stale = False

            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records:
            #    print record

            # For the current site authority, import child entities/records

            # NODES
            for node in nodes:
                # Obtain parameters of the node: site_auth, site_name and hrn of the node
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn = hostname_to_hrn(site_hrn, node['name'])
                # Reduce hrn up to 64 characters
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]

                # Try to locate the node_hrn in the SFA records
                node_record = self.locate_by_type_hrn('node', node_hrn)
                if not node_record:
                    # Create/Import record for the node
                    try:
                        # Create a keypair for the node
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        # Create record for the node and add it to the Registry
                        node_record = RegNode(
                            hrn=node_hrn,
                            gid=node_gid,
                            pointer=node['id'],
                            authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported node: %s" %
                                         node_hrn)
                        self.remember_record(node_record)
                    except:
                        self.logger.log_exc(
                            "CLabImporter: failed to import node")
                else:
                    # Node record already in the SFA registry. Update?
                    pass

                # Fresh record in SFA Registry
                node_record.stale = False
                # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records:
                #    print record

            # USERS
            for user in users:
                # dummyimporter uses email... but Clab can use user['name']
                user_hrn = username_to_hrn(site_hrn, user['name'])
                # Reduce hrn up to 64 characters
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                # Try to locate the user_hrn in the SFA records
                user_record = self.locate_by_type_hrn('user', user_hrn)

                # Auxiliary function to get the keypair of the user from the testbed database
                # If multiple keys, randomly pick the first key in the set
                # If no keys, generate a new keypair for the user's gid
                def init_user_key(user):
                    pubkey = None
                    pkey = None
                    if user['auth_tokens']:
                        # randomly pick first key in set
                        for key in user['auth_tokens']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn(
                                'CLabImporter: unable to convert public key for %s'
                                % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn(
                            "CLabImporter: user %s does not have a CLab public key"
                            % user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                ###########################

                try:
                    if not user_record:
                        # Create/Import record for the user
                        # Create a keypair for the node
                        (pubkey, pkey) = init_user_key(user)
                        # Obtain parameters
                        user_gid = self.auth_hierarchy.create_gid(
                            user_urn, create_uuid(), pkey)
                        user_gid.set_email("*****@*****.**" % (user['name']))
                        # Create record for the node and add it to the Registry
                        user_record = RegUser(
                            hrn=user_hrn,
                            gid=user_gid,
                            pointer=user['id'],
                            authority=get_authority(user_hrn),
                            email="*****@*****.**" % (user['name']))
                        if pubkey:
                            user_record.reg_keys = [RegKey(pubkey)]
                        else:
                            self.logger.warning("No key found for user %s" %
                                                user_hrn)
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("ClabImporter: imported person: %s" %
                                         user_hrn)
                        self.remember_record(user_record)

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys

                        def key_in_list(key, sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key == key: return True
                            return False

                        # is there a new key in Dummy TB ?
                        new_keys = False
                        for key in user['auth_tokens']:
                            if not key_in_list(key, sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey, pkey) = init_user_key(user)
                            user_gid = self.auth_hierarchy.create_gid(
                                user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys = []
                            else:
                                user_record.reg_keys = [RegKey(pubkey)]
                            self.logger.info(
                                "CLabImporter: updated person: %s" % user_hrn)
                    user_record.email = "*****@*****.**" % (user['name'])
                    global_dbsession.commit()

                    # Fresh record in SFA Registry
                    user_record.stale = False
                except:
                    self.logger.log_exc(
                        "CLabImporter: failed to import user %d %s" %
                        (user['id'], user['name']))

            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records:
            #    print record

            # SLICES
            for slice in slices:
                # Obtain the hrn of the slice
                slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
                # Try to locate the slice_hrn in the SFA records
                slice_record = self.locate_by_type_hrn('slice', slice_hrn)

                if not slice_record:
                    # Create/Import record for the slice
                    try:
                        #Create a keypair for the slice
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        # Create record for the slice and add it to the Registry
                        slice_record = RegSlice(
                            hrn=slice_hrn,
                            gid=slice_gid,
                            pointer=slice['id'],
                            authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported slice: %s" %
                                         slice_hrn)
                        self.remember_record(slice_record)
                    except:
                        self.logger.log_exc(
                            "CLabImporter: failed to import slice")
                else:
                    # Slice record already in the SFA registry. Update?
                    self.logger.warning(
                        "Slice already existing in SFA Registry")
                    pass

                # Get current users associated with the slice
                users_of_slice = shell.get_users_by_slice(slice)
                # record current users associated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
                global_dbsession.commit()

                # Fresh record in SFA Registry
                slice_record.stale = False

        # Remove stale records. Old/non-fresh records that were in the SFA Registry

        # Preserve special records
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False

        # Remove all the records that do not have its stale parameter set to False
        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("CLabImporter: deleting stale record: %s" %
                                 record)
                global_dbsession.delete(record)
                global_dbsession.commit()

        # DEBUG
        print 'SFA REGISTRY - Result of Import:'
        all_records = global_dbsession.query(RegRecord).all()
        for record in all_records:
            print record
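
init_user_key above walks the user's auth_tokens, keeps the first one that convert_public_key accepts, and otherwise falls back to a freshly generated keypair. The sketch below isolates that fallback logic with generic callables standing in for convert_public_key and Keypair(create=True); unlike the original it returns None for the public key when no token parses, so it is a slightly more defensive variant rather than the importer's exact behaviour.

# Sketch of the "first convertible key, else a random keypair" fallback.
def pick_user_key(auth_tokens, convert, generate):
    for token in auth_tokens or []:
        try:
            return token, convert(token)      # first token that parses wins
        except Exception:
            continue                          # unparsable token, try the next one
    return None, generate()                   # no usable token: random keypair

def toy_convert(token):
    if token != "ssh-rsa GOOD":
        raise ValueError("cannot convert %s" % token)
    return "PKEY(%s)" % token

print(pick_user_key(["junk", "ssh-rsa GOOD"], toy_convert, lambda: "RANDOM"))
print(pick_user_key([], toy_convert, lambda: "RANDOM"))   # (None, 'RANDOM')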
Esempio n. 28
0
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = DummyShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True
        
        # DEBUG
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records: print record

        ######## retrieve Dummy TB data
        # Get all plc sites
        # retrieve only required stuff
        sites = [shell.GetTestbedInfo()]
        print "sites: " + sites
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all dummy TB users
        users = shell.GetUsers()
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all dummy TB public keys
        keys = []
        for user in users:
            if 'keys' in user:
                keys.extend(user['keys'])
        # create a dict user_id -> [ keys ]
        keys_by_person_id = {} 
        for user in users:
            if 'keys' in user:
                keys_by_person_id[user['user_id']] = user['keys']
        # Get all dummy TB nodes  
        nodes = shell.GetNodes()
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all dummy TB slices
        slices = shell.GetSlices()
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )


        # start importing
        print " STARTING FOR SITES" 
        for site in sites:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but it's not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            print site_hrn
            print site_record
            if not site_record:
                try:
                    print "TRY TO CREATE SITE RECORD"
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        print "create auth "+urn
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    print "urn: "+urn
                    print "auth_info: " + auth_info
                    print site_record
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("DummyImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("DummyImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node in nodes:
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("DummyImporter: failed to import node") 
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False
            
            all_records = global_dbsession.query(RegRecord).all()
            for record in all_records: print record
            
            site_pis=[]
            # import users
            for user in users:
                user_hrn = email_to_hrn(site_hrn, user['email'])
                # xxx suspicious again
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', user_hrn)

                # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object)

                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['keys']:
                        # randomly pick first key in set
                        for key in user['keys']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new user
                try:
                    if not user_record:
                        (pubkey,pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email(user['email'])
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['user_id'], 
                                                 authority=get_authority(user_hrn),
                                                 email=user['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the 
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in Dummy TB ?
                        new_keys=False
                        for key in user['keys']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("DummyImporter: updated person: %s" % user_record)
                    user_record.email = user['email']
                    global_dbsession.commit()
                    user_record.stale=False
                except:
                    self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))
    

            # import slices
            for slice in slices:
                slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("DummyImporter: failed to import slice")
                else:
                    # xxx update the record ...
                    self.logger.warning ("Slice update not yet implemented")
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False

        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("DummyImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
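
The import loops and the stale-record sweep above rely on a few small helpers (locate_by_type_hrn, locate_by_type_pointer, remember_record) that are not part of this excerpt. A minimal sketch of what they presumably look like, written as methods of the importer class and assuming it keeps the two dictionaries keyed by (type, hrn) and (type, pointer) that the CLab examples below build explicitly:

# Minimal sketch (assumption): lookup/bookkeeping helpers used by the importers.
# They rely on self.records_by_type_hrn and self.records_by_type_pointer, the
# two dicts built from global_dbsession.query(RegRecord).all() at the start of a run.

def locate_by_type_hrn(self, type, hrn):
    # cached lookup; returns None when no record of this (type, hrn) exists yet
    return self.records_by_type_hrn.get((type, hrn))

def locate_by_type_pointer(self, type, pointer):
    # cached lookup by testbed pointer (e.g. a user or slice id), or None
    return self.records_by_type_pointer.get((type, pointer))

def remember_record(self, record):
    # register a freshly imported record in both caches so later lookups
    # (e.g. when filling slice_record.reg_researchers) can find it
    self.records_by_type_hrn[(record.type, record.hrn)] = record
    if record.pointer not in (None, -1):
        self.records_by_type_pointer[(record.type, record.pointer)] = record
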
Example no. 29
 def import_single_node(self, nodename):
     '''
     Import a single node from the testbed database into the SFA Registry.
     The node being imported is specified by name.
     This method is used by the verify_node method (clab_slices.py) when a node
     is automatically created in the testbed database (see the usage sketch
     after this example).

     :param nodename: name of the node being imported
     :type nodename: string
     '''
     config = Config ()
     interface_hrn = config.SFA_INTERFACE_HRN
     root_auth = config.SFA_REGISTRY_ROOT_AUTH
     shell = ClabShell (config)
     
     self.logger.debug("Import Single node: %s"%nodename)
             
     # retrieve all existing SFA objects
     all_records = global_dbsession.query(RegRecord).all()
     
     # Dicts to avoid duplicates in SFA database
     # create dict keyed by (type,hrn) 
     self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
     # create dict keyed by (type,pointer) 
     self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])
     
     # Retrieve data from the CLab testbed and create dictionaries by id
     # SITE
     site = shell.get_testbed_info()
     
     # NODES
     node = shell.get_node_by(node_name=nodename)
     
     # Import records to the SFA registry
     # SITE
     # Get hrn of the site (authority)
     site_hrn = _get_site_hrn(interface_hrn, site)
     # Try to locate the site_hrn in the SFA records
     #site_record=self.locate_by_type_hrn ('authority', site_hrn)
             
     # NODE
     # Obtain parameters of the node: site_auth, site_name and hrn of the node
     site_auth = get_authority(site_hrn)
     site_name = site['name']
     node_hrn =  hostname_to_hrn(site_hrn, node['name'])
     # Reduce hrn up to 64 characters
     if len(node_hrn) > 64: node_hrn = node_hrn[:64]
     
     # Try to locate the node_hrn in the SFA records
     node_record = self.locate_by_type_hrn ('node', node_hrn )
     if not node_record:
         # Create/Import record for the node
         try:
             # Create a keypair for the node
             pkey = Keypair(create=True)
             # Obtain parameters 
             urn = hrn_to_urn(node_hrn, 'node')
             node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
             # Create record for the node and add it to the Registry
             node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                    pointer =node['id'],
                                    authority=get_authority(node_hrn))
             node_record.just_created()
             global_dbsession.add(node_record)
             global_dbsession.commit()
             self.logger.info("CLabImporter: imported node: %s" %node_hrn)  
             self.remember_record (node_record)
         except:
             self.logger.log_exc("CLabImporter: failed to import node") 
     else:
         # Node record already in the SFA registry. Update?
         pass
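
For context, a hypothetical sketch of how verify_node in clab_slices.py might call this method once it has created a node in the testbed database; the create_node shell call and the importer argument are assumptions for illustration, not code from clab_slices.py:

# Hypothetical usage sketch: create the node in the testbed, then import it
# into the SFA Registry right away so it is usable without a full re-import.
def verify_node(self, nodename, importer):
    node = self.shell.get_node_by(node_name=nodename)
    if not node:
        node = self.shell.create_node(name=nodename)  # assumed shell helper
        importer.import_single_node(nodename)
    return node
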
Example no. 30
    def import_persons_and_slices(self, testbed_shell):
        """

        Gets user data from LDAP and processes it. Creates the hrn for the
        user's slice and the user's gid, then creates the RegUser record
        associated with the user and the RegKey record associated with the
        user's key. Saves those records into the SFA DB, and also imports the
        user's slice by calling import_slice (a sketch of the init_person_key
        helper used below follows this example).

        :param testbed_shell: IotlabDriver object, used to have access to
            testbed_shell attributes.
        :type testbed_shell: IotlabDriver

        .. warning:: does not support multiple keys per user
        """
        ldap_person_listdict = testbed_shell.GetPersons()
        self.logger.info("IOTLABIMPORT \t ldap_person_listdict %s \r\n" %
                         (ldap_person_listdict))

        # import persons
        for person in ldap_person_listdict:

            self.logger.info("IotlabImporter: person :" % (person))
            if 'ssh-rsa' not in person['pkey']:
                # people with an invalid ssh key (ssh-dss, empty, malformed keys...)
                # won't be imported
                continue
            person_hrn = person['hrn']
            slice_hrn = self.slicename_to_hrn(person['hrn'])

            # xxx suspicious again
            if len(person_hrn) > 64:
                person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')

            self.logger.info("IotlabImporter: users_rec_by_email %s " %
                             (self.users_rec_by_email))

            #Check if user using person['email'] from LDAP is already registered
            #in SFA. One email = one person. In this case, do not create another
            #record for this person
            #person_hrn returned by GetPerson based on iotlab root auth +
            #uid ldap
            user_record = self.find_record_by_type_hrn('user', person_hrn)

            if not user_record and person['email'] in self.users_rec_by_email:
                user_record = self.users_rec_by_email[person['email']]
                person_hrn = user_record.hrn
                person_urn = hrn_to_urn(person_hrn, 'user')

            slice_record = self.find_record_by_type_hrn('slice', slice_hrn)

            iotlab_key = person['pkey']
            # new person
            if not user_record:
                (pubkey, pkey) = self.init_person_key(person, iotlab_key)
                if pubkey is not None and pkey is not None:
                    person_gid = \
                        self.auth_hierarchy.create_gid(person_urn,
                                                       create_uuid(), pkey)
                    if person['email']:
                        self.logger.debug("IOTLAB IMPORTER \
                            PERSON EMAIL OK email %s " % (person['email']))
                        person_gid.set_email(person['email'])
                        user_record = \
                            RegUser(hrn=person_hrn,
                                    gid=person_gid,
                                    pointer='-1',
                                    authority=get_authority(person_hrn),
                                    email=person['email'])
                    else:
                        user_record = \
                            RegUser(hrn=person_hrn,
                                    gid=person_gid,
                                    pointer='-1',
                                    authority=get_authority(person_hrn))

                    if pubkey:
                        user_record.reg_keys = [RegKey(pubkey)]
                    else:
                        self.logger.warning("No key found for user %s" %
                                            (user_record))

                    # save the record whether or not a public key was found
                    try:
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("IotlabImporter: imported person %s"
                                         % (user_record))
                        self.update_just_added_records_dict(user_record)

                    except SQLAlchemyError:
                        self.logger.log_exc("IotlabImporter: "
                                            "failed to import person %s" % (person))
            else:
                # update the record ?
                # if the user's primary key has changed then we need to update
                # the user's gid by forcing an update here
                sfa_keys = user_record.reg_keys

                # is there a key in LDAP that is not yet stored in SFA?
                # (compare by value; an identity check would always force an update)
                new_key = False
                if not any(reg_key.key == iotlab_key for reg_key in sfa_keys):
                    new_key = True
                if new_key:
                    self.logger.info("IotlabImporter: \t \t USER UPDATE \
                        person: %s" % (person['hrn']))
                    (pubkey, pkey) = self.init_person_key(person, iotlab_key)
                    person_gid = \
                        self.auth_hierarchy.create_gid(person_urn,
                                                       create_uuid(), pkey)
                    if not pubkey:
                        user_record.reg_keys = []
                    else:
                        user_record.reg_keys = [RegKey(pubkey)]
                    self.logger.info("IotlabImporter: updated person: %s" %
                                     (user_record))

                if person['email']:
                    user_record.email = person['email']

            try:
                global_dbsession.commit()
                user_record.stale = False
            except SQLAlchemyError:
                self.logger.log_exc("IotlabImporter: \
                failed to update person  %s" % (person))

            self.import_slice(slice_hrn, slice_record, user_record)
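
The helper init_person_key is called above but not shown in this excerpt. A minimal sketch, assuming it mirrors the init_user_key helpers of the Dummy and CLab importers (convert the LDAP ssh-rsa key with convert_public_key, and fall back to a fresh Keypair when that fails or no key exists):

# Minimal sketch (assumption): mirrors init_user_key in the other importers.
def init_person_key(self, person, iotlab_key):
    pubkey = None
    if person['pkey']:
        # back the user's gid with the public key stored in LDAP
        pubkey = iotlab_key
        try:
            pkey = convert_public_key(pubkey)
        except Exception:
            self.logger.warn("IotlabImporter: unable to convert public key "
                             "for %s" % person['hrn'])
            pkey = Keypair(create=True)
    else:
        # the person has no key in LDAP: create a random keypair for the gid
        self.logger.warn("IotlabImporter: person %s has no public key"
                         % person['hrn'])
        pkey = Keypair(create=True)
    return (pubkey, pkey)
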
Example no. 31
    def import_single_slice(self, slicename):
        '''
        Import a single slice from the testbed database into the SFA Registry.
        The slice being imported is specified by name.
        This method is used by the verify_slice method (clab_slices.py) when a slice
        is automatically created in the testbed database.

        :param slicename: name of the slice being imported
        :type slicename: string
        '''
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell (config)
        
        self.logger.debug("Import Single slice: %s"%slicename)
                
        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()
        
        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn) 
        self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
        # create dict keyed by (type,pointer) 
        self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])
        
        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        site = shell.get_testbed_info()

        # SLICES
        slice = shell.get_slice_by(slice_name=slicename)
        
        # Import records to the SFA registry
        # SITE
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        #site_record=self.locate_by_type_hrn ('authority', site_hrn)
        
        # For the current site authority, import child entities/records    
        # SLICES
        # Obtain the hrn of the slice from the site authority and the slice name
        slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
        # Try to locate the slice_hrn in the SFA records
        slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
        
        if not slice_record:
            # Create/Import record for the slice
            try:
                #Create a keypair for the slice
                pkey = Keypair(create=True)
                # Obtain parameters
                urn = hrn_to_urn(slice_hrn, 'slice')
                slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                # Create record for the slice and add it to the Registry
                slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                         pointer=slice['id'],
                                         authority=get_authority(slice_hrn))
                slice_record.just_created()
                global_dbsession.add(slice_record)
                global_dbsession.commit()
                self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)  
                self.remember_record ( slice_record )
            except:
                self.logger.log_exc("CLabImporter: failed to import slice")
        else:
            # Slice record already in the SFA registry. Update?
            self.logger.warning ("Slice already existing in SFA Registry")
            pass
        
        # Get current users associated with the slice
        users_of_slice = shell.get_users_by_slice(slice)
        # record current users associated with the slice
        slice_record.reg_researchers = \
            [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
        global_dbsession.commit()
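
As a concrete illustration of the naming used above, assuming slicename_to_hrn, hrn_to_urn and get_authority follow the usual SFA conventions (the exact outputs below are an assumption, for illustration only):

# Worked example (assumed helper outputs), with site_hrn = 'clab' and a
# testbed slice named 'myexperiment':
#   slicename_to_hrn('clab', 'myexperiment')   -> 'clab.myexperiment'
#   hrn_to_urn('clab.myexperiment', 'slice')   -> 'urn:publicid:IDN+clab+slice+myexperiment'
#   get_authority('clab.myexperiment')         -> 'clab'
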
Example no. 32
    def import_nodes(self, site_node_ids, nodes_by_id, testbed_shell):
        """

        Creates appropriate hostnames and RegNode records for each node in
        site_node_ids, based on the information given by the dict nodes_by_id
        that was made from data from OAR. Saves the records to the DB.

        :param site_node_ids: site's node ids
        :type site_node_ids: list of integers
        :param nodes_by_id: dictionary, key is the node id, value is a dict
            with node information.
        :type nodes_by_id: dictionary
        :param testbed_shell: IotlabDriver object, used to have access to
            testbed_shell attributes.
        :type testbed_shell: IotlabDriver

        :returns: None
        :rtype: None

        """

        for node_id in site_node_ids:
            try:
                node = nodes_by_id[node_id]
            except KeyError:
                self.logger.warning("IotlabImporter: cannot find node_id %s \
                        - ignored" % (node_id))
                continue
            escaped_hrn =  \
                self.hostname_to_hrn_escaped(testbed_shell.root_auth,
                                             node['hostname'])
            self.logger.info("IOTLABIMPORTER node %s " % (node))
            hrn = node['hrn']

            # xxx this sounds suspicious
            if len(hrn) > 64:
                hrn = hrn[:64]
            node_record = self.find_record_by_type_hrn('node', hrn)
            if not node_record:
                pkey = Keypair(create=True)
                urn = hrn_to_urn(escaped_hrn, 'node')
                node_gid = \
                    self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)

                def testbed_get_authority(hrn):
                    """ Gets the authority part in the hrn.
                    :param hrn: hrn whose authority we are looking for.
                    :type hrn: string
                    :returns: splits the hrn using the '.' separator and returns
                        the authority part of the hrn.
                    :rtype: string

                    """
                    return hrn.split(".")[0]

                node_record = RegNode(hrn=hrn,
                                      gid=node_gid,
                                      pointer='-1',
                                      authority=testbed_get_authority(hrn))
                try:

                    node_record.just_created()
                    global_dbsession.add(node_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported node: %s" %
                                     node_record)
                    self.update_just_added_records_dict(node_record)
                except SQLAlchemyError:
                    self.logger.log_exc(
                        "IotlabImporter: failed to import node")
            else:
                #TODO:  xxx update the record ...
                pass
            node_record.stale = False
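
find_record_by_type_hrn and update_just_added_records_dict, used by the Iotlab importer above, are not shown in this excerpt either. A minimal sketch, assuming they follow the same (type, hrn) dictionary pattern as the other importers in this section:

# Minimal sketch (assumption): same (type, hrn) bookkeeping as the other importers.
def find_record_by_type_hrn(self, type, hrn):
    # cached lookup; returns None when the record has not been imported yet
    return self.records_by_type_hrn.get((type, hrn))

def update_just_added_records_dict(self, new_record):
    # remember a record created during this run so later passes
    # (including the stale-record sweep) see it as already imported
    key = (new_record.type, new_record.hrn)
    if key not in self.records_by_type_hrn:
        self.records_by_type_hrn[key] = new_record
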
Example no. 33
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell (config)
                
        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()
        
        # Delete all default records
        #for record in all_records:
        #    global_dbsession.delete(record)
        #    global_dbsession.commit()
        #all_records = global_dbsession.query(RegRecord).all()
        
        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn) 
        self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
        # create dict keyed by (type,pointer) 
        self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])
        
        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: 
            record.stale=True
        
        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        sites = [shell.get_testbed_info()]
        
        # USERS
        users = shell.get_users({})
        
        #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
        # KEYS
        # auth_tokens of the users. Dict (user_id:[keys])
        
        # NODES
        nodes = shell.get_nodes({})
        
        # SLICES
        slices = shell.get_slices({})
        
        
        # Import records to the SFA registry
        # SITE
        for site in sites:
            # Get hrn of the site (authority)
            site_hrn = _get_site_hrn(interface_hrn, site)
            # Try to locate the site_hrn in the SFA records
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            
            if not site_record:
                # Create/Import record for the site authority
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    # Create record for the site authority and add it to the Registry
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("CLabImporter: imported authority (site) : %s" % site_hrn) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("CLabImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # Authority record already in the SFA registry. Update?
                pass
            
            # Fresh record in SFA Registry
            site_record.stale=False
            
            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records: 
            #    print record
            
             
            # For the current site authority, import child entities/records
            
            # NODES
            for node in nodes:
                # Obtain parameters of the node: site_auth, site_name and hrn of the node
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_hrn, node['name'])
                # Reduce hrn up to 64 characters
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                
                # Try to locate the node_hrn in the SFA records
                node_record = self.locate_by_type_hrn ('node', node_hrn )
                if not node_record:
                    # Create/Import record for the node
                    try:
                        # Create a keypair for the node
                        pkey = Keypair(create=True)
                        # Obtain parameters 
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        # Create record for the node and add it to the Registry
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported node: %s" %node_hrn)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("CLabImporter: failed to import node") 
                else:
                    # Node record already in the SFA registry. Update?
                    pass
                
                # Fresh record in SFA Registry
                node_record.stale=False
                # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records: 
                #    print record
                
    
            # USERS
            for user in users:
                # dummyimporter uses email... but Clab can use user['name']
                user_hrn = username_to_hrn (site_hrn, user['name'])
                # Reduce hrn up to 64 characters
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')
                
                # Try to locate the user_hrn in the SFA records
                user_record = self.locate_by_type_hrn ('user', user_hrn)


                # Auxiliary function to get the keypair of the user from the testbed database
                # If the user has several keys, pick the first one that converts successfully
                # If no keys, generate a new keypair for the user's gid
                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['auth_tokens']:
                        # pick the first key in the set that converts successfully
                        for key in user['auth_tokens']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn('CLabImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("CLabImporter: user %s does not have a CLab public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)
                ###########################
                
                try:
                    if not user_record:
                        # Create/Import record for the user
                        # Obtain or create a keypair for the user
                        (pubkey,pkey) = init_user_key (user)
                        # Obtain parameters
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email("*****@*****.**"%(user['name']))
                        # Create record for the user and add it to the Registry
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['id'], 
                                                 authority=get_authority(user_hrn),
                                                 email="*****@*****.**"%(user['name']))
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_hrn)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("ClabImporter: imported person: %s" % user_hrn)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if the user's primary key has changed then we need to update the
                        # user's gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in the CLab testbed?
                        new_keys=False
                        for key in user['auth_tokens']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("CLabImporter: updated person: %s" % user_hrn)
                    user_record.email = "*****@*****.**"%(user['name'])
                    global_dbsession.commit()
                                        
                    # Fresh record in SFA Registry
                    user_record.stale=False
                except:
                    self.logger.log_exc("CLabImporter: failed to import user %d %s"%(user['id'],user['name']))
            
            # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records: 
                #    print record         
                    
            # SLICES
            for slice in slices:
                # Obtain the hrn of the slice from the site authority and the slice name
                slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
                # Try to locate the slice_hrn in the SFA records
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                
                if not slice_record:
                    # Create/Import record for the slice
                    try:
                        #Create a keypair for the slice
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        # Create record for the slice and add it to the Registry
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("CLabImporter: failed to import slice")
                else:
                    # Slice record already in the SFA registry. Update?
                    self.logger.warning ("Slice already existing in SFA Registry")
                    pass
                
                # Get current users associated with the slice
                users_of_slice = shell.get_users_by_slice(slice)
                # record current users associated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
                global_dbsession.commit()
                                
                # Fresh record in SFA Registry 
                slice_record.stale=False    
                
     
        # Remove stale records. Old/non-fresh records that were in the SFA Registry
        
        # Preserve special records 
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
                
        # Remove all the records that do not have its stale parameter set to False
        for record in all_records:
            try:
                stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("CLabImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
                
        # DEBUG
        print 'SFA REGISTRY - Result of Import:'
        all_records = global_dbsession.query(RegRecord).all()
        for record in all_records: 
            print record
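
The stale-record sweep at the end of run() is duplicated almost verbatim between the Dummy and CLab importers above. A possible refactoring, sketched here under the assumption that the importers share a common base class, is to factor it into one helper:

# Sketch (assumption): shared stale-record sweep, factored out of run().
def delete_stale_records(self, all_records, system_hrns, importer_name):
    # special records and records owned by a peer authority are preserved
    for record in all_records:
        if record.hrn in system_hrns or record.peer_authority:
            record.stale = False
    # everything that was not marked fresh during this run gets deleted
    for record in all_records:
        if getattr(record, 'stale', True):
            self.logger.info("%s: deleting stale record: %s"
                             % (importer_name, record))
            global_dbsession.delete(record)
            global_dbsession.commit()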