Example #1
 def create_special_vini_record(self, interface_hrn):
     # special case for vini
     if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
         # create a fake internet2 site first
         i2site = {
             'name': 'Internet2',
             'login_base': 'internet2',
             'site_id': -1
         }
         site_hrn = _get_site_hrn(interface_hrn, i2site)
         # import if the hrn is not in the list of existing hrns, or if it exists
         # but it's not a site record
         if (
                 'authority',
                 site_hrn,
         ) not in self.records_by_type_hrn:
             urn = hrn_to_urn(site_hrn, 'authority')
             if not self.auth_hierarchy.auth_exists(urn):
                 self.auth_hierarchy.create_auth(urn)
             auth_info = self.auth_hierarchy.get_auth_info(urn)
             auth_record = RegAuthority(hrn=site_hrn,
                                        gid=auth_info.get_gid_object(),
                                        pointer=i2site['site_id'],
                                        authority=get_authority(site_hrn))
             auth_record.just_created()
             global_dbsession.add(auth_record)
             global_dbsession.commit()
             self.logger.info(
                 "PlImporter: Imported authority (vini site) %s" %
                 auth_record)
             self.remember_record(auth_record)
Example #2
 def create_interface_records(self):
     """
     Create a record for each SFA interface
     """
     # just create certs for all sfa interfaces even if they
     # aren't enabled
     auth_info = self.auth_hierarchy.get_auth_info(
         self.config.SFA_INTERFACE_HRN)
     pkey = auth_info.get_pkey_object()
     hrn = self.config.SFA_INTERFACE_HRN
     for type in [
             'authority+sa',
             'authority+am',
             'authority+sm',
     ]:
         urn = hrn_to_urn(hrn, type)
         gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
         # for now we have to preserve the authority+<> stuff
         if self.record_exists(type, hrn): continue
         interface_record = RegAuthority(type=type,
                                         hrn=hrn,
                                         gid=gid,
                                         authority=get_authority(hrn))
         interface_record.just_created()
         global_dbsession.add(interface_record)
         global_dbsession.commit()
         self.logger.info("SfaImporter: imported authority (%s) %s " %
                          (type, interface_record))
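The record_exists guard above works because the registry identifies records by the (type, hrn) pair; the run() methods further down rebuild exactly that lookup (records_by_type_hrn) from the database before importing anything. Below is a minimal sketch of that de-duplication pattern, with a plain dict and a hypothetical remember() helper standing in for the SQLAlchemy records and self.remember_record():

# Sketch only: a plain-dict stand-in for the (type, hrn) bookkeeping the
# importers rebuild from global_dbsession.query(RegRecord).all().
existing = {}   # (type, hrn) -> record, mirrors self.records_by_type_hrn

def remember(record):
    # hypothetical helper mirroring self.remember_record()
    existing[(record['type'], record['hrn'])] = record

def record_exists(type, hrn):
    # same membership test as "('authority', site_hrn) not in self.records_by_type_hrn"
    return (type, hrn) in existing

remember({'type': 'authority', 'hrn': 'plc', 'pointer': -1})
assert record_exists('authority', 'plc')
assert not record_exists('authority+sa', 'plc')   # same hrn, different type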
Example #3
    def import_sites_and_nodes(self, testbed_shell):
        """

        Gets all the sites and nodes from OAR, processes the information,
        creates hrns and a RegAuthority record for each site, and feeds them
        to the database. For each site, imports the site's nodes into the DB
        by calling import_nodes.

        :param testbed_shell: IotlabDriver object, used to access
            testbed_shell methods and to fetch info on sites and nodes.
        :type testbed_shell: IotlabDriver
        """

        sites_listdict = testbed_shell.GetSites()
        nodes_listdict = testbed_shell.GetNodes()
        nodes_by_id = dict([(node['node_id'], node)
                            for node in nodes_listdict])
        for site in sites_listdict:
            site_hrn = site['name']
            site_record = self.find_record_by_type_hrn('authority', site_hrn)
            self.logger.info("IotlabImporter: import_sites_and_nodes "
                             "(site) %s" % site_record)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)

                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = \
                        RegAuthority(hrn=site_hrn,
                                     gid=auth_info.get_gid_object(),
                                     pointer='-1',
                                     authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported authority "
                                     "(site) %s" % site_record)
                    self.update_just_added_records_dict(site_record)
                except SQLAlchemyError:
                    # if the site import fails then there is no point in trying
                    # to import the site's child records (node, slices, persons),
                    # so skip them.
                    self.logger.log_exc("IotlabImporter: failed to import "
                                        "site. Skipping child records")
                    continue
            else:
                # xxx update the record ...
                pass

            site_record.stale = False
            self.import_nodes(site['node_ids'], nodes_by_id, testbed_shell)

        return
Example #4
    def import_sites_and_nodes(self, testbed_shell):
        """

        Gets all the sites and nodes from OAR, processes the information,
        creates hrns and a RegAuthority record for each site, and feeds them
        to the database. For each site, imports the site's nodes into the DB
        by calling import_nodes.

        :param testbed_shell: IotlabDriver object, used to access
            testbed_shell methods and to fetch info on sites and nodes.
        :type testbed_shell: IotlabDriver
        """

        sites_listdict = testbed_shell.GetSites()
        nodes_listdict = testbed_shell.GetNodes()
        nodes_by_id = dict([(node['node_id'], node) for node in nodes_listdict])
        for site in sites_listdict:
            site_hrn = site['name']
            site_record = self.find_record_by_type_hrn ('authority', site_hrn)
            self.logger.info("IotlabImporter: import_sites_and_nodes "
                             "(site) %s" % site_record)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)

                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = \
                        RegAuthority(hrn=site_hrn,
                                     gid=auth_info.get_gid_object(),
                                     pointer='-1',
                                     authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported authority "
                                     "(site) %s" % site_record)
                    self.update_just_added_records_dict(site_record)
                except SQLAlchemyError:
                    # if the site import fails then there is no point in trying
                    # to import the site's child records (node, slices, persons),
                    # so skip them.
                    self.logger.log_exc("IotlabImporter: failed to import "
                                        "site. Skipping child records")
                    continue
            else:
                # xxx update the record ...
                pass

            site_record.stale = False
            self.import_nodes(site['node_ids'], nodes_by_id, testbed_shell)

        return
Example #5
    def import_tenants(self, existing_hrns, existing_records):
        # Get all tenants
        # A tenant can represent an organizational group (site) or a
        # slice. If a tenant's authority/parent matches the root authority it is
        # considered a group/site. All other tenants are considered slices.
        tenants = self.shell.auth_manager.tenants.list()
        tenants_dict = {}
        for tenant in tenants:
            hrn = self.config.SFA_INTERFACE_HRN + '.' + tenant.name
            tenants_dict[hrn] = tenant
            authority_hrn = OSXrn(xrn=hrn, type='authority').get_authority_hrn()

            if hrn in existing_hrns:
                continue

            if authority_hrn == self.config.SFA_INTERFACE_HRN:
                # import group/site
                record = RegAuthority()
                urn = OSXrn(xrn=hrn, type='authority').get_urn()
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                gid = auth_info.get_gid_object()
                record.type='authority'
                record.hrn=hrn
                record.gid=gid
                record.authority=get_authority(hrn)
                dbsession.add(record)
                dbsession.commit()
                self.logger.info("OpenstackImporter: imported authority: %s" % record)

            else:
                record = RegSlice ()
                urn = OSXrn(xrn=hrn, type='slice').get_urn()
                pkey = Keypair(create=True)
                gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                record.type='slice'
                record.hrn=hrn
                record.gid=gid
                record.authority=get_authority(hrn)
                dbsession.add(record)
                dbsession.commit()
                self.logger.info("OpenstackImporter: imported slice: %s" % record) 

        return tenants_dict
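The site-versus-slice decision above boils down to comparing the authority portion of the tenant hrn with the root interface hrn. The following is only a toy illustration of that test, assuming the usual hrn convention where the authority is everything before the last dot; split_authority() below is a hypothetical stand-in for OSXrn(...).get_authority_hrn():

ROOT = 'openstack'   # plays the role of self.config.SFA_INTERFACE_HRN

def split_authority(hrn):
    # hypothetical stand-in for OSXrn(xrn=hrn, type='authority').get_authority_hrn():
    # the authority of an hrn is assumed to be everything before its last dot
    return hrn.rsplit('.', 1)[0] if '.' in hrn else hrn

for tenant_name in ['mysite', 'mysite.myslice']:
    hrn = ROOT + '.' + tenant_name
    kind = 'authority (site)' if split_authority(hrn) == ROOT else 'slice'
    print('%s -> %s' % (hrn, kind))
# openstack.mysite -> authority (site)
# openstack.mysite.myslice -> slice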
Example #6
 def create_interface_records(self):
     """
     Create a record for each SFA interface
     """
     # just create certs for all sfa interfaces even if they
     # aren't enabled
     auth_info = self.auth_hierarchy.get_auth_info(self.config.SFA_INTERFACE_HRN)
     pkey = auth_info.get_pkey_object()
     hrn=self.config.SFA_INTERFACE_HRN
     for type in  [ 'authority+sa', 'authority+am', 'authority+sm', ]:
         urn = hrn_to_urn(hrn, type)
         gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
         # for now we have to preserve the authority+<> stuff
         if self.record_exists (type,hrn): continue
         interface_record = RegAuthority(type=type, hrn=hrn, gid=gid,
                                         authority=get_authority(hrn))
         interface_record.just_created()
         dbsession.add (interface_record)
         dbsession.commit()
         self.logger.info("SfaImporter: imported authority (%s) %s " % (type,interface_record))
Example #7
 def create_special_vini_record (self, interface_hrn):
     # special case for vini
     if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
         # create a fake internet2 site first
         i2site = {'name': 'Internet2', 'login_base': 'internet2', 'site_id': -1}
         site_hrn = _get_site_hrn(interface_hrn, i2site)
          # import if the hrn is not in the list of existing hrns, or if it exists
          # but it's not a site record
         if ( 'authority', site_hrn, ) not in self.records_by_type_hrn:
             urn = hrn_to_urn(site_hrn, 'authority')
             if not self.auth_hierarchy.auth_exists(urn):
                 self.auth_hierarchy.create_auth(urn)
             auth_info = self.auth_hierarchy.get_auth_info(urn)
             auth_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                        pointer=i2site['site_id'],
                                        authority=get_authority(site_hrn))
             auth_record.just_created()
             global_dbsession.add(auth_record)
             global_dbsession.commit()
             self.logger.info("PlImporter: Imported authority (vini site) %s" % auth_record)
             self.remember_record(auth_record)
Example #8
    def create_top_level_auth_records(self, hrn):
        """
        Create top-level db records (includes root and sub authorities, local/remote).
        """
        # make sure parent exists
        parent_hrn = get_authority(hrn)
        if not parent_hrn:
            parent_hrn = hrn
        if not parent_hrn == hrn:
            self.create_top_level_auth_records(parent_hrn)

        # ensure key and cert exists:
        self.auth_hierarchy.create_top_level_auth(hrn)
        # create the db record if it doesn't already exist
        if not self.record_exists ('authority',hrn):
            auth_info = self.auth_hierarchy.get_auth_info(hrn)
            auth_record = RegAuthority(hrn=hrn, gid=auth_info.get_gid_object(),
                                       authority=get_authority(hrn))
            auth_record.just_created()
            dbsession.add (auth_record)
            dbsession.commit()
            self.logger.info("SfaImporter: imported authority (parent) %s " % auth_record)     
Example #9
    def create_top_level_auth_records(self, hrn):
        """
        Create top-level db records (includes root and sub authorities, local/remote).
        """
        # make sure parent exists
        parent_hrn = get_authority(hrn)
        if not parent_hrn:
            parent_hrn = hrn
        if not parent_hrn == hrn:
            self.create_top_level_auth_records(parent_hrn)

        # ensure key and cert exists:
        self.auth_hierarchy.create_top_level_auth(hrn)
        # create the db record if it doesn't already exist
        if not self.record_exists('authority', hrn):
            auth_info = self.auth_hierarchy.get_auth_info(hrn)
            auth_record = RegAuthority(hrn=hrn,
                                       gid=auth_info.get_gid_object(),
                                       authority=get_authority(hrn))
            auth_record.just_created()
            global_dbsession.add(auth_record)
            global_dbsession.commit()
            self.logger.info("SfaImporter: imported authority (parent) %s " %
                             auth_record)
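Both variants of create_top_level_auth_records walk up the hrn hierarchy before creating anything: get_authority() yields the parent hrn and the recursion guarantees parents are imported first. A small sketch of the resulting order, using a simplified stand-in for get_authority() (assumed here to return everything before the last dot, or an empty string at the root):

def parent_hrn(hrn):
    # simplified stand-in for sfa's get_authority(): parent of 'a.b.c' is 'a.b',
    # and a single-component hrn has no parent (empty string)
    return hrn.rsplit('.', 1)[0] if '.' in hrn else ''

def auth_chain(hrn):
    # order in which create_top_level_auth_records() would touch the records:
    # the recursion bottoms out at the root authority, so parents come first
    chain = []
    while hrn:
        chain.append(hrn)
        hrn = parent_hrn(hrn)
    return list(reversed(chain))

print(auth_chain('plc.princeton.codeen'))
# ['plc', 'plc.princeton', 'plc.princeton.codeen']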
Example #10
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = DummyShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True
        
        # DEBUG
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records: print record

        ######## retrieve Dummy TB data
        # Get all plc sites
        # retrieve only required stuff
        sites = [shell.GetTestbedInfo()]
        print "sites: %s" % sites
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all dummy TB users
        users = shell.GetUsers()
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all dummy TB public keys
        keys = []
        for user in users:
            if 'keys' in user:
                keys.extend(user['keys'])
        # create a dict user_id -> [ keys ]
        keys_by_person_id = {} 
        for user in users:
             if 'keys' in user:
                 keys_by_person_id[user['user_id']] = user['keys']
        # Get all dummy TB nodes  
        nodes = shell.GetNodes()
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all dummy TB slices
        slices = shell.GetSlices()
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )


        # start importing
        print " STARTING FOR SITES" 
        for site in sites:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if the hrn is not in the list of existing hrns, or if it exists
            # but it's not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            print site_hrn
            print site_record
            if not site_record:
                try:
                    print "TRY TO CREATE SITE RECORD"
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        print "create auth "+urn
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    print "urn: " + urn
                    print "auth_info: %s" % auth_info
                    print site_record
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("DummyImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("DummyImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node in nodes:
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("DummyImporter: failed to import node") 
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False
            
            all_records = global_dbsession.query(RegRecord).all()
            for record in all_records: print record
            
            site_pis=[]
            # import users
            for user in users:
                user_hrn = email_to_hrn(site_hrn, user['email'])
                # xxx suspicious again
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', user_hrn)

                # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object)

                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['keys']:
                        # randomly pick first key in set
                        for key in user['keys']:
                             pubkey = key
                             try:
                                pkey = convert_public_key(pubkey)
                                break
                             except:
                                continue
                        if not pkey:
                            self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("DummyImporter: user %s does not have a public key" % user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new user
                try:
                    if not user_record:
                        (pubkey,pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email(user['email'])
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['user_id'], 
                                                 authority=get_authority(user_hrn),
                                                 email=user['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the 
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in Dummy TB ?
                        new_keys=False
                        for key in user['keys']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("DummyImporter: updated person: %s" % user_record)
                    user_record.email = user['email']
                    global_dbsession.commit()
                    user_record.stale=False
                except:
                    self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))
    

            # import slices
            for slice in slices:
                slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("DummyImporter: failed to import slice")
                else:
                    # xxx update the record ...
                    self.logger.warning ("Slice update not yet implemented")
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("DummyImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
Example #11
    def run(self, options):
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell(config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn)
        # we essentially use this to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records:
            record.stale = True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only required stuff
        sites = shell.GetSites({
            'peer_id': None,
            'enabled': True
        }, [
            'site_id', 'login_base', 'node_ids', 'slice_ids', 'person_ids',
            'name'
        ])
        # create a hash of sites by login_base
        #        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({
            'peer_id': None,
            'enabled': True
        }, ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
        # create a hash of persons by person_id
        persons_by_id = dict([(person['person_id'], person)
                              for person in persons])
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({
            'peer_id': None,
            'enabled': False
        }, ['person_id'])
        disabled_person_ids = [
            person['person_id'] for person in disabled_persons
        ]
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys({
            'peer_id': None,
            'key_id': key_ids,
            'key_type': 'ssh'
        })
        # create a hash of keys by key_id
        keys_by_id = dict([(key['key_id'], key) for key in keys])
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {}
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning(
                        "Could not spot key %d - probably non-ssh" % key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes
        nodes = shell.GetNodes({'peer_id': None},
                               ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict([(
            node['node_id'],
            node,
        ) for node in nodes])
        # Get all plc slices
        slices = shell.GetSlices({'peer_id': None},
                                 ['slice_id', 'name', 'person_ids'])
        # create hash by slice_id
        slices_by_id = dict([(slice['slice_id'], slice) for slice in slices])

        # isolate special vini case in separate method
        self.create_special_vini_record(interface_hrn)

        # start importing
        for site in sites:
            try:
                site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except:
                site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if the hrn is not in the list of existing hrns, or if it exists
            # but it's not a site record
            site_record = self.locate_by_type_hrn('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(
                        hrn=site_hrn,
                        gid=auth_info.get_gid_object(),
                        pointer=site['site_id'],
                        authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info(
                        "PlImporter: imported authority (site) : %s" %
                        site_record)
                    self.remember_record(site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc(
                        "PlImporter: failed to import site %s. Skipping child records"
                        % site_hrn)
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale = False

            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot find node_id %s - ignored" %
                        node_id)
                    continue
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn = hostname_to_hrn(site_auth, site_name,
                                           node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn('node', node_hrn)
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        node_record = RegNode(
                            hrn=node_hrn,
                            gid=node_gid,
                            pointer=node['node_id'],
                            authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" %
                                         node_record)
                        self.remember_record(node_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import node %s" % node_hrn)
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale = False

            site_pis = []
            # import persons
            for person_id in site['person_ids']:
                proceed = False
                if person_id in persons_by_id:
                    person = persons_by_id[person_id]
                    proceed = True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning(
                        "PlImporter: cannot locate person_id %s in site %s - ignored"
                        % (person_id, site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                person_hrn = email_to_hrn(site_hrn, person['email'])
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn('user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key(person, plc_keys):
                    pubkey = None
                    if person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn(
                                'PlImporter: unable to convert public key for %s'
                                % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn(
                            "PlImporter: person %s does not have a PL public key"
                            % person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'], [])
                    if not user_record:
                        (pubkey, pkey) = init_person_key(person, plc_keys)
                        person_gid = self.auth_hierarchy.create_gid(
                            person_urn,
                            create_uuid(),
                            pkey,
                            email=person['email'])
                        user_record = RegUser(
                            hrn=person_hrn,
                            gid=person_gid,
                            pointer=person['person_id'],
                            authority=get_authority(person_hrn),
                            email=person['email'])
                        if pubkey:
                            user_record.reg_keys = [
                                RegKey(pubkey['key'], pubkey['key_id'])
                            ]
                        else:
                            self.logger.warning("No key found for user %s" %
                                                user_record)
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" %
                                         user_record)
                        self.remember_record(user_record)
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys

                        def sfa_key_in_list(sfa_key, plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key'] == sfa_key.key:
                                    return True
                            return False

                        # are all the SFA keys known to PLC ?
                        new_keys = False
                        if not sfa_keys and plc_keys:
                            new_keys = True
                        else:
                            for sfa_key in sfa_keys:
                                if not sfa_key_in_list(sfa_key, plc_keys):
                                    new_keys = True
                        if new_keys:
                            (pubkey, pkey) = init_person_key(person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(
                                person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys = []
                            else:
                                user_record.reg_keys = [
                                    RegKey(pubkey['key'], pubkey['key_id'])
                                ]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" %
                                             user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale = False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append(user_record)
                except:
                    self.logger.log_exc(
                        "PlImporter: failed to import person %d %s" %
                        (person['person_id'], person['email']))

            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis))
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot locate slice_id %s - ignored" %
                        slice_id)
                    continue
                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_record = self.locate_by_type_hrn('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        slice_record = RegSlice(
                            hrn=slice_hrn,
                            gid=slice_gid,
                            pointer=slice['slice_id'],
                            authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" %
                                         slice_record)
                        self.remember_record(slice_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import slice %s (%s)" %
                            (slice_hrn, slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                global_dbsession.commit()
                slice_record.stale = False

        ### remove stale records
        # special records must be preserved
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale = False

        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" %
                                 record)
                global_dbsession.delete(record)
                global_dbsession.commit()
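Every run() variant in this listing ends with the same mark-and-sweep on stale records: all existing RegRecord rows start out stale=True, anything re-encountered during the import is flagged stale=False, and whatever is still stale afterwards is deleted. A compact, self-contained sketch of that lifecycle, with plain dicts standing in for the database rows:

# Sketch of the importers' mark-and-sweep; dicts stand in for RegRecord rows
# and the list for global_dbsession.query(RegRecord).all().
all_records = [
    {'type': 'authority', 'hrn': 'plc', 'stale': None},
    {'type': 'node', 'hrn': 'plc.site.node1', 'stale': None},
    {'type': 'slice', 'hrn': 'plc.site.gone_slice', 'stale': None},
]

# 1. initialize record.stale to True by default
for record in all_records:
    record['stale'] = True

# 2. mark stale=False on the ones that are still reported by the testbed
seen_hrns = {'plc', 'plc.site.node1'}   # e.g. rebuilt from GetSites/GetNodes/GetSlices
for record in all_records:
    if record['hrn'] in seen_hrns:
        record['stale'] = False

# 3. sweep: whatever is still stale would be deleted (global_dbsession.delete + commit)
all_records = [record for record in all_records if not record['stale']]
print(all_records)   # the gone_slice record has been dropped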
Example #12
    def run(self, options):
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell(config)

        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # Delete all default records
        #for record in all_records:
        #    global_dbsession.delete(record)
        #    global_dbsession.commit()
        #all_records = global_dbsession.query(RegRecord).all()

        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn)
        self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                         for record in all_records])
        # create dict keyed by (type,pointer)
        self.records_by_type_pointer = dict([((record.type, record.pointer),
                                              record) for record in all_records
                                             if record.pointer != -1])

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records:
            record.stale = True

        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        sites = [shell.get_testbed_info()]

        # USERS
        users = shell.get_users({})

        #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
        # KEYS
        # auth_tokens of the users. Dict (user_id:[keys])

        # NODES
        nodes = shell.get_nodes({})

        # SLICES
        slices = shell.get_slices({})

        # Import records to the SFA registry
        # SITE
        for site in sites:
            # Get hrn of the site (authority)
            site_hrn = _get_site_hrn(interface_hrn, site)
            # Try to locate the site_hrn in the SFA records
            site_record = self.locate_by_type_hrn('authority', site_hrn)

            if not site_record:
                # Create/Import record for the site authority
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    # Create record for the site authority and add it to the Registry
                    site_record = RegAuthority(
                        hrn=site_hrn,
                        gid=auth_info.get_gid_object(),
                        pointer=-1,
                        authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info(
                        "CLabImporter: imported authority (site) : %s" %
                        site_hrn)
                    self.remember_record(site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc(
                        "CLabImporter: failed to import site. Skipping child records"
                    )
                    continue
            else:
                # Authority record already in the SFA registry. Update?
                pass

            # Fresh record in SFA Registry
            site_record.stale = False

            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records:
            #    print record

            # For the current site authority, import child entities/records

            # NODES
            for node in nodes:
                # Obtain parameters of the node: site_auth, site_name and hrn of the node
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn = hostname_to_hrn(site_hrn, node['name'])
                # Reduce hrn up to 64 characters
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]

                # Try to locate the node_hrn in the SFA records
                node_record = self.locate_by_type_hrn('node', node_hrn)
                if not node_record:
                    # Create/Import record for the node
                    try:
                        # Create a keypair for the node
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        # Create record for the node and add it to the Registry
                        node_record = RegNode(
                            hrn=node_hrn,
                            gid=node_gid,
                            pointer=node['id'],
                            authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported node: %s" %
                                         node_hrn)
                        self.remember_record(node_record)
                    except:
                        self.logger.log_exc(
                            "CLabImporter: failed to import node")
                else:
                    # Node record already in the SFA registry. Update?
                    pass

                # Fresh record in SFA Registry
                node_record.stale = False
                # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records:
                #    print record

            # USERS
            for user in users:
                # dummyimporter uses email... but Clab can use user['name']
                user_hrn = username_to_hrn(site_hrn, user['name'])
                # Reduce hrn up to 64 characters
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                # Try to locate the user_hrn in the SFA records
                user_record = self.locate_by_type_hrn('user', user_hrn)

                # Auxiliary function to get the keypair of the user from the testbed database
                # If multiple keys, randomly pick the first key in the set
                # If no keys, generate a new keypair for the user's gid
                def init_user_key(user):
                    pubkey = None
                    pkey = None
                    if user['auth_tokens']:
                        # randomly pick first key in set
                        for key in user['auth_tokens']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn(
                                'CLabImporter: unable to convert public key for %s'
                                % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn(
                            "CLabImporter: user %s does not have a CLab public key"
                            % user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                ###########################

                try:
                    if not user_record:
                        # Create/Import record for the user
                        # Create a keypair for the node
                        (pubkey, pkey) = init_user_key(user)
                        # Obtain parameters
                        user_gid = self.auth_hierarchy.create_gid(
                            user_urn, create_uuid(), pkey)
                        user_gid.set_email("*****@*****.**" % (user['name']))
                        # Create record for the node and add it to the Registry
                        user_record = RegUser(
                            hrn=user_hrn,
                            gid=user_gid,
                            pointer=user['id'],
                            authority=get_authority(user_hrn),
                            email="*****@*****.**" % (user['name']))
                        if pubkey:
                            user_record.reg_keys = [RegKey(pubkey)]
                        else:
                            self.logger.warning("No key found for user %s" %
                                                user_hrn)
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("ClabImporter: imported person: %s" %
                                         user_hrn)
                        self.remember_record(user_record)

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys

                        def key_in_list(key, sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key == key: return True
                            return False

                        # is there a new key in the CLab testbed?
                        new_keys = False
                        for key in user['auth_tokens']:
                            if not key_in_list(key, sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey, pkey) = init_user_key(user)
                            user_gid = self.auth_hierarchy.create_gid(
                                user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys = []
                            else:
                                user_record.reg_keys = [RegKey(pubkey)]
                            self.logger.info(
                                "CLabImporter: updated person: %s" % user_hrn)
                    user_record.email = "*****@*****.**" % (user['name'])
                    global_dbsession.commit()

                    # Fresh record in SFA Registry
                    user_record.stale = False
                except:
                    self.logger.log_exc(
                        "CLabImporter: failed to import user %d %s" %
                        (user['id'], user['name']))

            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records:
            #    print record

            # SLICES
            for slice in slices:
                # Obtain parameters of the node: site_auth, site_name and hrn of the slice
                slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
                # Try to locate the slice_hrn in the SFA records
                slice_record = self.locate_by_type_hrn('slice', slice_hrn)

                if not slice_record:
                    # Create/Import record for the slice
                    try:
                        #Create a keypair for the slice
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        # Create record for the slice and add it to the Registry
                        slice_record = RegSlice(
                            hrn=slice_hrn,
                            gid=slice_gid,
                            pointer=slice['id'],
                            authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported slice: %s" %
                                         slice_hrn)
                        self.remember_record(slice_record)
                    except:
                        self.logger.log_exc(
                            "CLabImporter: failed to import slice")
                else:
                    # Slice record already in the SFA registry. Update?
                    self.logger.warning(
                        "Slice already existing in SFA Registry")
                    pass

                # Get current users associated with the slice
                users_of_slice = shell.get_users_by_slice(slice)
                # record current users associated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
                global_dbsession.commit()

                # Fresh record in SFA Registry
                slice_record.stale = False

        # Remove stale records: old records that were in the SFA Registry but are no longer reported by the testbed

        # Preserve special records
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False

        # Remove all the records whose stale flag has not been set to False
        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("CLabImporter: deleting stale record: %s" %
                                 record)
                global_dbsession.delete(record)
                global_dbsession.commit()

        # DEBUG
        print 'SFA REGISTRY - Result of Import:'
        all_records = global_dbsession.query(RegRecord).all()
        for record in all_records:
            print record
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell (config)
                
        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()
        
        # Delete all default records
        #for record in all_records:
        #    global_dbsession.delete(record)
        #    global_dbsession.commit()
        #all_records = global_dbsession.query(RegRecord).all()
        
        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn) 
        self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
        # create dict keyed by (type,pointer) 
        self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])
        
        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: 
            record.stale=True
        
        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        sites = [shell.get_testbed_info()]
        
        # USERS
        users = shell.get_users({})
        
        #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
        # KEYS
        # auth_tokens of the users. Dict (user_id:[keys])
        
        # NODES
        nodes = shell.get_nodes({})
        
        # SLICES
        slices = shell.get_slices({})
        
        
        # Import records to the SFA registry
        # SITE
        for site in sites:
            # Get hrn of the site (authority)
            site_hrn = _get_site_hrn(interface_hrn, site)
            # Try to locate the site_hrn in the SFA records
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            
            if not site_record:
                # Create/Import record for the site authority
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    # Create record for the site authority and add it to the Registry
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("CLabImporter: imported authority (site) : %s" % site_hrn) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("CLabImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # Authority record already in the SFA registry. Update?
                pass
            
            # Fresh record in SFA Registry
            site_record.stale=False
            
            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records: 
            #    print record
            
             
            # For the current site authority, import child entities/records
            
            # NODES
            for node in nodes:
                # Obtain parameters of the node: site_auth, site_name and hrn of the node
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_hrn, node['name'])
                # Truncate hrn to at most 64 characters
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                
                # Try to locate the node_hrn in the SFA records
                node_record = self.locate_by_type_hrn ('node', node_hrn )
                if not node_record:
                    # Create/Import record for the node
                    try:
                        # Create a keypair for the node
                        pkey = Keypair(create=True)
                        # Obtain parameters 
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        # Create record for the node and add it to the Registry
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported node: %s" %node_hrn)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("CLabImporter: failed to import node") 
                else:
                    # Node record already in the SFA registry. Update?
                    pass
                
                # Fresh record in SFA Registry
                node_record.stale=False
                # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records: 
                #    print record
                
    
            # USERS
            for user in users:
                # dummyimporter uses email... but Clab can use user['name']
                user_hrn = username_to_hrn (site_hrn, user['name'])
                # Truncate hrn to at most 64 characters
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')
                
                # Try to locate the user_hrn in the SFA records
                user_record = self.locate_by_type_hrn ('user', user_hrn)


                # Auxiliary function to get the keypair of the user from the testbed database
                # If the user has several keys, pick the first one that converts successfully
                # If no keys, generate a new keypair for the user's gid
                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['auth_tokens']:
                        # randomly pick first key in set
                        for key in user['auth_tokens']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn('CLabImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("CLabImporter: user %s does not have a CLab public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)
                ###########################
                
                try:
                    if not user_record:
                        # Create/Import record for the user
                        # Create a keypair for the user
                        (pubkey,pkey) = init_user_key (user)
                        # Obtain parameters
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email("*****@*****.**"%(user['name']))
                        # Create a record for the user and add it to the Registry
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['id'], 
                                                 authority=get_authority(user_hrn),
                                                 email="*****@*****.**"%(user['name']))
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_hrn)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("ClabImporter: imported person: %s" % user_hrn)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if the user's primary key has changed then we need to update the
                        # user's gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key on the CLab side ?
                        new_keys=False
                        for key in user['auth_tokens']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("CLabImporter: updated person: %s" % user_hrn)
                    user_record.email = "*****@*****.**"%(user['name'])
                    global_dbsession.commit()
                                        
                    # Fresh record in SFA Registry
                    user_record.stale=False
                except:
                    self.logger.log_exc("CLabImporter: failed to import user %d %s"%(user['id'],user['name']))
            
            # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records: 
                #    print record         
                    
            # SLICES
            for slice in slices:
                # Obtain the hrn of the slice
                slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
                # Try to locate the slice_hrn in the SFA records
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                
                if not slice_record:
                    # Create/Import record for the slice
                    try:
                        #Create a keypair for the slice
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        # Create record for the slice and add it to the Registry
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("CLabImporter: failed to import slice")
                else:
                    # Slice record already in the SFA registry. Update?
                    self.logger.warning ("Slice already existing in SFA Registry")
                    pass
                
                # Get current users associated with the slice
                users_of_slice = shell.get_users_by_slice(slice)
                # record current users associated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
                global_dbsession.commit()
                                
                # Fresh record in SFA Registry 
                slice_record.stale=False    
                
     
        # Remove stale records: old records that were in the SFA Registry but are no longer reported by the testbed
        
        # Preserve special records 
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
                
        # Remove all the records whose stale flag has not been set to False
        for record in all_records:
            try:
                stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("CLabImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
                
        # DEBUG
        print 'SFA REGISTRY - Result of Import:'
        all_records = global_dbsession.query(RegRecord).all()
        for record in all_records: 
            print record  
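
The delete pass at the end of this importer is the same mark-and-sweep idiom the other importers in these examples use: flag every registry record as stale up front, clear the flag on whatever the testbed still reports, protect system and peer records, then delete what is left. A minimal standalone sketch of that idiom follows; Record, sweep and the sample hrns are illustrative stand-ins, not names from the SFA codebase.

# Hypothetical sketch of the stale-record mark-and-sweep used by the importers above.
class Record(object):
    def __init__(self, hrn, peer_authority=None):
        self.hrn = hrn
        self.peer_authority = peer_authority
        self.stale = True            # assumed stale until proven fresh

def sweep(all_records, testbed_hrns, system_hrns, delete):
    # 1- mark everything stale by default
    for record in all_records:
        record.stale = True
    # 2- anything the testbed still reports is fresh
    for record in all_records:
        if record.hrn in testbed_hrns:
            record.stale = False
    # 3- never delete system records or records owned by a peer authority
    for record in all_records:
        if record.hrn in system_hrns or record.peer_authority:
            record.stale = False
    # 4- delete whatever is still stale
    for record in all_records:
        if record.stale:
            delete(record)

records = [Record('ple.site.nodeA'), Record('ple.site.gone'), Record('ple')]
sweep(records, testbed_hrns=set(['ple.site.nodeA']), system_hrns=set(['ple']),
      delete=lambda record: None)   # in the importers this is dbsession.delete + commit
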
Exemple #14
0
    def import_tenants(self, existing_hrns, existing_records):
        # Get all tenants
        # A tenant can represent an organizational group (site) or a
        # slice. If a tenant's authority/parent matches the root authority it is
        # considered a group/site. All other tenants are considered slices.
        tenants = self.shell.auth_manager.tenants.list()
        tenants_dict = {}
        for tenant in tenants:
            hrn = self.config.SFA_INTERFACE_HRN + '.' + tenant.name
            tenants_dict[hrn] = tenant
            authority_hrn = OSXrn(xrn=hrn,
                                  type='authority').get_authority_hrn()

            if hrn in existing_hrns:
                continue

            if authority_hrn == self.config.SFA_INTERFACE_HRN:
                # import group/site
                record = RegAuthority()
                urn = OSXrn(xrn=hrn, type='authority').get_urn()
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                gid = auth_info.get_gid_object()
                record.type = 'authority'
                record.hrn = hrn
                record.gid = gid
                record.authority = get_authority(hrn)
                global_dbsession.add(record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported authority: %s" %
                                 record)

            else:
                record = RegSlice()
                urn = OSXrn(xrn=hrn, type='slice').get_urn()
                pkey = Keypair(create=True)
                gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                record.type = 'slice'
                record.hrn = hrn
                record.gid = gid
                record.authority = get_authority(hrn)
                global_dbsession.add(record)
                global_dbsession.commit()
                self.logger.info("OpenstackImporter: imported slice: %s" %
                                 record)

        return tenants_dict
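
import_tenants above hinges on a single test: if the parent authority of the tenant's hrn equals the interface root, the tenant is imported as an authority (a site); otherwise it is imported as a slice. A small self-contained sketch of that classification, using a plain string split where the real code goes through OSXrn; both helper names below are hypothetical.

# Hypothetical sketch of the tenant classification done by import_tenants.
def get_parent_authority(hrn):
    # parent authority of 'a.b.c' is 'a.b'; a top-level hrn has no parent
    return hrn.rsplit('.', 1)[0] if '.' in hrn else ''

def classify_tenant(tenant_name, interface_hrn):
    hrn = interface_hrn + '.' + tenant_name
    if get_parent_authority(hrn) == interface_hrn:
        return hrn, 'authority'      # organizational group / site
    return hrn, 'slice'              # anything nested deeper is a slice

assert classify_tenant('mysite', 'topauth') == ('topauth.mysite', 'authority')
assert classify_tenant('mysite.myslice', 'topauth') == ('topauth.mysite.myslice', 'slice')
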
Exemple #15
0
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only required stuff
        sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                               ['site_id','login_base','node_ids','slice_ids','person_ids', 'name', 'hrn'])
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids', 'hrn'])
        # create a hash of persons by person_id
        persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
        disabled_person_ids = [ person['person_id'] for person in disabled_persons ] 
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
                               'key_type': 'ssh'} )
        # create a hash of keys by key_id
        keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] ) 
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {} 
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes  
        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all plc slices
        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids', 'hrn'])
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

        # isolate special vini case in separate method
        self.create_special_vini_record (interface_hrn)

        # Get top authority record
        top_auth_record=self.locate_by_type_hrn ('authority', root_auth)
        admins = []

        # start importing 
        for site in sites:
            try:
               site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except: 
               site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            #site_hrn = _get_site_hrn(interface_hrn, site)
            site_hrn = site['hrn']
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer=site['site_id'],
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn) 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
                    continue 
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn) 
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False

            site_pis=[]
            # import persons
            for person_id in site['person_ids']:
                proceed=False
                if person_id in persons_by_id:
                    person=persons_by_id[person_id]
                    proceed=True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                #person_hrn = email_to_hrn(site_hrn, person['email'])
                person_hrn = person['hrn']
                if person_hrn is None:
                    self.logger.warn("Person %s has no hrn - skipped"%person['email'])
                    continue
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key (person, plc_keys):
                    pubkey=None
                    if  person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'],[])
                    if not user_record:
                        (pubkey,pkey) = init_person_key (person, plc_keys )
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
                        user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                               pointer=person['person_id'], 
                                               authority=get_authority(person_hrn),
                                               email=person['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys
                        def sfa_key_in_list (sfa_key,plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key']==sfa_key.key:
                                    return True
                            return False
                        # are all the SFA keys known to PLC ?
                        new_keys=False
                        if not sfa_keys and plc_keys:
                            new_keys=True
                        else: 
                            for sfa_key in sfa_keys:
                                 if not sfa_key_in_list (sfa_key,plc_keys):
                                     new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_person_key (person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" % user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale=False
                    # accumulate PIs - PLCAPI has a limitation: when someone has the PI role,
                    # it applies to all the sites they belong to
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append (user_record)

                    # PL Admins need to be marked as PIs of the top authority record
                    if 10 in person['role_ids'] and user_record not in top_auth_record.reg_pis:
                        admins.append(user_record)

                except:
                    self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
    
            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis)) 
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
                    continue
                #slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_hrn = slice['hrn']
                if slice_hrn is None:
                    self.logger.warning("Slice %s has no hrn - skipped"%slice['name'])
                    continue
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False

        # Set PL Admins as PI's of the top authority
        if admins:
            top_auth_record.reg_pis = list(set(admins))
            global_dbsession.commit()
            self.logger.info('PlImporter: set PL admins %s as PIs of %s'%(admins,top_auth_record.hrn))

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
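
The long comment in the user-update branch above boils down to one predicate: the user's GID is considered current as long as every key SFA holds is still known to PLC; if SFA holds no key while PLC now has some, or an SFA key has disappeared from PLC, a new GID is generated from one of the PLC keys. A self-contained sketch of that predicate, with plain public-key strings standing in for RegKey and PLC key objects:

# Sketch of the "do we need a new GID?" test used by the update branch above.
def needs_new_gid(sfa_keys, plc_keys):
    # no key on the SFA side but keys on the PLC side: regenerate
    if not sfa_keys and plc_keys:
        return True
    # any SFA key no longer known to PLC: regenerate
    for sfa_key in sfa_keys:
        if sfa_key not in plc_keys:
            return True
    return False

assert needs_new_gid([], ['ssh-rsa AAA user@host'])
assert not needs_new_gid(['ssh-rsa AAA user@host'], ['ssh-rsa AAA user@host'])
assert needs_new_gid(['ssh-rsa OLD user@host'], ['ssh-rsa NEW user@host'])
assert not needs_new_gid([], [])
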
Exemple #16
0
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = NitosShell (config)

        ######## retrieve all existing SFA objects
        all_records = dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve NITOS data
        # Get site info
        # retrieve only required stuff
        site = shell.getTestbedInfo()
        sites = [site]
        # create a hash of sites by login_base
#       # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all NITOS users
        users = shell.getUsers() 
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all NITOS public keys
        # accumulate key ids for keys retrieval
#        key_ids = []
#        for person in persons:
#            key_ids.extend(person['key_ids'])
#        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
#                               'key_type': 'ssh'} )
#        # create a hash of keys by key_id
#        keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] ) 
        # create a dict user_id -> [ (nitos)keys ]
        keys_by_user_id = dict ( [ ( user['user_id'], user['keys']) for user in users ] ) 
        # Get all nitos nodes  
        nodes = shell.getNodes({}, [])
        # create hash by node_id
        nodes_by_id = dict ( [ (node['node_id'], node) for node in nodes ] )
        # Get all nitos slices
        slices = shell.getSlices({}, [])
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice) for slice in slices ] )


        # start importing 
        for site in sites:
        #for i in [0]:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer=0,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    dbsession.add(site_record)
                    dbsession.commit()
                    self.logger.info("NitosImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("NitosImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node in nodes:
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        dbsession.add(node_record)
                        dbsession.commit()
                        self.logger.info("NitosImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                           self.logger.log_exc("NitosImporter: failed to import node")
                else:
                    # xxx update the record ...
                    pass
                
                node_record.stale=False


            # import users
            for user in users:
                user_hrn = username_to_hrn(interface_hrn, site['name'], user['username'])
                # xxx suspicious again
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', user_hrn)

                # return a tuple pubkey (a nitos key object) and pkey (a Keypair object)
                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['keys']:
                        # randomly pick first key in set
                        for key in user['keys']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn('NitosImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("NitosImporter: user %s does not have a NITOS public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new user
                try:
                    if not user_record:
                        (pubkey,pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email(user['email'])
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['user_id'], 
                                                 authority=get_authority(user_hrn),
                                                 email=user['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        dbsession.add (user_record)
                        dbsession.commit()
                        self.logger.info("NitosImporter: imported user: %s" % user_record)
                        self.remember_record ( user_record )
                    else:
                        # update the record ?
                        # if the user's primary key has changed then we need to update the
                        # user's gid by forcing an update here
                        sfa_keys = user_record.reg_keys

                        def sfa_key_in_list (sfa_key,nitos_user_keys):
                            for nitos_key in nitos_user_keys:
                                if nitos_key==sfa_key: return True
                            return False
                        # are all the SFA keys known to nitos ?
                        new_keys=False
                        if not sfa_keys and user['keys']:
                            new_keys = True
                        else:
                            for sfa_key in sfa_keys:
                                 if not sfa_key_in_list (sfa_key.key,user['keys']):
                                     new_keys = True

                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            user_record.gid = user_gid
                            user_record.just_updated()
                            self.logger.info("NitosImporter: updated user: %s" % user_record)
                    user_record.email = user['email']
                    dbsession.commit()
                    user_record.stale=False
                except:
                    self.logger.log_exc("NitosImporter: failed to import user %s %s"%(user['user_id'],user['email']))
    

            # import slices
            for slice in slices:
                slice_hrn = slicename_to_hrn(interface_hrn, site['name'], slice['slice_name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        dbsession.add(slice_record)
                        dbsession.commit()
                        self.logger.info("NitosImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("NitosImporter: failed to import slice")
                else:
                    # xxx update the record ...
                    self.logger.warning ("Slice update not yet implemented")
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                      [ self.locate_by_type_pointer ('user',int(user_id)) for user_id in slice['user_ids'] ]
                dbsession.commit()
                slice_record.stale=False


        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("NitosImporter: deleting stale record: %s" % record)
                dbsession.delete(record)
                dbsession.commit()
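
init_user_key in this importer (like its CLab counterpart earlier) is a simple fallback chain: walk the user's keys, keep the first one that convert_public_key accepts, and fall back to a freshly generated Keypair when nothing converts or the user has no key at all. A minimal sketch of that chain; convert_or_none and FakeKeypair are stand-ins so the snippet runs without the SFA trust libraries.

# Sketch of the init_user_key fallback chain. convert_or_none stands in for
# convert_public_key and FakeKeypair for Keypair(create=True); both are hypothetical.
class FakeKeypair(object):
    """Placeholder for a freshly generated Keypair."""
    pass

def convert_or_none(pubkey):
    # stand-in converter: accept only ssh-rsa keys, return None otherwise
    return object() if pubkey.startswith('ssh-rsa ') else None

def init_user_key(keys):
    pubkey = None
    pkey = None
    for candidate in keys:
        pubkey = candidate           # remember the key we attempted, like the code above
        pkey = convert_or_none(candidate)
        if pkey:
            break
    if pkey is None:
        # nothing converted (or no key at all): fall back to a random keypair for the gid
        pkey = FakeKeypair()
    return pubkey, pkey

assert init_user_key(['ssh-dss broken', 'ssh-rsa AAA a@b'])[0] == 'ssh-rsa AAA a@b'
assert init_user_key([])[0] is None
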
Exemple #17
0
    def run (self, options):
        config = Config()

        slabdriver = SlabDriver(config)
        
        #Create special slice table for senslab 
        
        if not slabdriver.db.exists('slab_xp'):
            slabdriver.db.createtable()
            self.logger.info ("SlabImporter.run:  slab_xp table created ")

        #retrieve all existing SFA objects
        all_records = dbsession.query(RegRecord).all()
      
        #create hash by (type,hrn) 
        #used  to know if a given record is already known to SFA 
       
        self.records_by_type_hrn = \
            dict ( [ ( (record.type,record.hrn) , record ) for record in all_records ] )
        print>>sys.stderr,"\r\n SLABIMPORT \t all_records[0] %s all_records[0].email %s \r\n" %(all_records[0].type, all_records[0])
        self.users_rec_by_email = \
            dict ( [ (record.email, record) for record in all_records if record.type == 'user' ] )
            
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (str(record.type),record.pointer) , record ) for record in all_records  if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: 
            record.stale=True
        
        nodes_listdict  = slabdriver.GetNodes()
        nodes_by_id = dict([(node['node_id'],node) for node in nodes_listdict])
        sites_listdict  = slabdriver.GetSites()
        
        ldap_person_listdict = slabdriver.GetPersons()
        print>>sys.stderr,"\r\n SLABIMPORT \t ldap_person_listdict %s \r\n" %(ldap_person_listdict)
        slices_listdict = slabdriver.GetSlices()
        try:
            slices_by_userid = dict ( [ (one_slice['reg_researchers']['record_id'], one_slice ) for one_slice in slices_listdict ] )
        except TypeError:
             self.logger.log_exc("SlabImporter: failed to create list of slices by user id.") 
             pass
 
        for site in sites_listdict:
            site_hrn = _get_site_hrn(site) 
            site_record = self.find_record_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority') 
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer='-1',
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    dbsession.add(site_record)
                    dbsession.commit()
                    self.logger.info("SlabImporter: imported authority (site) : %s" % site_record) 
                    self.update_just_added_records_dict(site_record)
                except SQLAlchemyError:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("SlabImporter: failed to import site. Skipping child records") 
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale=False 
            
            # import node records in site
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning ("SlabImporter: cannot find node_id %s - ignored"%node_id)
                    continue 
                site_auth = get_authority(site_hrn)
                site_name = site['name']                
                escaped_hrn =  self.hostname_to_hrn_escaped(slabdriver.root_auth, node['hostname'])
                print>>sys.stderr, "\r\n \r\n SLABIMPORTER node %s " %(node)               
                hrn =  node['hrn']


                # xxx this sounds suspicious
                if len(hrn) > 64: hrn = hrn[:64]
                node_record = self.find_record_by_type_hrn( 'node', hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(escaped_hrn, 'node') 
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        def slab_get_authority(hrn):
                            return hrn.split(".")[0]
                            
                        node_record = RegNode (hrn=hrn, gid=node_gid, 
                                                pointer = '-1',
                                                authority=slab_get_authority(hrn)) 
                        node_record.just_created()
                        dbsession.add(node_record)
                        dbsession.commit()
                        #self.logger.info("SlabImporter: imported node: %s" % node_record)  
                        self.update_just_added_records_dict(node_record)
                    except:
                        self.logger.log_exc("SlabImporter: failed to import node") 
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False
                    
                    
        # import persons
        for person in ldap_person_listdict : 
            

            print>>sys.stderr,"SlabImporter: person: %s" %(person['hrn'])
            if 'ssh-rsa' not in person['pkey']:
                # people with an invalid ssh key (ssh-dss, empty or malformed keys)
                # won't be imported
                continue
            person_hrn = person['hrn']
            slice_hrn = self.slicename_to_hrn(person['hrn'])
            
            # xxx suspicious again
            if len(person_hrn) > 64: person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')
            
            
            self.logger.debug("SlabImporter: users_rec_by_email %s" % self.users_rec_by_email)
            
            # Check if the user with person['email'] from LDAP is already registered
            # in SFA. One email = one person; in this case, do not create another
            # record for this person.
            # person_hrn is returned by GetPerson based on the senslab root auth + LDAP uid.
            user_record = self.find_record_by_type_hrn('user', person_hrn)
            if not user_record and  person['email'] in self.users_rec_by_email:
                user_record = self.users_rec_by_email[person['email']]
                person_hrn = user_record.hrn
                person_urn = hrn_to_urn(person_hrn, 'user')
                
            
            slice_record = self.find_record_by_type_hrn ('slice', slice_hrn)
            
            # returns a tuple (pubkey, pkey): pubkey is the public key string coming
            # from LDAP, pkey a Keypair object
            def init_person_key (person, slab_key):
                pubkey = None
                if person['pkey']:
                    # use the public key retrieved from LDAP
                    pubkey = slab_key

                    try:
                        pkey = convert_public_key(pubkey)
                    except TypeError:
                        # key is not usable; create another pkey
                        self.logger.warn('SlabImporter: unable to convert public key for %s' % person_hrn)
                        pkey = Keypair(create=True)

                else:
                    # the user has no keys; create a random keypair for the user's gid
                    self.logger.warn("SlabImporter: person %s does not have a public key" % person_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)
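
            # import this person: create a brand new user record, or refresh the existing one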
                            
                
            try:
                slab_key = person['pkey']
                # new person
                if not user_record:
                    (pubkey,pkey) = init_person_key (person, slab_key )
                    if pubkey is not None and pkey is not None :
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        if person['email']:
                            self.logger.debug("SlabImporter: person email OK: %s" % person['email'])
                            person_gid.set_email(person['email'])
                            user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                                    pointer='-1', 
                                                    authority=get_authority(person_hrn),
                                                    email=person['email'])
                        else:
                            user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                                    pointer='-1', 
                                                    authority=get_authority(person_hrn))
                            
                        if pubkey: 
                            user_record.reg_keys = [RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        dbsession.add (user_record)
                        dbsession.commit()
                        self.logger.info("SlabImporter: imported person: %s" % user_record)
                        self.update_just_added_records_dict( user_record )
                else:
                    # update the record ?
                    # if user's primary key has changed then we need to update the 
                    # users gid by forcing an update here
                    sfa_keys = user_record.reg_keys

                    # the key has changed if the LDAP key does not match any key already
                    # registered in SFA (RegKey objects expose the key string as .key)
                    new_key = False
                    if not sfa_keys or slab_key not in [k.key for k in sfa_keys]:
                        new_key = True
                    if new_key:
                        print>>sys.stderr,"SlabImporter: \t \t USER UPDATE person: %s" %(person['hrn'])
                        (pubkey,pkey) = init_person_key (person, slab_key)
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        if not pubkey:
                            user_record.reg_keys = []
                        else:
                            user_record.reg_keys = [RegKey(pubkey)]
                        self.logger.info("SlabImporter: updated person: %s" % user_record)
                        
                    if person['email']:
                        user_record.email = person['email']
                        
                dbsession.commit()

                user_record.stale = False
            except Exception:
                self.logger.log_exc("SlabImporter: failed to import person %s" % person)
            
            try:
                slice = slices_by_userid[user_record.record_id]
            except Exception:
                self.logger.warning("SlabImporter: cannot locate slices_by_userid[user_record.record_id] %s - ignored" % user_record)
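
            # create the user's slice record if it does not already exist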
                    
            if not slice_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                pointer='-1',
                                                authority=get_authority(slice_hrn))
                    
                    slice_record.just_created()
                    dbsession.add(slice_record)
                    dbsession.commit()
                    
                    # the serial record_id is only assigned at commit time,
                    # so fetch the freshly created slice record back from the DB
                    sl_rec = dbsession.query(RegSlice).filter(RegSlice.hrn.match(slice_hrn)).all()
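                    # note: sl_rec is currently only needed by the SenslabXP bookkeeping
                    # kept commented out below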
                    
                    #slab_slice = SenslabXP( slice_hrn = slice_hrn, record_id_slice=sl_rec[0].record_id, record_id_user= user_record.record_id)
                    #print>>sys.stderr, "\r\n \r\n SLAB IMPORTER SLICE IMPORT NOTslice_record %s \r\n slab_slice %s" %(sl_rec,slab_slice)
                    #slab_dbsession.add(slab_slice)
                    #slab_dbsession.commit()
                    #self.logger.info("SlabImporter: imported slice: %s" % slice_record)  
                    self.update_just_added_records_dict ( slice_record )

                except Exception:
                    self.logger.log_exc("SlabImporter: failed to import slice")
                    
            else:
                # xxx no slice update upon import in senslab - not yet implemented
                self.logger.warning("Slice update not yet implemented")
            # record current users affiliated with the slice
            slice_record.reg_researchers = [user_record]
            dbsession.commit()
            slice_record.stale = False

        ### remove stale records
        # special records must be preserved
        system_hrns = [slabdriver.hrn, slabdriver.root_auth, slabdriver.hrn + '.slicemanager']
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
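            # records coming from a peer authority are preserved as well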
            if record.peer_authority:
                record.stale = False
          

        # second pass: delete every record still marked as stale
        for record in all_records:
            if record.type == 'user':
                self.logger.debug("SlabImporter: stale record: hrn %s %s" % (record.hrn, record.stale))
            try:
                stale = record.stale
            except Exception:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("SlabImporter: deleting stale record: %s" % record)
                #if record.type == 'user':
                    #rec = slab_dbsession.query(SenslabXP).filter_by(record_id_user = record.record_id).first()
                    #slab_dbsession.delete(rec)
                    #slab_dbsession.commit()
                dbsession.delete(record)
                dbsession.commit()