Code example #1
File: sfa-start.py Project: tubav/sfa
def update_cert_records(gids):
    """
    Make sure there is a record in the registry for the specified gids. 
    Removes old records from the db.
    """
    # import db stuff here so this module can be loaded by PlcComponentApi
    from sfa.storage.alchemy import dbsession
    from sfa.storage.model import RegRecord
    if not gids:
        return
    # get records that actually exist in the db
    gid_urns = [gid.get_urn() for gid in gids]
    hrns_expected = [gid.get_hrn() for gid in gids]
    records_found = dbsession.query(RegRecord).\
        filter_by(pointer=-1).filter(RegRecord.hrn.in_(hrns_expected)).all()

    # remove old records
    # this is a plain module-level helper, so look up the interface hrn
    # via the local Config rather than an api/self reference
    from sfa.util.config import Config
    interface_hrn = Config().SFA_INTERFACE_HRN
    for record in records_found:
        if record.hrn not in hrns_expected and record.hrn != interface_hrn:
            dbsession.delete(record)

    # TODO: store urn in the db so we do this in 1 query 
    for gid in gids:
        hrn, type = gid.get_hrn(), gid.get_type()
        record = dbsession.query(RegRecord).filter_by(hrn=hrn, type=type,pointer=-1).first()
        if not record:
            record = RegRecord (dict= {'type':type,
                                       'hrn': hrn, 
                                       'authority': get_authority(hrn),
                                       'gid': gid.save_to_string(save_parents=True),
                                       })
            dbsession.add(record)
    dbsession.commit()
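
The TODO above hints that the per-gid lookup loop could collapse into a single query. A minimal sketch of that idea, using only what the excerpt already relies on (dbsession, RegRecord, get_authority and the GID accessors); the helper name ensure_records_for_gids and the sfa.util.xrn import path are assumptions for illustration, not part of the project:

def ensure_records_for_gids(gids):
    # hypothetical batched variant of the per-gid lookup loop above:
    # one query fetches all candidate records, then membership is checked in memory
    from sfa.storage.alchemy import dbsession
    from sfa.storage.model import RegRecord
    from sfa.util.xrn import get_authority   # assumed import path
    if not gids:
        return
    hrns = [gid.get_hrn() for gid in gids]
    existing = set(
        (rec.hrn, rec.type)
        for rec in dbsession.query(RegRecord)
                            .filter_by(pointer=-1)
                            .filter(RegRecord.hrn.in_(hrns)))
    for gid in gids:
        hrn, type = gid.get_hrn(), gid.get_type()
        if (hrn, type) not in existing:
            dbsession.add(RegRecord(dict={'type': type,
                                          'hrn': hrn,
                                          'authority': get_authority(hrn),
                                          'gid': gid.save_to_string(save_parents=True)}))
    dbsession.commit()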
Code example #2
File: openstackimporter.py Project: tubav/sfa
    def run (self, options):
        # we don't have any options for now
        self.logger.info ("OpenstackImporter.run : to do")

        # create dict of all existing sfa records
        existing_records = {}
        existing_hrns = []
        key_ids = []
        for record in dbsession.query(RegRecord):
            existing_records[ (record.hrn, record.type,) ] = record
            existing_hrns.append(record.hrn) 
            

        tenants_dict = self.import_tenants(existing_hrns, existing_records)
        users_dict, user_keys = self.import_users(existing_hrns, existing_records)
                
        # remove stale records    
        system_records = [self.interface_hrn, self.root_auth, self.interface_hrn + '.slicemanager']
        for (record_hrn, type) in existing_records.keys():
            if record_hrn in system_records:
                continue
        
            record = existing_records[(record_hrn, type)]
            if record.peer_authority:
                continue

            if type == 'user':
                if record_hrn in users_dict:
                    continue  
            elif type in['slice', 'authority']:
                if record_hrn in tenants_dict:
                    continue
            else:
                continue 
        
            self.logger.info("OpenstackImporter: removing %s " % record)
            dbsession.delete(record)
            dbsession.commit()
                                   
        # save pub keys
        self.logger.info('OpenstackImporter: saving current pub keys')
        keys_filename = self.config.config_path + os.sep + 'person_keys.py'
        save_keys(keys_filename, user_keys)                
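
The stale-record sweep in the middle of run() recurs, in slightly different forms, in the other importers below. A factored sketch of the pattern; the helper name and the is_live callback are illustrative assumptions, not project API:

def remove_stale_records(dbsession, existing_records, system_hrns, is_live):
    # existing_records: dict keyed by (hrn, type), as built at the top of run()
    # is_live(hrn, record_type): reports whether the testbed still knows the object
    for (hrn, record_type), record in existing_records.items():
        if hrn in system_hrns:
            continue
        if record.peer_authority:
            continue
        if is_live(hrn, record_type):
            continue
        dbsession.delete(record)
    dbsession.commit()

In run() above, is_live would check users_dict for 'user' records, tenants_dict for 'slice' and 'authority' records, and report any other type as live so those records are kept.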
Code example #3
File: registry_manager.py Project: tubav/sfa
    def Remove(self, api, xrn, origin_hrn=None):
        hrn=xrn.get_hrn()
        type=xrn.get_type()
        request=dbsession.query(RegRecord).filter_by(hrn=hrn)
        if type and type not in ['all', '*']:
            request=request.filter_by(type=type)
    
        record = request.first()
        if not record:
            msg="Could not find hrn %s"%hrn
            if type: msg += " type=%s"%type
            raise RecordNotFound(msg)

        type = record.type
        if type not in ['slice', 'user', 'node', 'authority'] :
            raise UnknownSfaType(type)

        credential = api.getCredential()
        registries = api.registries
    
        # Try to remove the object from the PLCDB of federated agg.
        # This is attempted before removing the object from the local agg's PLCDB and sfa table
        if hrn.startswith(api.hrn) and type in ['user', 'slice', 'authority']:
            for registry in registries:
                if registry not in [api.hrn]:
                    try:
                        result=registries[registry].remove_peer_object(credential, record, origin_hrn)
                    except:
                        pass

        # call testbed callback first
        # IIUC this is done on the local testbed TOO because of the refreshpeer link
        if not self.driver.remove(record.__dict__):
            logger.warning("driver.remove failed")

        # delete from sfa db
        dbsession.delete(record)
        dbsession.commit()
    
        return 1
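
A hypothetical call site for Remove, only to show the shape of the xrn argument: Remove calls get_hrn() and get_type() on it, which matches the Xrn helper. The sfa.util.xrn import path and the remove_slice wrapper are assumptions for illustration:

from sfa.util.xrn import Xrn   # assumed import path

def remove_slice(manager, api, slice_hrn):
    # Remove() only needs an object exposing get_hrn() and get_type()
    xrn = Xrn(slice_hrn, type='slice')
    return manager.Remove(api, xrn)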
Code example #4
File: dummyimporter.py Project: tubav/sfa
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = DummyShell (config)

        ######## retrieve all existing SFA objects
        all_records = dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve Dummy TB data
        # Get all dummy TB sites
        # retrieve only required stuff
        sites = [shell.GetTestbedInfo()]
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all dummy TB users
        users = shell.GetUsers()
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all dummy TB public keys
        keys = []
        for user in users:
            if 'keys' in user:
                keys.extend(user['keys'])
        # create a dict user_id -> [ keys ]
        keys_by_person_id = {} 
        for user in users:
             if 'keys' in user:
                 keys_by_person_id[user['user_id']] = user['keys']
        # Get all dummy TB nodes  
        nodes = shell.GetNodes()
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all dummy TB slices
        slices = shell.GetSlices()
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )


        # start importing 
        for site in sites:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but it's not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    dbsession.add(site_record)
                    dbsession.commit()
                    self.logger.info("DummyImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("DummyImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node in nodes:
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        dbsession.add(node_record)
                        dbsession.commit()
                        self.logger.info("DummyImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("DummyImporter: failed to import node")
                        # skip this node: node_record was not created
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False

            site_pis=[]
            # import users
            for user in users:
                user_hrn = email_to_hrn(site_hrn, user['email'])
                # xxx suspicious again
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', user_hrn)

                # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object)

                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['keys']:
                        # randomly pick first key in set
                        for key in user['keys']:
                             pubkey = key
                             try:
                                pkey = convert_public_key(pubkey)
                                break
                             except:
                                continue
                        if not pkey:
                            self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new user
                try:
                    if not user_record:
                        (pubkey,pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email(user['email'])
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['user_id'], 
                                                 authority=get_authority(user_hrn),
                                                 email=user['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        dbsession.add (user_record)
                        dbsession.commit()
                        self.logger.info("DummyImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the 
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in Dummy TB ?
                        new_keys=False
                        for key in user['keys']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("DummyImporter: updated person: %s" % user_record)
                    user_record.email = user['email']
                    dbsession.commit()
                    user_record.stale=False
                except:
                    self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))
    

            # import slices
            for slice in slices:
                slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        dbsession.add(slice_record)
                        dbsession.commit()
                        self.logger.info("DummyImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("DummyImporter: failed to import slice")
                        # skip this slice: slice_record was not created
                        continue
                else:
                    # xxx update the record ...
                    self.logger.warning ("Slice update not yet implemented")
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
                dbsession.commit()
                slice_record.stale=False

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("DummyImporter: deleting stale record: %s" % record)
                dbsession.delete(record)
                dbsession.commit()
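
run() leans on a few helpers that are not part of this excerpt. A minimal sketch of what they presumably do, inferred solely from how run() uses them (the project's real implementations live elsewhere in the importer):

    def locate_by_type_hrn(self, record_type, hrn):
        return self.records_by_type_hrn.get((record_type, hrn))

    def locate_by_type_pointer(self, record_type, pointer):
        return self.records_by_type_pointer.get((record_type, pointer))

    def remember_record(self, record):
        # keep the lookup hashes built at the top of run() in sync
        # with records added during this import pass
        self.records_by_type_hrn[(record.type, record.hrn)] = record
        if record.pointer != -1:
            self.records_by_type_pointer[(record.type, record.pointer)] = record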
Code example #5
File: plimporter.py Project: tubav/sfa
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell (config)

        ######## retrieve all existing SFA objects
        all_records = dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only required stuff
        sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                               ['site_id','login_base','node_ids','slice_ids','person_ids',])
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
        # create a hash of persons by person_id
        persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
        disabled_person_ids = [ person['person_id'] for person in disabled_persons ] 
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
                               'key_type': 'ssh'} )
        # create a hash of keys by key_id
        keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] ) 
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {} 
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes  
        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all plc slices
        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids'])
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

        # isolate special vini case in separate method
        self.create_special_vini_record (interface_hrn)

        # start importing 
        for site in sites:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but it's not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer=site['site_id'],
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    dbsession.add(site_record)
                    dbsession.commit()
                    self.logger.info("PlImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn) 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
                    continue 
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        dbsession.add(node_record)
                        dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn) 
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False

            site_pis=[]
            # import persons
            for person_id in site['person_ids']:
                proceed=False
                if person_id in persons_by_id:
                    person=persons_by_id[person_id]
                    proceed=True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                person_hrn = email_to_hrn(site_hrn, person['email'])
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key (person, plc_keys):
                    pubkey=None
                    if  person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'],[])
                    if not user_record:
                        (pubkey,pkey) = init_person_key (person, plc_keys )
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        person_gid.set_email(person['email'])
                        user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                               pointer=person['person_id'], 
                                               authority=get_authority(person_hrn),
                                               email=person['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        dbsession.add (user_record)
                        dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys
                        def sfa_key_in_list (sfa_key,plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key']==sfa_key.key:
                                    return True
                            return False
                        # are all the SFA keys known to PLC ?
                        new_keys=False
                        if not sfa_keys and plc_keys:
                            new_keys=True
                        else: 
                            for sfa_key in sfa_keys:
                                 if not sfa_key_in_list (sfa_key,plc_keys):
                                     new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_person_key (person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" % user_record)
                    user_record.email = person['email']
                    dbsession.commit()
                    user_record.stale=False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append (user_record)
                except:
                    self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
    
            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis)) 
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = site_pis
            dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
                    continue
                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        dbsession.add(slice_record)
                        dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
                        # skip this slice: slice_record was not created
                        continue
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                dbsession.commit()
                slice_record.stale=False

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" % record)
                dbsession.delete(record)
                dbsession.commit()
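
The update branch above regenerates a user's GID when the SFA-side keys no longer all appear among the PLC keys. That decision, isolated as a small predicate for readability (a sketch, not project code):

def needs_new_gid(sfa_keys, plc_keys):
    # no SFA key yet, but PLC has at least one: (re)generate the gid
    if not sfa_keys and plc_keys:
        return True
    # regenerate as soon as any SFA key is no longer present on the PLC side
    plc_key_strings = set(key['key'] for key in plc_keys)
    return any(sfa_key.key not in plc_key_strings for sfa_key in sfa_keys)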
Code example #6
File: slabimporter.py Project: tubav/sfa
    def run (self, options):
        config = Config()

        slabdriver = SlabDriver(config)
        
        #Create special slice table for senslab 
        
        if not slabdriver.db.exists('slab_xp'):
            slabdriver.db.createtable()
            self.logger.info ("SlabImporter.run:  slab_xp table created ")

        #retrieve all existing SFA objects
        all_records = dbsession.query(RegRecord).all()
      
        #create hash by (type,hrn) 
        #used  to know if a given record is already known to SFA 
       
        self.records_by_type_hrn = \
            dict ( [ ( (record.type,record.hrn) , record ) for record in all_records ] )
        print>>sys.stderr,"\r\n SLABIMPORT \t all_records[0] %s all_records[0].email %s \r\n" %(all_records[0].type, all_records[0])
        self.users_rec_by_email = \
            dict ( [ (record.email, record) for record in all_records if record.type == 'user' ] )
            
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (str(record.type),record.pointer) , record ) for record in all_records  if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: 
            record.stale=True
        
        nodes_listdict  = slabdriver.GetNodes()
        nodes_by_id = dict([(node['node_id'],node) for node in nodes_listdict])
        sites_listdict  = slabdriver.GetSites()
        
        ldap_person_listdict = slabdriver.GetPersons()
        print>>sys.stderr,"\r\n SLABIMPORT \t ldap_person_listdict %s \r\n" %(ldap_person_listdict)
        slices_listdict = slabdriver.GetSlices()
        try:
            slices_by_userid = dict ( [ (one_slice['reg_researchers']['record_id'], one_slice ) for one_slice in slices_listdict ] )
        except TypeError:
             self.logger.log_exc("SlabImporter: failed to create list of slices by user id.") 
             pass
 
        for site in sites_listdict:
            site_hrn = _get_site_hrn(site) 
            site_record = self.find_record_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority') 
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer='-1',
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    dbsession.add(site_record)
                    dbsession.commit()
                    self.logger.info("SlabImporter: imported authority (site) : %s" % site_record) 
                    self.update_just_added_records_dict(site_record)
                except SQLAlchemyError:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("SlabImporter: failed to import site. Skipping child records") 
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale=False 
            
            # import node records in site
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning ("SlabImporter: cannot find node_id %s - ignored"%node_id)
                    continue 
                site_auth = get_authority(site_hrn)
                site_name = site['name']                
                escaped_hrn =  self.hostname_to_hrn_escaped(slabdriver.root_auth, node['hostname'])
                print>>sys.stderr, "\r\n \r\n SLABIMPORTER node %s " %(node)               
                hrn =  node['hrn']


                # xxx this sounds suspicious
                if len(hrn) > 64: hrn = hrn[:64]
                node_record = self.find_record_by_type_hrn( 'node', hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(escaped_hrn, 'node') 
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        def slab_get_authority(hrn):
                            return hrn.split(".")[0]
                            
                        node_record = RegNode (hrn=hrn, gid=node_gid, 
                                                pointer = '-1',
                                                authority=slab_get_authority(hrn)) 
                        node_record.just_created()
                        dbsession.add(node_record)
                        dbsession.commit()
                        #self.logger.info("SlabImporter: imported node: %s" % node_record)  
                        self.update_just_added_records_dict(node_record)
                    except:
                        self.logger.log_exc("SlabImporter: failed to import node") 
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False
                    
                    
        # import persons
        for person in ldap_person_listdict : 
            

            print>>sys.stderr,"SlabImporter: person: %s" %(person['hrn'])
            if 'ssh-rsa' not in person['pkey']:
                #people with invalid ssh key (ssh-dss, empty, bullshit keys...)
                #won't be imported
                continue
            person_hrn = person['hrn']
            slice_hrn = self.slicename_to_hrn(person['hrn'])
            
            # xxx suspicious again
            if len(person_hrn) > 64: person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')
            
            
            print>>sys.stderr," \r\n SlabImporter:  HEYYYYYYYYYY" , self.users_rec_by_email
            
            # Check if the user with person['email'] from LDAP is already registered
            # in SFA. One email = one person. In this case, do not create another
            # record for this person.
            # person_hrn is returned by GetPersons, based on the senslab root auth + LDAP uid
            user_record = self.find_record_by_type_hrn('user', person_hrn)
            if not user_record and  person['email'] in self.users_rec_by_email:
                user_record = self.users_rec_by_email[person['email']]
                person_hrn = user_record.hrn
                person_urn = hrn_to_urn(person_hrn, 'user')
                
            
            slice_record = self.find_record_by_type_hrn ('slice', slice_hrn)
            
            # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
            def init_person_key (person, slab_key):
                pubkey = None
                if  person['pkey']:
                    # randomly pick first key in set
                    pubkey = slab_key
                    
                    try:
                        pkey = convert_public_key(pubkey)
                    except TypeError:
                        #key not good. create another pkey
                        self.logger.warn('SlabImporter: unable to convert public key for %s' % person_hrn)
                        pkey = Keypair(create=True)
                    
                else:
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn("SlabImporter: person %s does not have a  public key"%person_hrn)
                    pkey = Keypair(create=True) 
                return (pubkey, pkey)
                            
                
            try:
                slab_key = person['pkey']
                # new person
                if not user_record:
                    (pubkey,pkey) = init_person_key (person, slab_key )
                    if pubkey is not None and pkey is not None :
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        if person['email']:
                            print>>sys.stderr, "\r\n \r\n SLAB IMPORTER PERSON EMAIL OK email %s " %(person['email'])
                            person_gid.set_email(person['email'])
                            user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                                    pointer='-1', 
                                                    authority=get_authority(person_hrn),
                                                    email=person['email'])
                        else:
                            user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                                    pointer='-1', 
                                                    authority=get_authority(person_hrn))
                            
                        if pubkey: 
                            user_record.reg_keys = [RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        dbsession.add (user_record)
                        dbsession.commit()
                        self.logger.info("SlabImporter: imported person: %s" % user_record)
                        self.update_just_added_records_dict( user_record )
                else:
                    # update the record ?
                    # if user's primary key has changed then we need to update the 
                    # users gid by forcing an update here
                    sfa_keys = user_record.reg_keys
                   
                    # regenerate the gid only if the LDAP key is not already among the SFA keys
                    new_key = slab_key not in [reg_key.key for reg_key in sfa_keys]
                    if new_key:
                        print>>sys.stderr,"SlabImporter: \t \t USER UPDATE person: %s" %(person['hrn'])
                        (pubkey,pkey) = init_person_key (person, slab_key)
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        if not pubkey:
                            user_record.reg_keys=[]
                        else:
                            user_record.reg_keys=[ RegKey (pubkey)]
                        self.logger.info("SlabImporter: updated person: %s" % user_record)
                        
                    if person['email']:
                        user_record.email = person['email']
                        
                dbsession.commit()

                user_record.stale = False
            except:
                self.logger.log_exc("SlabImporter: failed to import person  %s"%(person) )       
            
            try:
                slice = slices_by_userid[user_record.record_id]
            except:
                self.logger.warning ("SlabImporter: cannot locate slices_by_userid[user_record.record_id] %s - ignored"%user_record)  
                    
            if not slice_record :
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                pointer='-1',
                                                authority=get_authority(slice_hrn))
                    
                    slice_record.just_created()
                    dbsession.add(slice_record)
                    dbsession.commit()
                    
                    #Serial id created after commit
                    #Get it
                    sl_rec = dbsession.query(RegSlice).filter(RegSlice.hrn.match(slice_hrn)).all()
                    
                    #slab_slice = SenslabXP( slice_hrn = slice_hrn, record_id_slice=sl_rec[0].record_id, record_id_user= user_record.record_id)
                    #print>>sys.stderr, "\r\n \r\n SLAB IMPORTER SLICE IMPORT NOTslice_record %s \r\n slab_slice %s" %(sl_rec,slab_slice)
                    #slab_dbsession.add(slab_slice)
                    #slab_dbsession.commit()
                    #self.logger.info("SlabImporter: imported slice: %s" % slice_record)  
                    self.update_just_added_records_dict ( slice_record )

                except:
                    self.logger.log_exc("SlabImporter: failed to import slice")
                    # skip this person: slice_record was not created
                    continue
                    
            #No slice update upon import in senslab 
            else:
                # xxx update the record ...
                self.logger.warning ("Slice update not yet implemented")
                pass
            # record current users affiliated with the slice


            slice_record.reg_researchers =  [user_record]
            dbsession.commit()
            slice_record.stale=False 
                       
  
                 
         ### remove stale records
        # special records must be preserved
        system_hrns = [slabdriver.hrn, slabdriver.root_auth,  slabdriver.hrn+ '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
          

        for record in all_records: 
            if record.type == 'user':
                print>>sys.stderr,"SlabImporter: stale records: hrn %s %s" %(record.hrn,record.stale)
            try:        
                stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("SlabImporter: deleting stale record: %s" % record)
                #if record.type == 'user':
                    #rec = slab_dbsession.query(SenslabXP).filter_by(record_id_user = record.record_id).first()
                    #slab_dbsession.delete(rec)
                    #slab_dbsession.commit()
                dbsession.delete(record)
                dbsession.commit()
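
Examples #4, #5 and #6 all end their run() with the same stale-record sweep. Factored into one helper it reads as below; this is a sketch assuming records carry the stale and peer_authority attributes used above, and it commits once at the end instead of per record:

def delete_stale_records(dbsession, all_records, system_hrns, logger):
    # system records and records owned by a peer authority must survive
    for record in all_records:
        if record.hrn in system_hrns or record.peer_authority:
            record.stale = False
    # anything still flagged stale was not seen during this import pass
    for record in all_records:
        if getattr(record, 'stale', True):
            logger.info("deleting stale record: %s" % record)
            dbsession.delete(record)
    dbsession.commit()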