Example #1
def TestSQL(arg = None):
    from sfa.storage.model import make_record, RegSlice, RegRecord
    from sfa.storage.alchemy import global_dbsession


    from sqlalchemy.orm import joinedload

    slice_hrn = 'iotlab.avakian_slice'
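    # query RegSlice and eager-load the 'reg_researchers' relationship in the same round-trip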
    request =  global_dbsession.query(RegSlice).options(joinedload('reg_researchers'))
    solo_query_slice_list = request.filter_by(hrn=slice_hrn).first()

    print "\r\n \r\n ===========      solo_query_slice_list  RegSlice \
    joinedload('reg_researchers')   slice_hrn %s  first %s \r\n \t "\
    %(slice_hrn, solo_query_slice_list.__dict__)

    query_slice_list = request.all()
    print "\r\n \r\n ===========      query_slice_list RegSlice \
    joinedload('reg_researchers')   ALL  \r\n \t", \
    query_slice_list[0].__dict__

    return_slicerec_dictlist = []
    record = query_slice_list[0]
    print "\r\n \r\n ===========   \r\n \t", record

    tmp = record.__dict__
    print "\r\n \r\n ===========   \r\n \t", tmp
    tmp['reg_researchers'] = tmp['reg_researchers'][0].__dict__
    print "\r\n \r\n ===========   \r\n \t", tmp
        #del tmp['reg_researchers']['_sa_instance_state']
    return_slicerec_dictlist.append(tmp)

    print "\r\n \r\n ===========   \r\n \t", return_slicerec_dictlist

    all_records = global_dbsession.query(RegRecord).all()
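
Example #1 prints and mutates the ORM instances' own __dict__, which still contains SQLAlchemy's internal _sa_instance_state entry. A minimal sketch of a safer helper (an illustration, not part of the SFA code) copies the attributes into a plain dict first:

def record_to_dict(record):
    # work on a copy so the mapped instance itself is left untouched
    fields = dict(record.__dict__)
    # drop SQLAlchemy's internal bookkeeping entry before printing or serializing
    fields.pop('_sa_instance_state', None)
    return fields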
Example #2
    def run(self, options):
        # we don't have any options for now
        self.logger.info("OpenstackImporter.run : to do")

        # create dict of all existing sfa records
        existing_records = {}
        existing_hrns = []
        key_ids = []
        for record in global_dbsession.query(RegRecord):
            existing_records[(
                record.hrn,
                record.type,
            )] = record
            existing_hrns.append(record.hrn)

        tenants_dict = self.import_tenants(existing_hrns, existing_records)
        users_dict, user_keys = self.import_users(existing_hrns,
                                                  existing_records)

        # remove stale records
        system_records = [
            self.interface_hrn, self.root_auth,
            self.interface_hrn + '.slicemanager'
        ]
        for (record_hrn, type) in existing_records.keys():
            if record_hrn in system_records:
                continue

            record = existing_records[(record_hrn, type)]
            if record.peer_authority:
                continue

            if type == 'user':
                if record_hrn in users_dict:
                    continue
            elif type in ['slice', 'authority']:
                if record_hrn in tenants_dict:
                    continue
            else:
                continue

            record_object = existing_records[(record_hrn, type)]
            self.logger.info("OpenstackImporter: removing %s " % record)
            global_dbsession.delete(record_object)
            global_dbsession.commit()

        # save pub keys
        self.logger.info('OpenstackImporter: saving current pub keys')
        keys_filename = self.config.config_path + os.sep + 'person_keys.py'
        save_keys(keys_filename, user_keys)
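
save_keys itself is not shown in these examples; a plausible minimal sketch (an assumption about its behaviour, not the actual SFA implementation) simply persists the user_keys dict as a Python literal that a later import run can re-load:

def save_keys(filename, keys):
    # sketch only: dump the user -> keys mapping so the next run can compare against it
    f = open(filename, 'w')
    f.write("keys = %s" % str(keys))
    f.close()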
Example #3
    def run (self, options):
        # we don't have any options for now
        self.logger.info ("OpenstackImporter.run : to do")

        # create dict of all existing sfa records
        existing_records = {}
        existing_hrns = []
        key_ids = []
        for record in global_dbsession.query(RegRecord):
            existing_records[ (record.hrn, record.type,) ] = record
            existing_hrns.append(record.hrn) 
            

        tenants_dict = self.import_tenants(existing_hrns, existing_records)
        users_dict, user_keys = self.import_users(existing_hrns, existing_records)
                
        # remove stale records    
        system_records = [self.interface_hrn, self.root_auth, self.interface_hrn + '.slicemanager']
        for (record_hrn, type) in existing_records.keys():
            if record_hrn in system_records:
                continue
        
            record = existing_records[(record_hrn, type)]
            if record.peer_authority:
                continue

            if type == 'user':
                if record_hrn in users_dict:
                    continue  
            elif type in ['slice', 'authority']:
                if record_hrn in tenants_dict:
                    continue
            else:
                continue 
        
            record_object = existing_records[ (record_hrn, type) ]
            self.logger.info("OpenstackImporter: removing %s " % record)
            global_dbsession.delete(record_object)
            global_dbsession.commit()
                                   
        # save pub keys
        self.logger.info('OpenstackImporter: saving current pub keys')
        keys_filename = self.config.config_path + os.sep + 'person_keys.py'
        save_keys(keys_filename, user_keys)                
Example #4
    def __init__(self, auth_hierarchy, loc_logger):
        """
        Sets the import logger and the authority name. Gathers all the
        records already registered in the SFA DB and breaks them into 3 dicts:
        by (type, hrn), by email, and by (type, pointer).

        :param auth_hierarchy: authority name
        :type auth_hierarchy: string
        :param loc_logger: local logger
        :type loc_logger: _SfaLogger

        """
        self.auth_hierarchy = auth_hierarchy
        self.logger = loc_logger
        self.logger.setLevelDebug()

        #retrieve all existing SFA objects
        self.all_records = global_dbsession.query(RegRecord).all()

        # initialize record.stale to True by default,
        # then mark stale=False on the ones that are in use
        for record in self.all_records:
            record.stale = True
        #create hash by (type,hrn)
        #used  to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict([((record.type, record.hrn), record)
                 for record in self.all_records])

        self.users_rec_by_email = \
            dict([(record.email, record)
                 for record in self.all_records if record.type == 'user'])

        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict([((str(record.type), record.pointer), record)
                  for record in self.all_records if record.pointer != -1])
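
The importers below look records up through locate_by_type_hrn and locate_by_type_pointer, whose bodies are not part of these excerpts; a minimal sketch, assuming they simply read the dicts built in this constructor:

    def locate_by_type_hrn(self, record_type, hrn):
        # sketch: return the record already known to SFA for (type, hrn), or None
        return self.records_by_type_hrn.get((record_type, hrn))

    def locate_by_type_pointer(self, record_type, pointer):
        # sketch: same lookup, keyed by (type, pointer)
        return self.records_by_type_pointer.get((str(record_type), pointer))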
Example #5
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only the required fields
        sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                               ['site_id','login_base','node_ids','slice_ids','person_ids', 'name', 'hrn'])
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids', 'hrn'])
        # create a hash of persons by person_id
        persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
        disabled_person_ids = [ person['person_id'] for person in disabled_persons ] 
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
                               'key_type': 'ssh'} )
        # create a hash of keys by key_id
        keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] ) 
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {} 
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes  
        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all plc slices
        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids', 'hrn'])
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

        # isolate special vini case in separate method
        self.create_special_vini_record (interface_hrn)

        # Get top authority record
        top_auth_record=self.locate_by_type_hrn ('authority', root_auth)
        admins = []

        # start importing 
        for site in sites:
            try:
               site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except: 
               site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            #site_hrn = _get_site_hrn(interface_hrn, site)
            site_hrn = site['hrn']
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer=site['site_id'],
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn) 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
                    continue 
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn) 
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False

            site_pis=[]
            # import persons
            for person_id in site['person_ids']:
                proceed=False
                if person_id in persons_by_id:
                    person=persons_by_id[person_id]
                    proceed=True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                #person_hrn = email_to_hrn(site_hrn, person['email'])
                person_hrn = person['hrn']
                if person_hrn is None:
                    self.logger.warn("Person %s has no hrn - skipped"%person['email'])
                    continue
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key (person, plc_keys):
                    pubkey=None
                    if  person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'],[])
                    if not user_record:
                        (pubkey,pkey) = init_person_key (person, plc_keys )
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
                        user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                               pointer=person['person_id'], 
                                               authority=get_authority(person_hrn),
                                               email=person['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys
                        def sfa_key_in_list (sfa_key,plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key']==sfa_key.key:
                                    return True
                            return False
                        # are all the SFA keys known to PLC ?
                        new_keys=False
                        if not sfa_keys and plc_keys:
                            new_keys=True
                        else: 
                            for sfa_key in sfa_keys:
                                 if not sfa_key_in_list (sfa_key,plc_keys):
                                     new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_person_key (person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" % user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale=False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append (user_record)

                    # PL Admins need to marked as PI of the top authority record
                    if 10 in person['role_ids'] and user_record not in top_auth_record.reg_pis:
                        admins.append(user_record)

                except:
                    self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
    
            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis)) 
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
                    continue
                #slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_hrn = slice['hrn']
                if slice_hrn is None:
                    self.logger.warning("Slice %s has no hrn - skipped"%slice['name'])
                    continue
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False

        # Set PL Admins as PI's of the top authority
        if admins:
            top_auth_record.reg_pis = list(set(admins))
            global_dbsession.commit()
            self.logger.info('PlImporter: set PL admins %s as PIs of %s'%(admins,top_auth_record.hrn))

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
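
remember_record, called after each commit above, is not defined in these excerpts either; a minimal sketch, assuming it just registers the freshly created record in the lookup dicts so later loop iterations can find it:

    def remember_record(self, record):
        # sketch: make the new record visible to locate_by_type_hrn / locate_by_type_pointer
        self.records_by_type_hrn[(record.type, record.hrn)] = record
        if record.pointer != -1:
            self.records_by_type_pointer[(record.type, record.pointer)] = record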
Example #6
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = DummyShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True
        
        # DEBUG
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records: print record

        ######## retrieve Dummy TB data
        # Get all plc sites
        # retrieve only the required fields
        sites = [shell.GetTestbedInfo()]
        print "sites: " + sites
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all dummy TB users
        users = shell.GetUsers()
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all dummy TB public keys
        keys = []
        for user in users:
            if 'keys' in user:
                keys.extend(user['keys'])
        # create a dict user_id -> [ keys ]
        keys_by_person_id = {} 
        for user in users:
             if 'keys' in user:
                 keys_by_person_id[user['user_id']] = user['keys']
        # Get all dummy TB nodes  
        nodes = shell.GetNodes()
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all dummy TB slices
        slices = shell.GetSlices()
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )


        # start importing
        print " STARTING FOR SITES" 
        for site in sites:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            print site_hrn
            print site_record
            if not site_record:
                try:
                    print "TRY TO CREATE SITE RECORD"
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        print "create auth "+urn
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    print "urn: "+urn
                    print "auth_info: " + auth_info
                    print site_record
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("DummyImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("DummyImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node in nodes:
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("DummyImporter: failed to import node") 
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False
            
            all_records = global_dbsession.query(RegRecord).all()
            for record in all_records: print record
            
            site_pis=[]
            # import users
            for user in users:
                user_hrn = email_to_hrn(site_hrn, user['email'])
                # xxx suspicious again
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', user_hrn)

                # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object)

                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['keys']:
                        # randomly pick first key in set
                        for key in user['keys']:
                             pubkey = key
                             try:
                                pkey = convert_public_key(pubkey)
                                break
                             except:
                                continue
                        if not pkey:
                            self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new user
                try:
                    if not user_record:
                        (pubkey,pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email(user['email'])
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['user_id'], 
                                                 authority=get_authority(user_hrn),
                                                 email=user['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the 
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in Dummy TB ?
                        new_keys=False
                        for key in user['keys']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("DummyImporter: updated person: %s" % user_record)
                    user_record.email = user['email']
                    global_dbsession.commit()
                    user_record.stale=False
                except:
                    self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))
    

            # import slices
            for slice in slices:
                slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("DummyImporter: failed to import slice")
                else:
                    # xxx update the record ...
                    self.logger.warning ("Slice update not yet implemented")
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("DummyImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
Example #7
    def run(self, options):
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell(config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn)
        # we essentially use this to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records:
            record.stale = True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only the required fields
        sites = shell.GetSites({
            'peer_id': None,
            'enabled': True
        }, [
            'site_id', 'login_base', 'node_ids', 'slice_ids', 'person_ids',
            'name'
        ])
        # create a hash of sites by login_base
        #        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({
            'peer_id': None,
            'enabled': True
        }, ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
        # create a hash of persons by person_id
        persons_by_id = dict([(person['person_id'], person)
                              for person in persons])
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({
            'peer_id': None,
            'enabled': False
        }, ['person_id'])
        disabled_person_ids = [
            person['person_id'] for person in disabled_persons
        ]
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys({
            'peer_id': None,
            'key_id': key_ids,
            'key_type': 'ssh'
        })
        # create a hash of keys by key_id
        keys_by_id = dict([(key['key_id'], key) for key in keys])
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {}
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning(
                        "Could not spot key %d - probably non-ssh" % key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes
        nodes = shell.GetNodes({'peer_id': None},
                               ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict([(
            node['node_id'],
            node,
        ) for node in nodes])
        # Get all plc slices
        slices = shell.GetSlices({'peer_id': None},
                                 ['slice_id', 'name', 'person_ids'])
        # create hash by slice_id
        slices_by_id = dict([(slice['slice_id'], slice) for slice in slices])

        # isolate special vini case in separate method
        self.create_special_vini_record(interface_hrn)

        # start importing
        for site in sites:
            try:
                site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except:
                site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record = self.locate_by_type_hrn('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(
                        hrn=site_hrn,
                        gid=auth_info.get_gid_object(),
                        pointer=site['site_id'],
                        authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info(
                        "PlImporter: imported authority (site) : %s" %
                        site_record)
                    self.remember_record(site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc(
                        "PlImporter: failed to import site %s. Skipping child records"
                        % site_hrn)
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale = False

            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot find node_id %s - ignored" %
                        node_id)
                    continue
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn = hostname_to_hrn(site_auth, site_name,
                                           node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn('node', node_hrn)
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        node_record = RegNode(
                            hrn=node_hrn,
                            gid=node_gid,
                            pointer=node['node_id'],
                            authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" %
                                         node_record)
                        self.remember_record(node_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import node %s" % node_hrn)
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale = False

            site_pis = []
            # import persons
            for person_id in site['person_ids']:
                proceed = False
                if person_id in persons_by_id:
                    person = persons_by_id[person_id]
                    proceed = True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning(
                        "PlImporter: cannot locate person_id %s in site %s - ignored"
                        % (person_id, site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                person_hrn = email_to_hrn(site_hrn, person['email'])
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn('user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key(person, plc_keys):
                    pubkey = None
                    if person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn(
                                'PlImporter: unable to convert public key for %s'
                                % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn(
                            "PlImporter: person %s does not have a PL public key"
                            % person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'], [])
                    if not user_record:
                        (pubkey, pkey) = init_person_key(person, plc_keys)
                        person_gid = self.auth_hierarchy.create_gid(
                            person_urn,
                            create_uuid(),
                            pkey,
                            email=person['email'])
                        user_record = RegUser(
                            hrn=person_hrn,
                            gid=person_gid,
                            pointer=person['person_id'],
                            authority=get_authority(person_hrn),
                            email=person['email'])
                        if pubkey:
                            user_record.reg_keys = [
                                RegKey(pubkey['key'], pubkey['key_id'])
                            ]
                        else:
                            self.logger.warning("No key found for user %s" %
                                                user_record)
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" %
                                         user_record)
                        self.remember_record(user_record)
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys

                        def sfa_key_in_list(sfa_key, plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key'] == sfa_key.key:
                                    return True
                            return False

                        # are all the SFA keys known to PLC ?
                        new_keys = False
                        if not sfa_keys and plc_keys:
                            new_keys = True
                        else:
                            for sfa_key in sfa_keys:
                                if not sfa_key_in_list(sfa_key, plc_keys):
                                    new_keys = True
                        if new_keys:
                            (pubkey, pkey) = init_person_key(person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(
                                person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys = []
                            else:
                                user_record.reg_keys = [
                                    RegKey(pubkey['key'], pubkey['key_id'])
                                ]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" %
                                             user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale = False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append(user_record)
                except:
                    self.logger.log_exc(
                        "PlImporter: failed to import person %d %s" %
                        (person['person_id'], person['email']))

            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis))
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot locate slice_id %s - ignored" %
                        slice_id)
                    continue
                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_record = self.locate_by_type_hrn('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        slice_record = RegSlice(
                            hrn=slice_hrn,
                            gid=slice_gid,
                            pointer=slice['slice_id'],
                            authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" %
                                         slice_record)
                        self.remember_record(slice_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import slice %s (%s)" %
                            (slice_hrn, slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                global_dbsession.commit()
                slice_record.stale = False

        ### remove stale records
        # special records must be preserved
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale = False

        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" %
                                 record)
                global_dbsession.delete(record)
                global_dbsession.commit()
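
The stale-record sweep just above follows a mark-and-sweep pattern that recurs in every importer in these examples: flag every pre-existing registry record as stale, clear the flag on everything the testbed still reports (plus system and federated records), then delete whatever is left flagged. Below is a minimal sketch of that pattern in isolation; `session`, `Record` and `refresh` are placeholders standing in for global_dbsession, RegRecord and the per-type import loops, not part of the SFA API.

def mark_and_sweep(session, Record, system_hrns, refresh):
    # hedged sketch of the cleanup used by the importers in these examples
    records = session.query(Record).all()
    for record in records:
        record.stale = True            # transient attribute, not a DB column
    refresh(records)                   # import loops flip stale=False on live records
    for record in records:
        if record.hrn in system_hrns or record.peer_authority:
            record.stale = False       # never delete system/federated records
    for record in records:
        if getattr(record, 'stale', True):   # a missing flag counts as stale
            session.delete(record)
            session.commit()
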
Beispiel #9
0
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell (config)
                
        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()
        
        # Delete all default records
        #for record in all_records:
        #    global_dbsession.delete(record)
        #    global_dbsession.commit()
        #all_records = global_dbsession.query(RegRecord).all()
        
        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn) 
        self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
        # create dict keyed by (type,pointer) 
        self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])
        
        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: 
            record.stale=True
        
        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        sites = [shell.get_testbed_info()]
        
        # USERS
        users = shell.get_users({})
        
        #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
        # KEYS
        # auth_tokens of the users. Dict (user_id:[keys])
        
        # NODES
        nodes = shell.get_nodes({})
        
        # SLICES
        slices = shell.get_slices({})
        
        
        # Import records to the SFA registry
        # SITE
        for site in sites:
            # Get hrn of the site (authority)
            site_hrn = _get_site_hrn(interface_hrn, site)
            # Try to locate the site_hrn in the SFA records
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            
            if not site_record:
                # Create/Import record for the site authority
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    # Create record for the site authority and add it to the Registry
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("CLabImporter: imported authority (site) : %s" % site_hrn) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("CLabImporter: failed to import site. Skipping child records") 
                    continue 
            else:
                # Authority record already in the SFA registry. Update?
                pass
            
            # Fresh record in SFA Registry
            site_record.stale=False
            
            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records: 
            #    print record
            
             
            # For the current site authority, import child entities/records
            
            # NODES
            for node in nodes:
                # Obtain parameters of the node: site_auth, site_name and hrn of the node
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn =  hostname_to_hrn(site_hrn, node['name'])
                # Reduce hrn up to 64 characters
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                
                # Try to locate the node_hrn in the SFA records
                node_record = self.locate_by_type_hrn ('node', node_hrn )
                if not node_record:
                    # Create/Import record for the node
                    try:
                        # Create a keypair for the node
                        pkey = Keypair(create=True)
                        # Obtain parameters 
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        # Create record for the node and add it to the Registry
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported node: %s" %node_hrn)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("CLabImporter: failed to import node") 
                else:
                    # Node record already in the SFA registry. Update?
                    pass
                
                # Fresh record in SFA Registry
                node_record.stale=False
                # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records: 
                #    print record
                
    
            # USERS
            for user in users:
                # dummyimporter uses email... but Clab can use user['name']
                user_hrn = username_to_hrn (site_hrn, user['name'])
                # Reduce hrn up to 64 characters
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')
                
                # Try to locate the user_hrn in the SFA records
                user_record = self.locate_by_type_hrn ('user', user_hrn)


                # Auxiliary function to get the keypair of the user from the testbed database
                # If the user has several keys, pick the first one that converts cleanly
                # If the user has no keys, generate a new keypair for the user's gid
                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if  user['auth_tokens']:
                        # pick the first key that converts to a usable public key
                        for key in user['auth_tokens']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn('CLabImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("CLabImporter: user %s does not have a CLab public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)
                ###########################
                
                try:
                    if not user_record:
                        # Create/Import record for the user
                        # Obtain the user's key pair from the testbed keys
                        (pubkey,pkey) = init_user_key (user)
                        # Obtain parameters
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email("*****@*****.**"%(user['name']))
                        # Create record for the user and add it to the Registry
                        user_record = RegUser (hrn=user_hrn, gid=user_gid, 
                                                 pointer=user['id'], 
                                                 authority=get_authority(user_hrn),
                                                 email="*****@*****.**"%(user['name']))
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_hrn)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("ClabImporter: imported person: %s" % user_hrn)
                        self.remember_record ( user_record )

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the 
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in the testbed ?
                        new_keys=False
                        for key in user['auth_tokens']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("CLabImporter: updated person: %s" % user_hrn)
                    user_record.email = "*****@*****.**"%(user['name'])
                    global_dbsession.commit()
                                        
                    # Fresh record in SFA Registry
                    user_record.stale=False
                except:
                    self.logger.log_exc("CLabImporter: failed to import user %d %s"%(user['id'],user['name']))
            
            # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records: 
                #    print record         
                    
            # SLICES
            for slice in slices:
                # Obtain the hrn of the slice
                slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
                # Try to locate the slice_hrn in the SFA records
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                
                if not slice_record:
                    # Create/Import record for the slice
                    try:
                        #Create a keypair for the slice
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        # Create record for the slice and add it to the Registry
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("CLabImporter: failed to import slice")
                else:
                    # Slice record already in the SFA registry. Update?
                    self.logger.warning ("Slice already existing in SFA Registry")
                    pass
                
                # Get current users associated with the slice
                users_of_slice = shell.get_users_by_slice(slice)
                # record current users associated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
                global_dbsession.commit()
                                
                # Fresh record in SFA Registry 
                slice_record.stale=False    
                
     
        # Remove stale records. Old/non-fresh records that were in the SFA Registry
        
        # Preserve special records 
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
                
        # Remove all the records that do not have its stale parameter set to False
        for record in all_records:
            try:
                stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("CLabImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
                
        # DEBUG
        print 'SFA REGISTRY - Result of Import:'
        all_records = global_dbsession.query(RegRecord).all()
        for record in all_records: 
            print record  
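
The importer relies on three small lookup helpers, locate_by_type_hrn, locate_by_type_pointer and remember_record, that are not included in these snippets. The sketch below is a plausible reconstruction, inferred from how records_by_type_hrn and records_by_type_pointer are built in run(); only the dict layout and the pointer != -1 convention come from the code above, the method bodies themselves are assumptions.

# Hypothetical reconstruction of the importer's lookup helpers (shown as
# plain functions; in the class they would be methods on the importer).
def locate_by_type_hrn(self, record_type, hrn):
    return self.records_by_type_hrn.get((record_type, hrn))

def locate_by_type_pointer(self, record_type, pointer):
    return self.records_by_type_pointer.get((record_type, pointer))

def remember_record(self, record):
    # keep both indexes in sync for records created during this run,
    # so later lookups (e.g. reg_researchers) can find them
    self.records_by_type_hrn[(record.type, record.hrn)] = record
    if record.pointer != -1:
        self.records_by_type_pointer[(record.type, record.pointer)] = record
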
Beispiel #10
0
    def import_single_slice(self, slicename):
        '''
        Method to import a single slice from the testbed database to the SFA Registry.
        The slice being imported is specified by name. 
        The method is used in the verify_slice method (clab_slices.py) when a slice is automatically
        created in the testbed database.
        
        :param slicename: name of the slice being imported
        :type string        
        '''
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell (config)
        
        self.logger.debug("Import Single slice: %s"%slicename)
                
        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()
        
        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn) 
        self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
        # create dict keyed by (type,pointer) 
        self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])
        
        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        site = shell.get_testbed_info()

        # SLICES
        slice = shell.get_slice_by(slice_name=slicename)
        
        # Import records to the SFA registry
        # SITE
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        #site_record=self.locate_by_type_hrn ('authority', site_hrn)
        
        # For the current site authority, import child entities/records    
        # SLICES
        # Obtain the hrn of the slice
        slice_hrn = slicename_to_hrn(slice['name'], site_hrn)
        # Try to locate the slice_hrn in the SFA records
        slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
        
        if not slice_record:
            # Create/Import record for the slice
            try:
                #Create a keypair for the slice
                pkey = Keypair(create=True)
                # Obtain parameters
                urn = hrn_to_urn(slice_hrn, 'slice')
                slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                # Create record for the slice and add it to the Registry
                slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                         pointer=slice['id'],
                                         authority=get_authority(slice_hrn))
                slice_record.just_created()
                global_dbsession.add(slice_record)
                global_dbsession.commit()
                self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)  
                self.remember_record ( slice_record )
            except:
                self.logger.log_exc("CLabImporter: failed to import slice")
        else:
            # Slice record already in the SFA registry. Update?
            self.logger.warning ("Slice already existing in SFA Registry")
            pass
        
        # Get current users associated with the slice
        users_of_slice = shell.get_users_by_slice(slice)
        # record current users associated with the slice
        slice_record.reg_researchers = \
            [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
        global_dbsession.commit()
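
Per its docstring, import_single_slice is meant to be called from verify_slice in clab_slices.py right after a slice has been auto-created in the testbed. The sketch below shows one plausible call site; get_slice_by() appears in the examples, but create_slice() and the way shell and importer are obtained are assumptions, not the documented ClabShell API.

# Hedged sketch of a verify_slice-style caller.
def verify_slice(shell, importer, slicename):
    if not shell.get_slice_by(slice_name=slicename):
        shell.create_slice({'name': slicename})   # assumed testbed-side call
    # mirror the (possibly just-created) slice into the SFA Registry
    importer.import_single_slice(slicename)
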
Beispiel #11
0
 def import_single_node(self, nodename):
     '''
     Method to import a single node from the testbed database to the SFA Registry.
     The node being imported is specified by name. 
     The method is used in the verify_node method (clab_slices.py) when a node is automatically
     created in the testbed database.
     
     :param nodename: name of the node being imported
     :type string        
     '''
     config = Config ()
     interface_hrn = config.SFA_INTERFACE_HRN
     root_auth = config.SFA_REGISTRY_ROOT_AUTH
     shell = ClabShell (config)
     
     self.logger.debug("Import Single node: %s"%nodename)
             
     # retrieve all existing SFA objects
     all_records = global_dbsession.query(RegRecord).all()
     
     # Dicts to avoid duplicates in SFA database
     # create dict keyed by (type,hrn) 
     self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
     # create dict keyed by (type,pointer) 
     self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])
     
     # Retrieve data from the CLab testbed and create dictionaries by id
     # SITE
     site = shell.get_testbed_info()
     
     # NODES
     node = shell.get_node_by(node_name=nodename)
     
     # Import records to the SFA registry
     # SITE
     # Get hrn of the site (authority)
     site_hrn = _get_site_hrn(interface_hrn, site)
     # Try to locate the site_hrn in the SFA records
     #site_record=self.locate_by_type_hrn ('authority', site_hrn)
             
     # NODE
     # Obtain parameters of the node: site_auth, site_name and hrn of the node
     site_auth = get_authority(site_hrn)
     site_name = site['name']
     node_hrn =  hostname_to_hrn(site_hrn, node['name'])
     # Reduce hrn up to 64 characters
     if len(node_hrn) > 64: node_hrn = node_hrn[:64]
     
     # Try to locate the node_hrn in the SFA records
     node_record = self.locate_by_type_hrn ('node', node_hrn )
     if not node_record:
         # Create/Import record for the node
         try:
             # Create a keypair for the node
             pkey = Keypair(create=True)
             # Obtain parameters 
             urn = hrn_to_urn(node_hrn, 'node')
             node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
             # Create record for the node and add it to the Registry
             node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                    pointer =node['id'],
                                    authority=get_authority(node_hrn))
             node_record.just_created()
             global_dbsession.add(node_record)
             global_dbsession.commit()
             self.logger.info("CLabImporter: imported node: %s" %node_hrn)  
             self.remember_record (node_record)
         except:
             self.logger.log_exc("CLabImporter: failed to import node") 
     else:
         # Node record already in the SFA registry. Update?
         pass
Beispiel #12
0
 def record_exists(self, type, hrn):
     return global_dbsession.query(RegRecord).filter_by(
         hrn=hrn, type=type).count() != 0
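
record_exists gives a cheap existence check against the registry, which pairs naturally with import_single_node in a verify_node-style flow. Below is a minimal sketch under those assumptions; the hrn is built with hostname_to_hrn and capped at 64 characters exactly as in the node examples above, and importer is assumed to expose both methods.

# Hedged sketch: only import a node if the registry does not know it yet
# (assumes hostname_to_hrn is imported as in the snippets above).
def ensure_node_imported(importer, site_hrn, nodename):
    node_hrn = hostname_to_hrn(site_hrn, nodename)[:64]
    if not importer.record_exists('node', node_hrn):
        importer.import_single_node(nodename)
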
Beispiel #13
0
    def run(self, options):
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell(config)

        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # Delete all default records
        #for record in all_records:
        #    global_dbsession.delete(record)
        #    global_dbsession.commit()
        #all_records = global_dbsession.query(RegRecord).all()

        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn)
        self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                         for record in all_records])
        # create dict keyed by (type,pointer)
        self.records_by_type_pointer = dict([((record.type, record.pointer),
                                              record) for record in all_records
                                             if record.pointer != -1])

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records:
            record.stale = True

        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        sites = [shell.get_testbed_info()]

        # USERS
        users = shell.get_users({})

        #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
        # KEYS
        # auth_tokens of the users. Dict (user_id:[keys])

        # NODES
        nodes = shell.get_nodes({})

        # SLICES
        slices = shell.get_slices({})

        # Import records to the SFA registry
        # SITE
        for site in sites:
            # Get hrn of the site (authority)
            site_hrn = _get_site_hrn(interface_hrn, site)
            # Try to locate the site_hrn in the SFA records
            site_record = self.locate_by_type_hrn('authority', site_hrn)

            if not site_record:
                # Create/Import record for the site authority
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    # Create record for the site authority and add it to the Registry
                    site_record = RegAuthority(
                        hrn=site_hrn,
                        gid=auth_info.get_gid_object(),
                        pointer=-1,
                        authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info(
                        "CLabImporter: imported authority (site) : %s" %
                        site_hrn)
                    self.remember_record(site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc(
                        "CLabImporter: failed to import site. Skipping child records"
                    )
                    continue
            else:
                # Authority record already in the SFA registry. Update?
                pass

            # Fresh record in SFA Registry
            site_record.stale = False

            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records:
            #    print record

            # For the current site authority, import child entities/records

            # NODES
            for node in nodes:
                # Obtain parameters of the node: site_auth, site_name and hrn of the node
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn = hostname_to_hrn(site_hrn, node['name'])
                # Reduce hrn up to 64 characters
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]

                # Try to locate the node_hrn in the SFA records
                node_record = self.locate_by_type_hrn('node', node_hrn)
                if not node_record:
                    # Create/Import record for the node
                    try:
                        # Create a keypair for the node
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        # Create record for the node and add it to the Registry
                        node_record = RegNode(
                            hrn=node_hrn,
                            gid=node_gid,
                            pointer=node['id'],
                            authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported node: %s" %
                                         node_hrn)
                        self.remember_record(node_record)
                    except:
                        self.logger.log_exc(
                            "CLabImporter: failed to import node")
                else:
                    # Node record already in the SFA registry. Update?
                    pass

                # Fresh record in SFA Registry
                node_record.stale = False
                # DEBUG
                #print '*********** ALL RECORDS ***********'
                #all_records = global_dbsession.query(RegRecord).all()
                #for record in all_records:
                #    print record

            # USERS
            for user in users:
                # dummyimporter uses email... but Clab can use user['name']
                user_hrn = username_to_hrn(site_hrn, user['name'])
                # Reduce hrn up to 64 characters
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')

                # Try to locate the user_hrn in the SFA records
                user_record = self.locate_by_type_hrn('user', user_hrn)

                # Auxiliary function to get the keypair of the user from the testbed database
                # If the user has several keys, pick the first one that converts cleanly
                # If the user has no keys, generate a new keypair for the user's gid
                def init_user_key(user):
                    pubkey = None
                    pkey = None
                    if user['auth_tokens']:
                        # pick the first key that converts to a usable public key
                        for key in user['auth_tokens']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn(
                                'CLabImporter: unable to convert public key for %s'
                                % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn(
                            "CLabImporter: user %s does not have a CLab public key"
                            % user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                ###########################

                try:
                    if not user_record:
                        # Create/Import record for the user
                        # Obtain the user's key pair from the testbed keys
                        (pubkey, pkey) = init_user_key(user)
                        # Obtain parameters
                        user_gid = self.auth_hierarchy.create_gid(
                            user_urn, create_uuid(), pkey)
                        user_gid.set_email("*****@*****.**" % (user['name']))
                        # Create record for the user and add it to the Registry
                        user_record = RegUser(
                            hrn=user_hrn,
                            gid=user_gid,
                            pointer=user['id'],
                            authority=get_authority(user_hrn),
                            email="*****@*****.**" % (user['name']))
                        if pubkey:
                            user_record.reg_keys = [RegKey(pubkey)]
                        else:
                            self.logger.warning("No key found for user %s" %
                                                user_hrn)
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("ClabImporter: imported person: %s" %
                                         user_hrn)
                        self.remember_record(user_record)

                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys

                        def key_in_list(key, sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key == key: return True
                            return False

                        # is there a new key in the testbed ?
                        new_keys = False
                        for key in user['auth_tokens']:
                            if not key_in_list(key, sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey, pkey) = init_user_key(user)
                            user_gid = self.auth_hierarchy.create_gid(
                                user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys = []
                            else:
                                user_record.reg_keys = [RegKey(pubkey)]
                            self.logger.info(
                                "CLabImporter: updated person: %s" % user_hrn)
                    user_record.email = "*****@*****.**" % (user['name'])
                    global_dbsession.commit()

                    # Fresh record in SFA Registry
                    user_record.stale = False
                except:
                    self.logger.log_exc(
                        "CLabImporter: failed to import user %d %s" %
                        (user['id'], user['name']))

            # DEBUG
            #print '*********** ALL RECORDS ***********'
            #all_records = global_dbsession.query(RegRecord).all()
            #for record in all_records:
            #    print record

            # SLICES
            for slice in slices:
                # Obtain the hrn of the slice
                slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
                # Try to locate the slice_hrn in the SFA records
                slice_record = self.locate_by_type_hrn('slice', slice_hrn)

                if not slice_record:
                    # Create/Import record for the slice
                    try:
                        #Create a keypair for the slice
                        pkey = Keypair(create=True)
                        # Obtain parameters
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        # Create record for the slice and add it to the Registry
                        slice_record = RegSlice(
                            hrn=slice_hrn,
                            gid=slice_gid,
                            pointer=slice['id'],
                            authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("CLabImporter: imported slice: %s" %
                                         slice_hrn)
                        self.remember_record(slice_record)
                    except:
                        self.logger.log_exc(
                            "CLabImporter: failed to import slice")
                else:
                    # Slice record already in the SFA registry. Update?
                    self.logger.warning(
                        "Slice already exists in the SFA Registry")
                    pass

                # Get current users associated with the slice
                users_of_slice = shell.get_users_by_slice(slice)
                # record current users associated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
                global_dbsession.commit()

                # Fresh record in SFA Registry
                slice_record.stale = False

        # Remove stale records. Old/non-fresh records that were in the SFA Registry

        # Preserve special records
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False

        # Remove all the records that do not have its stale parameter set to False
        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("CLabImporter: deleting stale record: %s" %
                                 record)
                global_dbsession.delete(record)
                global_dbsession.commit()

        # DEBUG
        print 'SFA REGISTRY - Result of Import:'
        all_records = global_dbsession.query(RegRecord).all()
        for record in all_records:
            print record
Beispiel #14
0
    def import_single_slice(self, slicename):
        '''
        Method to import a single slice from the testbed database to the SFA Registry.
        The slice being imported is specified by name. 
        The method is used in the verify_slice method (clab_slices.py) when a slice is automatically
        created in the testbed database.
        
        :param slicename: name of the slice being imported
        :type string        
        '''
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell(config)

        self.logger.debug("Import Single slice: %s" % slicename)

        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn)
        self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                         for record in all_records])
        # create dict keyed by (type,pointer)
        self.records_by_type_pointer = dict([((record.type, record.pointer),
                                              record) for record in all_records
                                             if record.pointer != -1])

        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        site = shell.get_testbed_info()

        # SLICES
        slice = shell.get_slice_by(slice_name=slicename)

        # Import records to the SFA registry
        # SITE
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        #site_record=self.locate_by_type_hrn ('authority', site_hrn)

        # For the current site authority, import child entities/records
        # SLICES
        # Obtain the hrn of the slice
        slice_hrn = slicename_to_hrn(slice['name'], site_hrn)
        # Try to locate the slice_hrn in the SFA records
        slice_record = self.locate_by_type_hrn('slice', slice_hrn)

        if not slice_record:
            # Create/Import record for the slice
            try:
                #Create a keypair for the slice
                pkey = Keypair(create=True)
                # Obtain parameters
                urn = hrn_to_urn(slice_hrn, 'slice')
                slice_gid = self.auth_hierarchy.create_gid(
                    urn, create_uuid(), pkey)
                # Create record for the slice and add it to the Registry
                slice_record = RegSlice(hrn=slice_hrn,
                                        gid=slice_gid,
                                        pointer=slice['id'],
                                        authority=get_authority(slice_hrn))
                slice_record.just_created()
                global_dbsession.add(slice_record)
                global_dbsession.commit()
                self.logger.info("CLabImporter: imported slice: %s" %
                                 slice_hrn)
                self.remember_record(slice_record)
            except:
                self.logger.log_exc("CLabImporter: failed to import slice")
        else:
            # Slice record already in the SFA registry. Update?
            self.logger.warning("Slice already existing in SFA Registry")
            pass

        # Get current users associated with the slice
        users_of_slice = shell.get_users_by_slice(slice)
        # record current users associated with the slice
        slice_record.reg_researchers = \
            [ self.locate_by_type_pointer ('user',user['id']) for user in users_of_slice]
        global_dbsession.commit()
Beispiel #15
0
    def import_single_node(self, nodename):
        '''
        Method to import a single node from the testbed database to the SFA Registry.
        The node being imported is specified by name. 
        The method is used in the verify_node method (clab_slices.py) when a node is automatically
        created in the testbed database.
        
        :param nodename: name of the node being imported
        :type string        
        '''
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = ClabShell(config)

        self.logger.debug("Import Single node: %s" % nodename)

        # retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # Dicts to avoid duplicates in SFA database
        # create dict keyed by (type,hrn)
        self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                         for record in all_records])
        # create dict keyed by (type,pointer)
        self.records_by_type_pointer = dict([((record.type, record.pointer),
                                              record) for record in all_records
                                             if record.pointer != -1])

        # Retrieve data from the CLab testbed and create dictionaries by id
        # SITE
        site = shell.get_testbed_info()

        # NODES
        node = shell.get_node_by(node_name=nodename)

        # Import records to the SFA registry
        # SITE
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        #site_record=self.locate_by_type_hrn ('authority', site_hrn)

        # NODE
        # Obtain parameters of the node: site_auth, site_name and hrn of the node
        site_auth = get_authority(site_hrn)
        site_name = site['name']
        node_hrn = hostname_to_hrn(site_hrn, node['name'])
        # Reduce hrn up to 64 characters
        if len(node_hrn) > 64: node_hrn = node_hrn[:64]

        # Try to locate the node_hrn in the SFA records
        node_record = self.locate_by_type_hrn('node', node_hrn)
        if not node_record:
            # Create/Import record for the node
            try:
                # Create a keypair for the node
                pkey = Keypair(create=True)
                # Obtain parameters
                urn = hrn_to_urn(node_hrn, 'node')
                node_gid = self.auth_hierarchy.create_gid(
                    urn, create_uuid(), pkey)
                # Create record for the node and add it to the Registry
                node_record = RegNode(hrn=node_hrn,
                                      gid=node_gid,
                                      pointer=node['id'],
                                      authority=get_authority(node_hrn))
                node_record.just_created()
                global_dbsession.add(node_record)
                global_dbsession.commit()
                self.logger.info("CLabImporter: imported node: %s" % node_hrn)
                self.remember_record(node_record)
            except:
                self.logger.log_exc("CLabImporter: failed to import node")
        else:
            # Node record already in the SFA registry. Update?
            pass
Beispiel #16
0
 def record_exists (self, type, hrn):
    return global_dbsession.query(RegRecord).filter_by(hrn=hrn,type=type).count()!=0