Example #1
def get_credentials(pl_username, private_key, sfi_dir, password):
    # Getting user from PLE
    auth = {
        'AuthMethod': 'password',
        'Username': pl_username,
        'AuthString': password
    }
    ple = xmlrpclib.Server(PLE_API, allow_none=1)
    persons = ple.GetPersons(auth, {'email': pl_username},
                             ['person_id', 'site_ids'])
    if not persons:
        raise Exception("User not found.")
    person_id = persons[0]['person_id']
    site_id = persons[0]['site_ids'][0]

    # Getting site from PLE
    sites = ple.GetSites(auth, {'site_id': site_id}, ['login_base'])
    if not sites:
        raise Exception("Site not found")
    site_hrn = ".".join([INTERFACE_HRN, sites[0]['login_base']])

    user_hrn = email_to_hrn(site_hrn, pl_username)

    # Getting slices from PLE
    slices = ple.GetSlices(auth, {}, ['name', 'person_ids'])
    slices = [s for s in slices if person_id in s['person_ids']]

    # Delegating user account and slices
    sfa = SfaHelper(user_hrn, private_key, sfi_dir)
    sfa.bootstrap()

    creds = []

    c = {
        'target': user_hrn,
        'type': 'user',
        'cred': sfa.delegate('user', user_hrn)
    }
    creds.append(c)

    user_auth = get_authority(user_hrn)
    c = {
        'target': user_auth,
        'type': 'authority',
        'cred': sfa.delegate('authority', user_auth)
    }
    creds.append(c)

    for s in slices:
        s_hrn = slicename_to_hrn(INTERFACE_HRN, s['name'])
        c = {
            'target': s_hrn,
            'type': 'slice',
            'cred': sfa.delegate('slice', s_hrn)
        }
        creds.append(c)
    return creds
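
The function above chains three PLCAPI lookups (user, site, slices) and then delegates one credential per target through SfaHelper. A minimal, hypothetical call sketch follows; the email, key path, sfi directory and password are made-up values, and PLE_API, SfaHelper and the hrn helpers are assumed to come from the example's own module:

# Hypothetical usage sketch for get_credentials (all values are illustrative only)
if __name__ == '__main__':
    creds = get_credentials('alice@example.org',        # PLE account email
                            '/home/alice/.ssh/id_rsa',  # private key used for delegation
                            '/home/alice/.sfi',         # sfi working directory
                            'secret')                   # PLE password
    for c in creds:
        print "%s credential delegated for %s" % (c['type'], c['target'])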
Example #2
    def list_slices(self, creds, options):
        # look in cache first
        if self.cache:
            slices = self.cache.get('slices')
            if slices:
                logger.debug("PlDriver.list_slices returns from cache")
                return slices

        # get data from db
        slices = self.shell.GetSlices({'peer_id': None}, ['name'])
        slice_hrns = [slicename_to_hrn(self.hrn, slice['name']) for slice in slices]
        slice_urns = [hrn_to_urn(slice_hrn, 'slice') for slice_hrn in slice_hrns]

        # cache the result
        if self.cache:
            logger.debug("PlDriver.list_slices stores value in cache")
            self.cache.add('slices', slice_urns)

        return slice_urns
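
Example #2 is a straightforward cache-aside pattern: return the cached value when present, otherwise query the database and populate the cache. A generic sketch of the same pattern, assuming only a cache object with get/add methods like the one used above:

# Generic cache-aside helper (sketch only; the driver above inlines this logic)
def cached_lookup(cache, key, fetch):
    if cache:
        value = cache.get(key)
        if value:
            return value
    value = fetch()            # e.g. a callable wrapping shell.GetSlices(...)
    if cache:
        cache.add(key, value)
    return value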
Example #3
    def get_leases(self, slice_xrn=None, slice=None, options={}):
        
        if slice_xrn and not slice:
            return []

        now = int(time.time())
        filter = {'clip': now}
        if slice:
            filter.update({'name': slice['name']})
        return_fields = ['lease_id', 'hostname', 'site_id', 'name', 't_from', 't_until']
        leases = self.driver.shell.GetLeases(filter)
        grain = self.driver.shell.GetLeaseGranularity()

        site_ids = []
        for lease in leases:
            site_ids.append(lease['site_id'])

        # get sites
        sites_dict  = self.get_sites({'site_id': site_ids}) 
  
        rspec_leases = []
        for lease in leases:

            rspec_lease = Lease()
            
            # xxx how to retrieve site['login_base']
            site_id=lease['site_id']
            site=sites_dict[site_id]

            #rspec_lease['lease_id'] = lease['lease_id']
            rspec_lease['component_id'] = hostname_to_urn(self.driver.hrn, site['login_base'], lease['hostname'])
            if slice_xrn:
                slice_urn = slice_xrn
                slice_hrn = urn_to_hrn(slice_urn)
            else:
                slice_hrn = slicename_to_hrn(self.driver.hrn, lease['name'])
                slice_urn = hrn_to_urn(slice_hrn, 'slice')
            rspec_lease['slice_id'] = slice_urn
            rspec_lease['start_time'] = lease['t_from']
            rspec_lease['duration'] = (lease['t_until'] - lease['t_from']) / grain
            rspec_leases.append(rspec_lease)
        return rspec_leases
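
Note that 'duration' is expressed in units of the lease granularity returned by GetLeaseGranularity, not in seconds. A small worked sketch of that arithmetic, with an assumed granularity of 1800 seconds:

# Illustration only: duration is (t_until - t_from) / grain, i.e. a number of grains
grain = 1800                           # assumed 30-minute granularity
t_from, t_until = 1300000000, 1300003600
duration = (t_until - t_from) / grain  # -> 2 grains (one hour)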
Example #4
    def fill_record_hrns(self, records):
        """
        convert pl ids to hrns
        """

        # get ids
        slice_ids, person_ids, site_ids, node_ids = [], [], [], []
        for record in records:
            if 'site_id' in record:
                site_ids.append(record['site_id'])
            if 'site_ids' in record:
                site_ids.extend(record['site_ids'])
            if 'person_ids' in record:
                person_ids.extend(record['person_ids'])
            if 'slice_ids' in record:
                slice_ids.extend(record['slice_ids'])
            if 'node_ids' in record:
                node_ids.extend(record['node_ids'])

        # get pl records
        slices, persons, sites, nodes = {}, {}, {}, {}
        if site_ids:
            site_list = self.shell.GetSites(site_ids, ['site_id', 'login_base'])
            sites = list_to_dict(site_list, 'site_id')
        if person_ids:
            person_list = self.shell.GetPersons(person_ids, ['person_id', 'email'])
            persons = list_to_dict(person_list, 'person_id')
        if slice_ids:
            slice_list = self.shell.GetSlices(slice_ids, ['slice_id', 'name'])
            slices = list_to_dict(slice_list, 'slice_id')       
        if node_ids:
            node_list = self.shell.GetNodes(node_ids, ['node_id', 'hostname'])
            nodes = list_to_dict(node_list, 'node_id')
       
        # convert ids to hrns
        for record in records:
            # get all relevant data
            type = record['type']
            pointer = record['pointer']
            auth_hrn = self.hrn
            login_base = ''
            if pointer == -1:
                continue

            if 'site_id' in record:
                site = sites[record['site_id']]
                login_base = site['login_base']
                record['site'] = ".".join([auth_hrn, login_base])
            if 'person_ids' in record:
                emails = [persons[person_id]['email'] for person_id in record['person_ids'] \
                          if person_id in  persons]
                usernames = [email.split('@')[0] for email in emails]
                person_hrns = [".".join([auth_hrn, login_base, username]) for username in usernames]
                record['persons'] = person_hrns 
            if 'slice_ids' in record:
                slicenames = [slices[slice_id]['name'] for slice_id in record['slice_ids'] \
                              if slice_id in slices]
                slice_hrns = [slicename_to_hrn(auth_hrn, slicename) for slicename in slicenames]
                record['slices'] = slice_hrns
            if 'node_ids' in record:
                hostnames = [nodes[node_id]['hostname'] for node_id in record['node_ids'] \
                             if node_id in nodes]
                node_hrns = [hostname_to_hrn(auth_hrn, login_base, hostname) for hostname in hostnames]
                record['nodes'] = node_hrns
            if 'site_ids' in record:
                login_bases = [sites[site_id]['login_base'] for site_id in record['site_ids'] \
                               if site_id in sites]
                site_hrns = [".".join([auth_hrn, lbase]) for lbase in login_bases]
                record['sites'] = site_hrns

            if 'expires' in record:
                date = utcparse(record['expires'])
                datestring = datetime_to_string(date)
                record['expires'] = datestring 
            
        return records   
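
The example relies on a list_to_dict helper to index the PLC records by their id columns so the later loops can do constant-time lookups. A minimal sketch of what such a helper might look like (the real one is imported from SFA's utility code):

# Sketch of a list_to_dict-style helper as assumed by the example above
def list_to_dict(records, key):
    # build {record[key]: record} for every record in the list
    return dict([(record[key], record) for record in records])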
Example #5
    def run (self, options):
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell (config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn) 
        # we essentially use this to know if a given record is already known to SFA 
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer) 
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records 
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only required stuff
        #sites = shell.GetSites({'peer_id': None, 'enabled' : True},
        #                       ['site_id','login_base','node_ids','slice_ids','person_ids', 'name', 'hrn'])
        #wangyang
        sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                               ['site_id','login_base','node_ids','slice_ids','person_ids', 'name'])
        # create a hash of sites by login_base
#        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        #persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
        #                           ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids', 'hrn'])
        #wangyang
        persons = shell.GetPersons({'peer_id': None, 'enabled': True}, 
                                   ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
        # create a hash of persons by person_id
        persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
        disabled_person_ids = [ person['person_id'] for person in disabled_persons ] 
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids,
                               'key_type': 'ssh'} )
        # create a hash of keys by key_id
        keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] ) 
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {} 
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes  
        nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all plc slices
        slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids', 'hrn'])
        #wangyang
        #slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids'])
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

        # isolate special vini case in separate method
        self.create_special_vini_record (interface_hrn)

        # Get top authority record
        top_auth_record=self.locate_by_type_hrn ('authority', root_auth)
        admins = []

        # start importing 
        for site in sites:
            try:
               site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except: 
               site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            site_hrn = _get_site_hrn(interface_hrn, site)
            #site_hrn = site['hrn']
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer=site['site_id'],
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported authority (site) : %s" % site_record) 
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn) 
                    continue 
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
             
            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
                    continue 
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn =  hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid, 
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" % node_record)  
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn) 
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False

            site_pis=[]
            # import persons
            for person_id in site['person_ids']:
                proceed=False
                if person_id in persons_by_id:
                    person=persons_by_id[person_id]
                    proceed=True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                person_hrn = email_to_hrn(site_hrn, person['email'])
                #person_hrn = person['hrn']
                if person_hrn is None:
                    self.logger.warn("Person %s has no hrn - skipped"%person['email'])
                    continue
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn ( 'user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key (person, plc_keys):
                    pubkey=None
                    if  person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'],[])
                    if not user_record:
                        (pubkey,pkey) = init_person_key (person, plc_keys )
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
                        user_record = RegUser (hrn=person_hrn, gid=person_gid, 
                                               pointer=person['person_id'], 
                                               authority=get_authority(person_hrn),
                                               email=person['email'])
                        if pubkey: 
                            user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys
                        def sfa_key_in_list (sfa_key,plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key']==sfa_key.key:
                                    return True
                            return False
                        # are all the SFA keys known to PLC ?
                        new_keys=False
                        if not sfa_keys and plc_keys:
                            new_keys=True
                        else: 
                            for sfa_key in sfa_keys:
                                 if not sfa_key_in_list (sfa_key,plc_keys):
                                     new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_person_key (person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" % user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale=False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append (user_record)

                    # PL Admins need to be marked as PIs of the top authority record
                    if 10 in person['role_ids'] and user_record not in top_auth_record.reg_pis:
                        admins.append(user_record)

                except:
                    self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))
    
            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis)) 
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
                    continue
                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                #slice_hrn = slice['hrn']
                if slice_hrn is None:
                    self.logger.warning("Slice %s has no hrn - skipped"%slice['name'])
                    continue
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, 
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" % slice_record)  
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False

        # Set PL Admins as PI's of the top authority
        if admins:
            top_auth_record.reg_pis = list(set(admins))
            global_dbsession.commit()
            self.logger.info('PlImporter: set PL admins %s as PIs of %s'%(admins,top_auth_record.hrn))

        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records: 
            if record.hrn in system_hrns: 
                record.stale=False
            if record.peer_authority:
                record.stale=False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale=False

        for record in all_records:
            try:        stale=record.stale
            except:     
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
                global_dbsession.commit()
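
The importer's bookkeeping boils down to a mark-and-sweep pass: every existing SFA record starts with stale=True, each record encountered while walking the PLC data is marked stale=False, and whatever is still stale at the end gets deleted. A toy sketch of that idea with plain dictionaries (hypothetical data, no SQLAlchemy):

# Mark-and-sweep sketch (illustrative only; the importer works on RegRecord rows)
records = {'top.site_a': {'stale': True}, 'top.site_b': {'stale': True}}
seen_hrns = ['top.site_a']             # records found while walking PLC data
for hrn in seen_hrns:
    records[hrn]['stale'] = False
stale = [hrn for hrn, rec in records.items() if rec['stale']]
# stale == ['top.site_b'] -> these would be removed from the registry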
Example #6
def get_gids(registry=None, verbose=False):
    """
    Get the gid for all instantiated slices on this node and store it
    in /etc/sfa/slice.gid in the slice's filesystem
    """
    # define useful variables
    config = Config()
    data_dir = config.data_path
    config_dir = config.SFA_CONFIG_DIR
    trusted_certs_dir = config.get_trustedroots_dir()
    keyfile = data_dir + os.sep + "server.key"
    certfile = data_dir + os.sep + "server.cert"
    node_gid_file = config_dir + os.sep + "node.gid"
    node_gid = GID(filename=node_gid_file)
    hrn = node_gid.get_hrn()
    interface_hrn = config.SFA_INTERFACE_HRN
    # get credential
    cred = GetCredential(registry=registry, verbose=verbose)
    # make sure server key cert pair exists
    create_server_keypair(keyfile=keyfile, certfile=certfile, hrn=hrn, verbose=verbose)
    registry = server_proxy(url=registry, keyfile=keyfile, certfile=certfile)
            
    if verbose:
        print "Getting current slices on this node"
    # get a list of slices on this node
    from sfa.generic import Generic
    generic=Generic.the_flavour()
    api = generic.make_api(interface='component')
    xids_tuple = api.driver.nodemanager.GetXIDs()
    slices = eval(xids_tuple[1])
    slicenames = slices.keys()

    # generate a list of slices that don't have gids installed
    slices_without_gids = []
    for slicename in slicenames:
        if not os.path.isfile("/vservers/%s/etc/slice.gid" % slicename) \
        or not os.path.isfile("/vservers/%s/etc/node.gid" % slicename):
            slices_without_gids.append(slicename) 
    
    # convert slicenames to hrns
    hrns = [slicename_to_hrn(interface_hrn, slicename) \
            for slicename in slices_without_gids]
    
    # exit if there are no gids to install
    if not hrns:
        return
        
    if verbose:
        print "Getting gids for slices on this node from registry"  
    # get the gids and save them in the right place
    records = registry.GetGids(hrns, cred)
    for record in records:
        # if this isn't a slice record, skip it
        if not record['type'] == 'slice':
            continue
        slicename = hrn_to_pl_slicename(record['hrn'])
        # if this slice isn't really instantiated, skip it
        if not os.path.exists("/vservers/%(slicename)s" % locals()):
            continue
       
        # save the slice gid in /etc/sfa/ in the vservers filesystem
        vserver_path = "/vservers/%(slicename)s" % locals()
        gid = record['gid']
        slice_gid_filename = os.sep.join([vserver_path, "etc", "slice.gid"])
        if verbose:
            print "Saving GID for %(slicename)s as %(slice_gid_filename)s" % locals()
        GID(string=gid).save_to_file(slice_gid_filename, save_parents=True)
        # save the node gid in /etc/sfa
        node_gid_filename = os.sep.join([vserver_path, "etc", "node.gid"])
        if verbose:
            print "Saving node GID for %(slicename)s as %(node_gid_filename)s" % locals()
        node_gid.save_to_file(node_gid_filename, save_parents=True) 
Example #7
def get_gids(registry=None, verbose=False):
    """
    Get the gid for all instantiated slices on this node and store it
    in /etc/sfa/slice.gid in the slice's filesystem
    """
    # define useful variables
    config = Config()
    data_dir = config.data_path
    config_dir = config.SFA_CONFIG_DIR
    trusted_certs_dir = config.get_trustedroots_dir()
    keyfile = data_dir + os.sep + "server.key"
    certfile = data_dir + os.sep + "server.cert"
    node_gid_file = config_dir + os.sep + "node.gid"
    node_gid = GID(filename=node_gid_file)
    hrn = node_gid.get_hrn()
    interface_hrn = config.SFA_INTERFACE_HRN
    # get credential
    cred = GetCredential(registry=registry, verbose=verbose)
    # make sure server key cert pair exists
    create_server_keypair(keyfile=keyfile,
                          certfile=certfile,
                          hrn=hrn,
                          verbose=verbose)
    registry = server_proxy(url=registry, keyfile=keyfile, certfile=certfile)

    if verbose:
        print "Getting current slices on this node"
    # get a list of slices on this node
    from sfa.generic import Generic
    generic = Generic.the_flavour()
    api = generic.make_api(interface='component')
    xids_tuple = api.driver.nodemanager.GetXIDs()
    slices = eval(xids_tuple[1])
    slicenames = slices.keys()

    # generate a list of slices that don't have gids installed
    slices_without_gids = []
    for slicename in slicenames:
        if not os.path.isfile("/vservers/%s/etc/slice.gid" % slicename) \
        or not os.path.isfile("/vservers/%s/etc/node.gid" % slicename):
            slices_without_gids.append(slicename)

    # convert slicenames to hrns
    hrns = [slicename_to_hrn(interface_hrn, slicename) \
            for slicename in slices_without_gids]

    # exit if there are no gids to install
    if not hrns:
        return

    if verbose:
        print "Getting gids for slices on this node from registry"
    # get the gids and save them in the right place
    records = registry.GetGids(hrns, cred)
    for record in records:
        # if this isn't a slice record, skip it
        if not record['type'] == 'slice':
            continue
        slicename = hrn_to_pl_slicename(record['hrn'])
        # if this slice isn't really instantiated, skip it
        if not os.path.exists("/vservers/%(slicename)s" % locals()):
            continue

        # save the slice gid in /etc/sfa/ in the vservers filesystem
        vserver_path = "/vservers/%(slicename)s" % locals()
        gid = record['gid']
        slice_gid_filename = os.sep.join([vserver_path, "etc", "slice.gid"])
        if verbose:
            print "Saving GID for %(slicename)s as %(slice_gid_filename)s" % locals(
            )
        GID(string=gid).save_to_file(slice_gid_filename, save_parents=True)
        # save the node gid in /etc/sfa
        node_gid_filename = os.sep.join([vserver_path, "etc", "node.gid"])
        if verbose:
            print "Saving node GID for %(slicename)s as %(node_gid_filename)s" % locals(
            )
        node_gid.save_to_file(node_gid_filename, save_parents=True)
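
Both versions of get_gids write the fetched GIDs under the slice's vserver root. A small sketch of the path construction only, with a made-up slice name (real names come from the node manager's GetXIDs):

# Path-construction sketch (hypothetical slice name, illustrative only)
import os
slicename = 'example_slice'
vserver_path = "/vservers/%s" % slicename
slice_gid_filename = os.sep.join([vserver_path, "etc", "slice.gid"])
node_gid_filename = os.sep.join([vserver_path, "etc", "node.gid"])
# -> /vservers/example_slice/etc/slice.gid and /vservers/example_slice/etc/node.gid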
Example #8
    def run(self, options):
        config = Config()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = PlShell(config)

        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()

        # create hash by (type,hrn)
        # we essentially use this to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                     if record.pointer != -1] )

        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records:
            record.stale = True

        ######## retrieve PLC data
        # Get all plc sites
        # retrieve only required stuff
        sites = shell.GetSites({
            'peer_id': None,
            'enabled': True
        }, [
            'site_id', 'login_base', 'node_ids', 'slice_ids', 'person_ids',
            'name'
        ])
        # create a hash of sites by login_base
        #        sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all plc users
        persons = shell.GetPersons({
            'peer_id': None,
            'enabled': True
        }, ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
        # create a hash of persons by person_id
        persons_by_id = dict([(person['person_id'], person)
                              for person in persons])
        # also gather non-enabled user accounts so as to issue relevant warnings
        disabled_persons = shell.GetPersons({
            'peer_id': None,
            'enabled': False
        }, ['person_id'])
        disabled_person_ids = [
            person['person_id'] for person in disabled_persons
        ]
        # Get all plc public keys
        # accumulate key ids for keys retrieval
        key_ids = []
        for person in persons:
            key_ids.extend(person['key_ids'])
        keys = shell.GetKeys({
            'peer_id': None,
            'key_id': key_ids,
            'key_type': 'ssh'
        })
        # create a hash of keys by key_id
        keys_by_id = dict([(key['key_id'], key) for key in keys])
        # create a dict person_id -> [ (plc)keys ]
        keys_by_person_id = {}
        for person in persons:
            pubkeys = []
            for key_id in person['key_ids']:
                # by construction all the keys we fetched are ssh keys
                # so gpg keys won't be in there
                try:
                    key = keys_by_id[key_id]
                    pubkeys.append(key)
                except:
                    self.logger.warning(
                        "Could not spot key %d - probably non-ssh" % key_id)
            keys_by_person_id[person['person_id']] = pubkeys
        # Get all plc nodes
        nodes = shell.GetNodes({'peer_id': None},
                               ['node_id', 'hostname', 'site_id'])
        # create hash by node_id
        nodes_by_id = dict([(
            node['node_id'],
            node,
        ) for node in nodes])
        # Get all plc slices
        slices = shell.GetSlices({'peer_id': None},
                                 ['slice_id', 'name', 'person_ids'])
        # create hash by slice_id
        slices_by_id = dict([(slice['slice_id'], slice) for slice in slices])

        # isolate special vini case in separate method
        self.create_special_vini_record(interface_hrn)

        # start importing
        for site in sites:
            try:
                site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
            except:
                site_sfa_created = None
            if site['name'].startswith('sfa:') or site_sfa_created == 'True':
                continue

            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record = self.locate_by_type_hrn('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(
                        hrn=site_hrn,
                        gid=auth_info.get_gid_object(),
                        pointer=site['site_id'],
                        authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info(
                        "PlImporter: imported authority (site) : %s" %
                        site_record)
                    self.remember_record(site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc(
                        "PlImporter: failed to import site %s. Skipping child records"
                        % site_hrn)
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale = False

            # import node records
            for node_id in site['node_ids']:
                try:
                    node = nodes_by_id[node_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot find node_id %s - ignored" %
                        node_id)
                    continue
                site_auth = get_authority(site_hrn)
                site_name = site['login_base']
                node_hrn = hostname_to_hrn(site_auth, site_name,
                                           node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn('node', node_hrn)
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        node_record = RegNode(
                            hrn=node_hrn,
                            gid=node_gid,
                            pointer=node['node_id'],
                            authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported node: %s" %
                                         node_record)
                        self.remember_record(node_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import node %s" % node_hrn)
                        continue
                else:
                    # xxx update the record ...
                    pass
                node_record.stale = False

            site_pis = []
            # import persons
            for person_id in site['person_ids']:
                proceed = False
                if person_id in persons_by_id:
                    person = persons_by_id[person_id]
                    proceed = True
                elif person_id in disabled_person_ids:
                    pass
                else:
                    self.logger.warning(
                        "PlImporter: cannot locate person_id %s in site %s - ignored"
                        % (person_id, site_hrn))
                # make sure to NOT run this if anything is wrong
                if not proceed: continue

                person_hrn = email_to_hrn(site_hrn, person['email'])
                # xxx suspicious again
                if len(person_hrn) > 64: person_hrn = person_hrn[:64]
                person_urn = hrn_to_urn(person_hrn, 'user')

                user_record = self.locate_by_type_hrn('user', person_hrn)

                # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
                def init_person_key(person, plc_keys):
                    pubkey = None
                    if person['key_ids']:
                        # randomly pick first key in set
                        pubkey = plc_keys[0]
                        try:
                            pkey = convert_public_key(pubkey['key'])
                        except:
                            self.logger.warn(
                                'PlImporter: unable to convert public key for %s'
                                % person_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn(
                            "PlImporter: person %s does not have a PL public key"
                            % person_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)

                # new person
                try:
                    plc_keys = keys_by_person_id.get(person['person_id'], [])
                    if not user_record:
                        (pubkey, pkey) = init_person_key(person, plc_keys)
                        person_gid = self.auth_hierarchy.create_gid(
                            person_urn,
                            create_uuid(),
                            pkey,
                            email=person['email'])
                        user_record = RegUser(
                            hrn=person_hrn,
                            gid=person_gid,
                            pointer=person['person_id'],
                            authority=get_authority(person_hrn),
                            email=person['email'])
                        if pubkey:
                            user_record.reg_keys = [
                                RegKey(pubkey['key'], pubkey['key_id'])
                            ]
                        else:
                            self.logger.warning("No key found for user %s" %
                                                user_record)
                        user_record.just_created()
                        global_dbsession.add(user_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported person: %s" %
                                         user_record)
                        self.remember_record(user_record)
                    else:
                        # update the record ?
                        #
                        # if a user key has changed then we need to update the
                        # users gid by forcing an update here
                        #
                        # right now, SFA only has *one* key attached to a user, and this is
                        # the key that the GID was made with
                        # so the logic here is, we consider that things are OK (unchanged) if
                        # all the SFA keys are present as PLC keys
                        # otherwise we trigger the creation of a new gid from *some* plc key
                        # and record this on the SFA side
                        # it would make sense to add a feature in PLC so that one could pick a 'primary'
                        # key but this is not available on the myplc side for now
                        # = or = it would be much better to support several keys in SFA but that
                        # does not seem doable without a major overhaul in the data model as
                        # a GID is attached to a hrn, but it's also linked to a key, so...
                        # NOTE: with this logic, the first key entered in PLC remains the one
                        # current in SFA until it is removed from PLC
                        sfa_keys = user_record.reg_keys

                        def sfa_key_in_list(sfa_key, plc_keys):
                            for plc_key in plc_keys:
                                if plc_key['key'] == sfa_key.key:
                                    return True
                            return False

                        # are all the SFA keys known to PLC ?
                        new_keys = False
                        if not sfa_keys and plc_keys:
                            new_keys = True
                        else:
                            for sfa_key in sfa_keys:
                                if not sfa_key_in_list(sfa_key, plc_keys):
                                    new_keys = True
                        if new_keys:
                            (pubkey, pkey) = init_person_key(person, plc_keys)
                            person_gid = self.auth_hierarchy.create_gid(
                                person_urn, create_uuid(), pkey)
                            person_gid.set_email(person['email'])
                            if not pubkey:
                                user_record.reg_keys = []
                            else:
                                user_record.reg_keys = [
                                    RegKey(pubkey['key'], pubkey['key_id'])
                                ]
                            user_record.gid = person_gid
                            user_record.just_updated()
                            self.logger.info("PlImporter: updated person: %s" %
                                             user_record)
                    user_record.email = person['email']
                    global_dbsession.commit()
                    user_record.stale = False
                    # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                    # this is valid for all sites she is in..
                    # PI is coded with role_id==20
                    if 20 in person['role_ids']:
                        site_pis.append(user_record)
                except:
                    self.logger.log_exc(
                        "PlImporter: failed to import person %d %s" %
                        (person['person_id'], person['email']))

            # maintain the list of PIs for a given site
            # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
            # site_pis = list(set(site_pis))
            # this was likely due to a bug in the above logic, that had to do with disabled persons
            # being improperly handled, and where the whole loop on persons
            # could be performed twice with the same person...
            # so hopefully we do not need to eliminate duplicates explicitly here anymore
            site_record.reg_pis = list(set(site_pis))
            global_dbsession.commit()

            # import slices
            for slice_id in site['slice_ids']:
                try:
                    slice = slices_by_id[slice_id]
                except:
                    self.logger.warning(
                        "PlImporter: cannot locate slice_id %s - ignored" %
                        slice_id)
                    continue
                slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
                slice_record = self.locate_by_type_hrn('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(
                            urn, create_uuid(), pkey)
                        slice_record = RegSlice(
                            hrn=slice_hrn,
                            gid=slice_gid,
                            pointer=slice['slice_id'],
                            authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("PlImporter: imported slice: %s" %
                                         slice_record)
                        self.remember_record(slice_record)
                    except:
                        self.logger.log_exc(
                            "PlImporter: failed to import slice %s (%s)" %
                            (slice_hrn, slice['name']))
                else:
                    # xxx update the record ...
                    # given that we record the current set of users anyways, there does not seem to be much left to do here
                    # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
                global_dbsession.commit()
                slice_record.stale = False

        ### remove stale records
        # special records must be preserved
        system_hrns = [
            interface_hrn, root_auth, interface_hrn + '.slicemanager'
        ]
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale = False
            if record.peer_authority:
                record.stale = False
            if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
                record.hrn.endswith("internet2"):
                record.stale = False

        for record in all_records:
            try:
                stale = record.stale
            except:
                stale = True
                self.logger.warning("stale not found with %s" % record)
            if stale:
                self.logger.info("PlImporter: deleting stale record: %s" %
                                 record)
                global_dbsession.delete(record)
                global_dbsession.commit()
Example #9
    def fill_record_hrns(self, records):
        """
        convert pl ids to hrns
        """

        # get ids
        slice_ids, person_ids, site_ids, node_ids = [], [], [], []
        for record in records:
            if 'site_id' in record:
                site_ids.append(record['site_id'])
            if 'site_ids' in record:
                site_ids.extend(record['site_ids'])
            if 'person_ids' in record:
                person_ids.extend(record['person_ids'])
            if 'slice_ids' in record:
                slice_ids.extend(record['slice_ids'])
            if 'node_ids' in record:
                node_ids.extend(record['node_ids'])

        # get pl records
        slices, persons, sites, nodes = {}, {}, {}, {}
        if site_ids:
            site_list = self.shell.GetSites(
                {
                    'peer_id': None,
                    'site_id': site_ids
                }, ['site_id', 'login_base'])
            sites = list_to_dict(site_list, 'site_id')
        if person_ids:
            person_list = self.shell.GetPersons(
                {
                    'peer_id': None,
                    'person_id': person_ids
                }, ['person_id', 'email'])
            persons = list_to_dict(person_list, 'person_id')
        if slice_ids:
            slice_list = self.shell.GetSlices(
                {
                    'peer_id': None,
                    'slice_id': slice_ids
                }, ['slice_id', 'name'])
            slices = list_to_dict(slice_list, 'slice_id')
        if node_ids:
            node_list = self.shell.GetNodes(
                {
                    'peer_id': None,
                    'node_id': node_ids
                }, ['node_id', 'hostname'])
            nodes = list_to_dict(node_list, 'node_id')

        # convert ids to hrns
        for record in records:
            # get all relevant data
            type = record['type']
            pointer = record['pointer']
            auth_hrn = self.hrn
            login_base = ''
            if pointer == -1:
                continue

            if 'site_id' in record:
                site = sites[record['site_id']]
                login_base = site['login_base']
                record['site'] = ".".join([auth_hrn, login_base])
            if 'person_ids' in record:
                emails = [persons[person_id]['email'] for person_id in record['person_ids'] \
                          if person_id in  persons]
                usernames = [email.split('@')[0] for email in emails]
                person_hrns = [
                    ".".join([auth_hrn, login_base, username])
                    for username in usernames
                ]
                record['persons'] = person_hrns
            if 'slice_ids' in record:
                slicenames = [slices[slice_id]['name'] for slice_id in record['slice_ids'] \
                              if slice_id in slices]
                slice_hrns = [
                    slicename_to_hrn(auth_hrn, slicename)
                    for slicename in slicenames
                ]
                record['slices'] = slice_hrns
            if 'node_ids' in record:
                hostnames = [nodes[node_id]['hostname'] for node_id in record['node_ids'] \
                             if node_id in nodes]
                node_hrns = [
                    hostname_to_hrn(auth_hrn, login_base, hostname)
                    for hostname in hostnames
                ]
                record['nodes'] = node_hrns
            if 'site_ids' in record:
                login_bases = [sites[site_id]['login_base'] for site_id in record['site_ids'] \
                               if site_id in sites]
                site_hrns = [
                    ".".join([auth_hrn, lbase]) for lbase in login_bases
                ]
                record['sites'] = site_hrns

            if 'expires' in record:
                date = utcparse(record['expires'])
                datestring = datetime_to_string(date)
                record['expires'] = datestring

        return records