def get_key_from_incoming_ip(self, api):
    """Regenerate the calling node's keypair + gid and scp them onto it.

    The caller is identified solely by its source IP, which must match an
    interface of a node known to the driver; the matching 'node' record in
    the SFA registry gets its gid refreshed.  Returns 1 on success.

    Raises:
        NonExistingRecord: no interface/node matches the caller's IP.
        RecordNotFound: no SFA 'node' record points at that node.
        Exception: an scp command failed (message is the scp output).
    """
    dbsession = api.dbsession()
    # verify that the caller's ip address exists in the db and is an
    # interface for a node in the db
    (ip, port) = api.remote_addr
    interfaces = api.driver.shell.GetInterfaces({'ip': ip}, ['node_id'])
    if not interfaces:
        raise NonExistingRecord("no such ip %(ip)s" % locals())
    nodes = api.driver.shell.GetNodes([interfaces[0]['node_id']],
                                      ['node_id', 'hostname'])
    if not nodes:
        raise NonExistingRecord("no such node using ip %(ip)s" % locals())
    node = nodes[0]

    # look up the sfa record
    record = dbsession.query(RegRecord).filter_by(
        type='node', pointer=node['node_id']).first()
    if not record:
        raise RecordNotFound("node with pointer %s" % node['node_id'])

    # generate a new keypair and gid
    uuid = create_uuid()
    pkey = Keypair(create=True)
    urn = hrn_to_urn(record.hrn, record.type)
    gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey)
    gid = gid_object.save_to_string(save_parents=True)
    record.gid = gid
    # update the record
    dbsession.commit()

    # attempt to scp the key and gid onto the node
    # this will only work for planetlab based components
    (kfd, key_filename) = tempfile.mkstemp()
    (gfd, gid_filename) = tempfile.mkstemp()
    # mkstemp returns open OS-level descriptors; close them right away
    # since save_to_file reopens the files by name (fixes an fd leak)
    os.close(kfd)
    os.close(gfd)
    try:
        pkey.save_to_file(key_filename)
        gid_object.save_to_file(gid_filename, save_parents=True)
        host = node['hostname']
        key_dest = "/etc/sfa/node.key"
        gid_dest = "/etc/sfa/node.gid"
        scp = "/usr/bin/scp"
        #identity = "/etc/planetlab/root_ssh_key.rsa"
        identity = "/etc/sfa/root_ssh_key"
        scp_options = " -i %(identity)s " % locals()
        # constant flag string: applying % locals() to it was a no-op
        scp_options += "-o StrictHostKeyChecking=no "
        scp_key_command = "%(scp)s %(scp_options)s %(key_filename)s root@%(host)s:%(key_dest)s" %\
            locals()
        scp_gid_command = "%(scp)s %(scp_options)s %(gid_filename)s root@%(host)s:%(gid_dest)s" %\
            locals()

        all_commands = [scp_key_command, scp_gid_command]
        for command in all_commands:
            (status, output) = commands.getstatusoutput(command)
            if status:
                # py2/py3-compatible raise form
                raise Exception(output)
    finally:
        # always remove the temporary key/gid files, even if scp failed
        for filename in [key_filename, gid_filename]:
            os.unlink(filename)
    return 1
def create_interface_records(self):
    """Create a registry record for each SFA interface flavour.

    Certificates are generated for every interface type regardless of
    whether that interface is actually enabled in the configuration.
    """
    interface_hrn = self.config.SFA_INTERFACE_HRN
    auth_info = self.auth_hierarchy.get_auth_info(interface_hrn)
    signing_key = auth_info.get_pkey_object()
    for interface_type in ['authority+sa', 'authority+am', 'authority+sm', ]:
        interface_urn = hrn_to_urn(interface_hrn, interface_type)
        interface_gid = self.auth_hierarchy.create_gid(
            interface_urn, create_uuid(), signing_key)
        # for now we have to preserve the authority+<> stuff
        if self.record_exists(interface_type, interface_hrn):
            continue
        new_record = RegAuthority(type=interface_type, hrn=interface_hrn,
                                  gid=interface_gid,
                                  authority=get_authority(interface_hrn))
        new_record.just_created()
        global_dbsession.add(new_record)
        global_dbsession.commit()
        self.logger.info("SfaImporter: imported authority (%s) %s "
                         % (interface_type, new_record))
def sign(options):
    """
    Sign the specified gid.

    Loads the gid named by options.sign, re-signs its embedded public key
    under the local default authority, and writes the result to
    options.outfile (default: ./signed-<hrn>.gid).  Exits with status 1
    when the input file does not exist.
    """
    hierarchy = Hierarchy()
    config = Config()
    default_authority = config.SFA_INTERFACE_HRN
    # presumably validates that we hold the local authority's signing
    # material (the returned value itself is unused) -- TODO confirm
    auth_info = hierarchy.get_auth_info(default_authority)

    # load the gid
    gidfile = os.path.abspath(options.sign)
    if not os.path.isfile(gidfile):
        # print() call form works under both python 2 and 3
        print("no such gid: %s" % gidfile)
        sys.exit(1)
    gid = GID(filename=gidfile)

    # extract pub_key and create new gid
    pkey = gid.get_pubkey()
    urn = gid.get_urn()
    gid = hierarchy.create_gid(urn, create_uuid(), pkey)

    # get the outfile
    outfile = options.outfile
    if not outfile:
        outfile = os.path.abspath('./signed-%s.gid' % gid.get_hrn())

    # save the signed gid
    if options.verbose:
        print("Writing signed gid %s" % outfile)
    gid.save_to_file(outfile, save_parents=True)
def import_slice(self, parent_hrn, slice):
    """Create or update the SFA 'slice' record for a PLC slice dict."""
    # PLC slice names look like "<site>_<name>"; keep only the name part
    slicename = slice['name'].split("_",1)[-1]
    slicename = _cleanup_string(slicename)
    if not slicename:
        self.logger.error("Import: failed to parse slice name %s" %slice['name'])
        return
    hrn = parent_hrn + "." + slicename
    self.logger.info("Import: slice %s"%hrn)
    # slices have no uploaded key: generate a fresh keypair for the gid
    pkey = Keypair(create=True)
    urn = hrn_to_urn(hrn, 'slice')
    slice_gid = self.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
    slice_record = SfaRecord(hrn=hrn, gid=slice_gid, type="slice",
                             pointer=slice['slice_id'])
    slice_record['authority'] = get_authority(slice_record['hrn'])
    table = SfaTable()
    existing_records = table.find({'hrn': hrn, 'type': 'slice',
                                   'pointer': slice['slice_id']})
    if not existing_records:
        table.insert(slice_record)
    else:
        # reuse the existing record_id so this is an update, not a duplicate
        self.logger.info("Import: %s exists, updating " % hrn)
        existing_record = existing_records[0]
        slice_record['record_id'] = existing_record['record_id']
        table.update(slice_record)
def import_users(self, existing_hrns, existing_records):
    """Import openstack users as SFA 'user' records.

    Builds each user's hrn from their name and (when they belong to a
    tenant) the tenant-derived authority, then creates a RegUser record
    for any user that is new or whose keys changed since the last run.

    Returns (users_dict, user_keys): hrn -> user object, hrn -> key list.
    """
    # Get all users
    users = self.shell.auth_manager.users.list()
    users_dict = {}
    keys_filename = self.config.config_path + os.sep + 'person_keys.py'
    old_user_keys = load_keys(keys_filename)
    user_keys = {}
    for user in users:
        auth_hrn = self.config.SFA_INTERFACE_HRN
        if user.tenantId is not None:
            tenant = self.shell.auth_manager.tenants.find(id=user.tenantId)
            auth_hrn = OSXrn(name=tenant.name,
                             auth=self.config.SFA_INTERFACE_HRN,
                             type='authority').get_hrn()
        hrn = OSXrn(name=user.name, auth=auth_hrn, type='user').get_hrn()
        users_dict[hrn] = user
        old_keys = old_user_keys.get(hrn, [])
        keyname = OSXrn(xrn=hrn, type='user').get_slicename()
        keys = [k.public_key
                for k in self.shell.nova_manager.keypairs.findall(name=keyname)]
        user_keys[hrn] = keys
        update_record = False
        if old_keys != keys:
            update_record = True
        if hrn not in existing_hrns or \
           (hrn, 'user') not in existing_records or update_record:
            urn = OSXrn(xrn=hrn, type='user').get_urn()
            if keys:
                try:
                    pkey = convert_public_key(keys[0])
                except Exception:
                    # narrowed from a bare except: still logs and falls
                    # back to a generated keypair
                    self.logger.log_exc(
                        'unable to convert public key for %s' % hrn)
                    pkey = Keypair(create=True)
            else:
                self.logger.warn(
                    "OpenstackImporter: person %s does not have a PL public key" % hrn)
                pkey = Keypair(create=True)
            user_gid = self.auth_hierarchy.create_gid(urn, create_uuid(),
                                                      pkey, email=user.email)
            user_record = RegUser()
            user_record.type = 'user'
            user_record.hrn = hrn
            user_record.gid = user_gid
            user_record.authority = get_authority(hrn)
            global_dbsession.add(user_record)
            global_dbsession.commit()
            self.logger.info("OpenstackImporter: imported person %s"
                             % user_record)

    return users_dict, user_keys
def CreateGid(self, api, xrn, cert):
    """Build a GID for *xrn* and return it serialized as a string.

    The public key is taken from *cert* when one is supplied; otherwise a
    brand new keypair is generated.
    """
    # resolve the enclosing authority first; get_auth_info also checks
    # that this registry actually manages it
    authority = Xrn(xrn=xrn).get_authority_hrn()
    auth_info = api.auth.get_auth_info(authority)
    if cert:
        pkey = Certificate(string=cert).get_pubkey()
    else:
        pkey = Keypair(create=True)
    new_gid = api.auth.hierarchy.create_gid(xrn, create_uuid(), pkey)
    return new_gid.save_to_string(save_parents=True)
def import_slice(self, slice_hrn, slice_record, user_record):
    """
    Create RegSlice record according to the slice hrn if the slice
    does not exist yet. Creates a relationship with the user record
    associated with the slice.
    Commit the record to the database.

    :param slice_hrn: Human readable name of the slice.
    :type slice_hrn: string
    :param slice_record: record of the slice found in the DB, if any.
    :type slice_record: RegSlice or None
    :param user_record: user record found in the DB if any.
    :type user_record: RegUser

    .. todo::Update the record if a slice record already exists.
    """
    if not slice_record:
        # brand-new slice: give it a freshly generated keypair for its gid
        pkey = Keypair(create=True)
        urn = hrn_to_urn(slice_hrn, 'slice')
        slice_gid = \
            self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        slice_record = RegSlice(hrn=slice_hrn, gid=slice_gid,
                                pointer='-1',
                                authority=get_authority(slice_hrn))
        try:
            slice_record.just_created()
            global_dbsession.add(slice_record)
            global_dbsession.commit()
            self.update_just_added_records_dict(slice_record)
        except SQLAlchemyError:
            self.logger.log_exc("IotlabImporter: failed to import slice")
    #No slice update upon import in iotlab
    else:
        # xxx update the record ...
        self.logger.warning("Iotlab Slice update not implemented")

    # record current users affiliated with the slice
    # (replaces, rather than extends, any previous researcher list)
    slice_record.reg_researchers = [user_record]
    try:
        global_dbsession.commit()
        slice_record.stale = False
    except SQLAlchemyError:
        self.logger.log_exc("IotlabImporter: failed to update slice")
def import_users(self, existing_hrns, existing_records):
    """Import openstack users as SFA 'user' records.

    Like the nova_manager variant, but tolerates keystone backends that
    do not expose tenantId/email on the user object, and reads keys
    through the compute manager.
    """
    # Get all users
    users = self.shell.auth_manager.users.list()
    users_dict = {}
    keys_filename = self.config.config_path + os.sep + 'person_keys.py'
    old_user_keys = load_keys(keys_filename)
    user_keys = {}
    for user in users:
        auth_hrn = self.config.SFA_INTERFACE_HRN
        # some backends omit these attributes entirely; the bound name
        # 'e' was unused, and "except E, e" is python-2-only syntax
        try:
            user_tenantId = user.tenantId
            user_email = user.email
        except AttributeError:
            user_tenantId = None
            user_email = None
        if user_tenantId is not None:
            tenant = self.shell.auth_manager.tenants.find(id=user_tenantId)
            if tenant is not None:
                auth_hrn = OSXrn(name=tenant.name,
                                 auth=self.config.SFA_INTERFACE_HRN,
                                 type='authority').get_hrn()
        hrn = OSXrn(name=user.name, auth=auth_hrn, type='user').get_hrn()
        users_dict[hrn] = user
        old_keys = old_user_keys.get(hrn, [])
        keyname = OSXrn(xrn=hrn, type='user').get_slicename()
        keys = [k.public_key
                for k in self.shell.compute_manager.keypairs.findall(name=keyname)]
        user_keys[hrn] = keys
        update_record = False
        if old_keys != keys:
            update_record = True
        if hrn not in existing_hrns or \
           (hrn, 'user') not in existing_records or update_record:
            urn = OSXrn(xrn=hrn, type='user').get_urn()
            if keys:
                try:
                    pkey = convert_public_key(keys[0])
                except Exception:
                    # narrowed from a bare except: log and fall back
                    self.logger.log_exc('unable to convert public key for %s' % hrn)
                    pkey = Keypair(create=True)
            else:
                self.logger.warn("OpenstackImporter: person %s does not have a PL public key"%hrn)
                pkey = Keypair(create=True)
            user_gid = self.auth_hierarchy.create_gid(urn, create_uuid(),
                                                      pkey, email=user_email)
            user_record = RegUser(type='user', hrn=hrn, gid=user_gid,
                                  authority=get_authority(hrn))
            global_dbsession.add(user_record)
            global_dbsession.commit()
            self.logger.info("OpenstackImporter: imported person %s" % user_record)
def import_tenants(self, existing_hrns, existing_records):
    """Import openstack tenants as SFA authorities or slices.

    A tenant whose parent authority is the root interface authority is
    treated as a group/site (authority record); every other tenant
    becomes a slice.  Returns a dict mapping tenant hrn -> tenant.
    """
    root_hrn = self.config.SFA_INTERFACE_HRN
    tenants = self.shell.auth_manager.tenants.list()
    tenants_dict = {}
    for tenant in tenants:
        tenant_hrn = root_hrn + '.' + tenant.name
        tenants_dict[tenant_hrn] = tenant
        parent_hrn = OSXrn(xrn=tenant_hrn, type='authority').get_authority_hrn()
        if tenant_hrn in existing_hrns:
            continue
        if parent_hrn == root_hrn:
            # group/site -> authority record
            new_record = RegAuthority()
            tenant_urn = OSXrn(xrn=tenant_hrn, type='authority').get_urn()
            if not self.auth_hierarchy.auth_exists(tenant_urn):
                self.auth_hierarchy.create_auth(tenant_urn)
            auth_info = self.auth_hierarchy.get_auth_info(tenant_urn)
            new_record.type = 'authority'
            new_record.hrn = tenant_hrn
            new_record.gid = auth_info.get_gid_object()
            new_record.authority = get_authority(tenant_hrn)
            global_dbsession.add(new_record)
            global_dbsession.commit()
            self.logger.info("OpenstackImporter: imported authority: %s"
                             % new_record)
        else:
            # any other tenant -> slice record with a fresh keypair
            new_record = RegSlice()
            tenant_urn = OSXrn(xrn=tenant_hrn, type='slice').get_urn()
            keypair = Keypair(create=True)
            new_record.type = 'slice'
            new_record.hrn = tenant_hrn
            new_record.gid = self.auth_hierarchy.create_gid(
                tenant_urn, create_uuid(), keypair)
            new_record.authority = get_authority(tenant_hrn)
            global_dbsession.add(new_record)
            global_dbsession.commit()
            self.logger.info("OpenstackImporter: imported slice: %s"
                             % new_record)

    return tenants_dict
def import_person(self, parent_hrn, person):
    """
    Register a user record
    """
    hrn = email_to_hrn(parent_hrn, person['email'])
    # ASN.1 will have problems with hrn's longer than 64 characters
    if len(hrn) > 64:
        hrn = hrn[:64]
    self.logger.info("Import: person %s"%hrn)
    key_ids = []
    if 'key_ids' in person and person['key_ids']:
        key_ids = person["key_ids"]
        # get the user's private key from the SSH keys they have uploaded
        # to planetlab
        keys = self.shell.GetKeys(self.plc_auth, key_ids)
        # only the first uploaded key is used for the gid
        key = keys[0]['key']
        pkey = None
        try:
            pkey = convert_public_key(key)
        except:
            self.logger.warn('unable to convert public key for %s' % hrn)
        if not pkey:
            # conversion failed: fall back to a freshly generated keypair
            pkey = Keypair(create=True)
    else:
        # the user has no keys
        self.logger.warn("Import: person %s does not have a PL public key"%hrn)
        # if a key is unavailable, then we still need to put something in the
        # user's GID. So make one up.
        pkey = Keypair(create=True)

    # create the gid
    urn = hrn_to_urn(hrn, 'user')
    person_gid = self.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
    table = SfaTable()
    person_record = SfaRecord(hrn=hrn, gid=person_gid, type="user",
                              pointer=person['person_id'])
    person_record['authority'] = get_authority(person_record['hrn'])
    existing_records = table.find({'hrn': hrn, 'type': 'user',
                                   'pointer': person['person_id']})
    if not existing_records:
        table.insert(person_record)
    else:
        # reuse the existing record_id so this is an update, not a duplicate
        self.logger.info("Import: %s exists, updating " % hrn)
        existing_record = existing_records[0]
        person_record['record_id'] = existing_record['record_id']
        table.update(person_record)
def import_tenants(self, existing_hrns, existing_records):
    """Import openstack tenants as SFA authority or slice records."""
    # Get all tenants
    # A tenant can represent an organizational group (site) or a
    # slice. If a tenant's authorty/parent matches the root authority it is
    # considered a group/site. All other tenants are considered slices.
    tenants = self.shell.auth_manager.tenants.list()
    tenants_dict = {}
    for tenant in tenants:
        hrn = self.config.SFA_INTERFACE_HRN + '.' + tenant.name
        tenants_dict[hrn] = tenant
        authority_hrn = OSXrn(xrn=hrn, type='authority').get_authority_hrn()

        if hrn in existing_hrns:
            # already imported: leave the existing record untouched
            continue

        if authority_hrn == self.config.SFA_INTERFACE_HRN:
            # import group/site
            record = RegAuthority()
            urn = OSXrn(xrn=hrn, type='authority').get_urn()
            if not self.auth_hierarchy.auth_exists(urn):
                self.auth_hierarchy.create_auth(urn)
            auth_info = self.auth_hierarchy.get_auth_info(urn)
            gid = auth_info.get_gid_object()
            record.type='authority'
            record.hrn=hrn
            record.gid=gid
            record.authority=get_authority(hrn)
            dbsession.add(record)
            dbsession.commit()
            self.logger.info("OpenstackImporter: imported authority: %s" % record)
        else:
            record = RegSlice ()
            urn = OSXrn(xrn=hrn, type='slice').get_urn()
            # slices get a brand-new keypair for their gid
            pkey = Keypair(create=True)
            gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
            record.type='slice'
            record.hrn=hrn
            record.gid=gid
            record.authority=get_authority(hrn)
            dbsession.add(record)
            dbsession.commit()
            self.logger.info("OpenstackImporter: imported slice: %s" % record)

    return tenants_dict
def CreateGid(self, api, xrn, cert):
    """Build a GID for *xrn* and return it serialized as a string.

    The public key comes from *cert* when one is supplied, otherwise a
    fresh keypair is generated.  If a registered user matches the hrn,
    their email is embedded in the GID's SubjectAltName.
    """
    # resolve the enclosing authority; get_auth_info also validates it
    authority = Xrn(xrn=xrn).get_authority_hrn()
    auth_info = api.auth.get_auth_info(authority)
    if cert:
        pkey = Certificate(string=cert).get_pubkey()
    else:
        pkey = Keypair(create=True)

    # Add the email of the user to SubjectAltName in the GID
    email = None
    hrn = Xrn(xrn).get_hrn()
    dbsession = api.dbsession()
    reg_user = dbsession.query(RegUser).filter_by(hrn=hrn).first()
    if reg_user:
        email = getattr(reg_user, 'email', None)

    new_gid = api.auth.hierarchy.create_gid(xrn, create_uuid(), pkey,
                                            email=email)
    return new_gid.save_to_string(save_parents=True)
def create_interface_records(self): """ Create a record for each SFA interface """ # just create certs for all sfa interfaces even if they # aren't enabled auth_info = self.auth_hierarchy.get_auth_info(self.config.SFA_INTERFACE_HRN) pkey = auth_info.get_pkey_object() hrn=self.config.SFA_INTERFACE_HRN for type in [ 'authority+sa', 'authority+am', 'authority+sm', ]: urn = hrn_to_urn(hrn, type) gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey) # for now we have to preserve the authority+<> stuff if self.record_exists (type,hrn): continue interface_record = RegAuthority(type=type, hrn=hrn, gid=gid, authority=get_authority(hrn)) interface_record.just_created() dbsession.add (interface_record) dbsession.commit() self.logger.info("SfaImporter: imported authority (%s) %s " % (type,interface_record))
def create_interface_records(self):
    """
    Create a record for each SFA interface
    """
    # certs/records are created for every interface flavour even when it
    # is not enabled in the configuration
    interface_hrn = self.config.SFA_INTERFACE_HRN
    auth_info = self.AuthHierarchy.get_auth_info(interface_hrn)
    signing_key = auth_info.get_pkey_object()
    table = SfaTable()
    for interface in ('authority+sa', 'authority+am', 'authority+sm'):
        existing = table.find({'type': interface, 'hrn': interface_hrn})
        if existing:
            continue
        self.logger.info("Import: interface %s %s " % (interface_hrn, interface))
        urn = hrn_to_urn(interface_hrn, interface)
        gid = self.AuthHierarchy.create_gid(urn, create_uuid(), signing_key)
        record = SfaRecord(hrn=interface_hrn, gid=gid, type=interface,
                           pointer=-1)
        record['authority'] = get_authority(interface_hrn)
        table.insert(record)
def import_node(self, hrn, node):
    """Create or refresh the SFA 'node' record for a testbed node.

    A fresh keypair/gid is generated on every import; if a matching
    record (same hrn/type/pointer) already exists it is updated in place.
    """
    self.logger.info("Import: node %s" % hrn)
    # ASN.1 will have problems with hrn's longer than 64 characters
    if len(hrn) > 64:
        hrn = hrn[:64]
    table = SfaTable()
    # (removed a dead table.find() whose result was immediately
    # overwritten; the authoritative lookup happens below)
    pkey = Keypair(create=True)
    urn = hrn_to_urn(hrn, 'node')
    node_gid = self.AuthHierarchy.create_gid(urn, create_uuid(), pkey)
    node_record = SfaRecord(hrn=hrn, gid=node_gid, type="node",
                            pointer=node['node_id'])
    node_record['authority'] = get_authority(node_record['hrn'])
    existing_records = table.find({'hrn': hrn, 'type': 'node',
                                   'pointer': node['node_id']})
    if not existing_records:
        table.insert(node_record)
    else:
        # reuse the existing record_id so this is an update, not a duplicate
        self.logger.info("Import: %s exists, updating " % hrn)
        existing_record = existing_records[0]
        node_record['record_id'] = existing_record['record_id']
        table.update(node_record)
def call(self):
    """Regenerate the calling node's keypair + gid and scp them onto it.

    The caller is identified solely by its source IP, which must belong
    to an interface of a node known to PLC; the node's SFA record is
    updated with the new gid.  Returns 1 on success.

    Raises:
        NonExistingRecord: no interface/node matches the caller's IP.
        RecordNotFound: no SFA 'node' record points at that node.
        Exception: an scp command failed (message is the scp output).
    """
    # verify that the caller's ip address exists in the db and is an
    # interface for a node in the db
    (ip, port) = self.api.remote_addr
    interfaces = self.api.plshell.GetInterfaces(self.api.plauth,
                                                {'ip': ip}, ['node_id'])
    if not interfaces:
        raise NonExistingRecord("no such ip %(ip)s" % locals())
    nodes = self.api.plshell.GetNodes(self.api.plauth,
                                      [interfaces[0]['node_id']],
                                      ['node_id', 'hostname'])
    if not nodes:
        raise NonExistingRecord("no such node using ip %(ip)s" % locals())
    node = nodes[0]

    # look up the sfa record
    table = SfaTable()
    records = table.findObjects({'type': 'node', 'pointer': node['node_id']})
    if not records:
        raise RecordNotFound("pointer:" + str(node['node_id']))
    record = records[0]

    # generate a new keypair and gid
    uuid = create_uuid()
    pkey = Keypair(create=True)
    urn = hrn_to_urn(record['hrn'], record['type'])
    gid_object = self.api.auth.hierarchy.create_gid(urn, uuid, pkey)
    gid = gid_object.save_to_string(save_parents=True)
    record['gid'] = gid
    record.set_gid(gid)

    # update the record
    table.update(record)

    # attempt to scp the key and gid onto the node
    # this will only work for planetlab based components
    (kfd, key_filename) = tempfile.mkstemp()
    (gfd, gid_filename) = tempfile.mkstemp()
    # mkstemp returns open OS-level descriptors; close them right away
    # since save_to_file reopens the files by name (fixes an fd leak)
    os.close(kfd)
    os.close(gfd)
    try:
        pkey.save_to_file(key_filename)
        gid_object.save_to_file(gid_filename, save_parents=True)
        host = node['hostname']
        key_dest = "/etc/sfa/node.key"
        gid_dest = "/etc/sfa/node.gid"
        scp = "/usr/bin/scp"
        #identity = "/etc/planetlab/root_ssh_key.rsa"
        identity = "/etc/sfa/root_ssh_key"
        scp_options = " -i %(identity)s " % locals()
        # constant flag string: applying % locals() to it was a no-op
        scp_options += "-o StrictHostKeyChecking=no "
        scp_key_command = "%(scp)s %(scp_options)s %(key_filename)s root@%(host)s:%(key_dest)s" %\
            locals()
        scp_gid_command = "%(scp)s %(scp_options)s %(gid_filename)s root@%(host)s:%(gid_dest)s" %\
            locals()

        all_commands = [scp_key_command, scp_gid_command]
        for command in all_commands:
            (status, output) = commands.getstatusoutput(command)
            if status:
                # py2/py3-compatible raise form
                raise Exception(output)
    finally:
        # always remove the temporary key/gid files, even if scp failed
        for filename in [key_filename, gid_filename]:
            os.unlink(filename)
    return 1
def update(api, record_dict):
    """Update an existing SFA record and its backing PLC object.

    Looks up the record by (type, hrn), pushes the user-supplied fields
    into PLC (site/slice/person/node), refreshes the user's key and gid
    when a new key is supplied, and finally updates memberships.
    Returns 1 on success.

    Raises:
        RecordNotFound: no record matches (type, hrn).
        UnknownSfaType: the record type is not handled.
    """
    new_record = SfaRecord(dict=record_dict)
    type = new_record['type']
    hrn = new_record['hrn']
    urn = hrn_to_urn(hrn, type)
    table = SfaTable()
    # make sure the record exists
    records = table.findObjects({'type': type, 'hrn': hrn})
    if not records:
        raise RecordNotFound(hrn)
    record = records[0]
    record['last_updated'] = time.gmtime()

    # Update_membership needs the membership lists in the existing record
    # filled in, so it can see if members were added or removed
    api.fill_record_info(record)

    # Use the pointer from the existing record, not the one that the user
    # gave us. This prevents the user from inserting a forged pointer
    pointer = record['pointer']
    # update the PLC information that was specified with the record
    if (type == "authority"):
        api.plshell.UpdateSite(api.plauth, pointer, new_record)
    elif type == "slice":
        pl_record = api.sfa_fields_to_pl_fields(type, hrn, new_record)
        if 'name' in pl_record:
            # the slice name is immutable in PLC
            pl_record.pop('name')
        api.plshell.UpdateSlice(api.plauth, pointer, pl_record)
    elif type == "user":
        # SMBAKER: UpdatePerson only allows a limited set of fields to be
        # updated. Ideally we should have a more generic way of doing
        # this. I copied the field names from UpdatePerson.py...
        update_fields = {}
        all_fields = new_record
        for key in all_fields.keys():
            if key in ['first_name', 'last_name', 'title', 'email',
                       'password', 'phone', 'url', 'bio', 'accepted_aup',
                       'enabled']:
                update_fields[key] = all_fields[key]
        api.plshell.UpdatePerson(api.plauth, pointer, update_fields)

        if 'key' in new_record and new_record['key']:
            # must check this key against the previous one if it exists
            persons = api.plshell.GetPersons(api.plauth, [pointer],
                                             ['key_ids'])
            person = persons[0]
            keys = api.plshell.GetKeys(api.plauth, person['key_ids'])
            key_exists = False
            # normalize: callers may pass the key as a one-element list
            # (isinstance(x, list) replaces the py2-only types.ListType)
            if isinstance(new_record['key'], list):
                new_key = new_record['key'][0]
            else:
                new_key = new_record['key']
            # Delete all stale keys.  Compare against the normalized
            # new_key: the original compared new_record['key'] itself,
            # which never matches when the key was passed as a list, so
            # the matching key was deleted and re-added every time.
            for key in keys:
                if new_key != key['key']:
                    api.plshell.DeleteKey(api.plauth, key['key_id'])
                else:
                    key_exists = True
            if not key_exists:
                api.plshell.AddPersonKey(api.plauth, pointer,
                                         {'key_type': 'ssh',
                                          'key': new_key})

            # update the openssl key and gid
            pkey = convert_public_key(new_key)
            uuid = create_uuid()
            gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey)
            gid = gid_object.save_to_string(save_parents=True)
            record['gid'] = gid
            record = SfaRecord(dict=record)
            table.update(record)
    elif type == "node":
        api.plshell.UpdateNode(api.plauth, pointer, new_record)
    else:
        raise UnknownSfaType(type)

    # update membership for researchers, pis, owners, operators
    api.update_membership(record, new_record)

    return 1
def import_persons_and_slices(self, testbed_shell):
    """
    Gets user data from LDAP, process the information.
    Creates hrn for the user's slice, the user's gid, creates
    the RegUser record associated with user. Creates the RegKey record
    associated nwith the user's key.
    Saves those records into the SFA DB.
    import the user's slice onto the database as well by calling
    import_slice.

    :param testbed_shell: IotlabDriver object, used to have access to
        testbed_shell attributes.
    :type testbed_shell: IotlabDriver

    .. warning:: does not support multiple keys per user
    """
    ldap_person_listdict = testbed_shell.GetPersons()
    self.logger.info("IOTLABIMPORT \t ldap_person_listdict %s \r\n" %
                     (ldap_person_listdict))

    # import persons
    for person in ldap_person_listdict:
        # NOTE(review): this format string has no placeholder, so the
        # person is never actually logged -- confirm intent
        self.logger.info("IotlabImporter: person :" % (person))
        if 'ssh-rsa' not in person['pkey']:
            #people with invalid ssh key (ssh-dss, empty, bullshit keys...)
            #won't be imported
            continue
        person_hrn = person['hrn']
        slice_hrn = self.slicename_to_hrn(person['hrn'])

        # xxx suspicious again
        if len(person_hrn) > 64:
            person_hrn = person_hrn[:64]
        person_urn = hrn_to_urn(person_hrn, 'user')

        self.logger.info("IotlabImporter: users_rec_by_email %s " %
                         (self.users_rec_by_email))

        #Check if user using person['email'] from LDAP is already registered
        #in SFA. One email = one person. In this case, do not create another
        #record for this person
        #person_hrn returned by GetPerson based on iotlab root auth +
        #uid ldap
        user_record = self.find_record_by_type_hrn('user', person_hrn)

        if not user_record and person['email'] in self.users_rec_by_email:
            user_record = self.users_rec_by_email[person['email']]
            person_hrn = user_record.hrn
            person_urn = hrn_to_urn(person_hrn, 'user')

        slice_record = self.find_record_by_type_hrn('slice', slice_hrn)

        iotlab_key = person['pkey']
        # new person
        if not user_record:
            (pubkey, pkey) = self.init_person_key(person, iotlab_key)
            if pubkey is not None and pkey is not None:
                person_gid = \
                    self.auth_hierarchy.create_gid(person_urn,
                                                   create_uuid(), pkey)
                if person['email']:
                    self.logger.debug("IOTLAB IMPORTER \
                        PERSON EMAIL OK email %s " % (person['email']))
                    person_gid.set_email(person['email'])
                    user_record = \
                        RegUser(hrn=person_hrn, gid=person_gid,
                                pointer='-1',
                                authority=get_authority(person_hrn),
                                email=person['email'])
                else:
                    user_record = \
                        RegUser(hrn=person_hrn, gid=person_gid,
                                pointer='-1',
                                authority=get_authority(person_hrn))

                if pubkey:
                    user_record.reg_keys = [RegKey(pubkey)]
                else:
                    self.logger.warning("No key found for user %s" %
                                        (user_record))

                try:
                    user_record.just_created()
                    global_dbsession.add(user_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported person \
                        %s" % (user_record))
                    self.update_just_added_records_dict(user_record)
                except SQLAlchemyError:
                    self.logger.log_exc("IotlabImporter: \
                        failed to import person %s" % (person))
        else:
            # update the record ?
            # if user's primary key has changed then we need to update
            # the users gid by forcing an update here
            sfa_keys = user_record.reg_keys

            new_key = False
            # NOTE(review): 'is not' is an identity comparison between
            # the raw LDAP key string and a list of RegKey records, so
            # this is always True and the gid is regenerated on every
            # run -- a content comparison was probably intended; confirm
            # before changing
            if iotlab_key is not sfa_keys:
                new_key = True
            if new_key:
                self.logger.info("IotlabImporter: \t \t USER UPDATE \
                    person: %s" % (person['hrn']))
                (pubkey, pkey) = self.init_person_key(person, iotlab_key)
                person_gid = \
                    self.auth_hierarchy.create_gid(person_urn,
                                                   create_uuid(), pkey)
                if not pubkey:
                    user_record.reg_keys = []
                else:
                    user_record.reg_keys = [RegKey(pubkey)]
                self.logger.info("IotlabImporter: updated person: %s"
                                 % (user_record))

            if person['email']:
                user_record.email = person['email']

        try:
            global_dbsession.commit()
            user_record.stale = False
        except SQLAlchemyError:
            self.logger.log_exc("IotlabImporter: \
                failed to update person %s" % (person))

        self.import_slice(slice_hrn, slice_record, user_record)
def import_nodes(self, site_node_ids, nodes_by_id, testbed_shell):
    """
    Creates appropriate hostnames and RegNode records for each node in
    site_node_ids, based on the information given by the dict nodes_by_id
    that was made from data from OAR. Saves the records to the DB.

    :param site_node_ids: site's node ids
    :type site_node_ids: list of integers
    :param nodes_by_id: dictionary , key is the node id, value is the a dict
        with node information.
    :type nodes_by_id: dictionary
    :param testbed_shell: IotlabDriver object, used to have access to
        testbed_shell attributes.
    :type testbed_shell: IotlabDriver

    :returns: None
    :rtype: None
    """
    for node_id in site_node_ids:
        try:
            node = nodes_by_id[node_id]
        except KeyError:
            # OAR returned an id we have no detail for: skip it
            self.logger.warning("IotlabImporter: cannot find node_id %s \
- ignored" % (node_id))
            continue
        escaped_hrn = \
            self.hostname_to_hrn_escaped(testbed_shell.root_auth,
                                         node['hostname'])
        self.logger.info("IOTLABIMPORTER node %s " % (node))
        hrn = node['hrn']

        # xxx this sounds suspicious
        if len(hrn) > 64:
            hrn = hrn[:64]
        node_record = self.find_record_by_type_hrn('node', hrn)
        if not node_record:
            # new node: generate a keypair and gid for it
            pkey = Keypair(create=True)
            urn = hrn_to_urn(escaped_hrn, 'node')
            node_gid = \
                self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)

            def testbed_get_authority(hrn):
                """
                Gets the authority part in the hrn.

                :param hrn: hrn whose authority we are looking for.
                :type hrn: string
                :returns: splits the hrn using the '.' separator and
                    returns the authority part of the hrn.
                :rtype: string
                """
                return hrn.split(".")[0]

            node_record = RegNode(hrn=hrn, gid=node_gid,
                                  pointer='-1',
                                  authority=testbed_get_authority(hrn))
            try:
                node_record.just_created()
                global_dbsession.add(node_record)
                global_dbsession.commit()
                self.logger.info("IotlabImporter: imported node: %s"
                                 % node_record)
                self.update_just_added_records_dict(node_record)
            except SQLAlchemyError:
                self.logger.log_exc(
                    "IotlabImporter: failed to import node")
        else:
            #TODO: xxx update the record ...
            pass
        # either way, the node is still present on the testbed
        node_record.stale = False
def run(self, options):
    """Synchronise the SFA registry with the contents of the CLab testbed.

    Imports the site authority, then its nodes, users and slices, marking
    every record seen as fresh; records not seen (and not system records)
    are deleted from the registry at the end.
    """
    config = Config()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = ClabShell(config)

    # retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()

    # Delete all default records
    #for record in all_records:
    #    global_dbsession.delete(record)
    #    global_dbsession.commit()
    #all_records = global_dbsession.query(RegRecord).all()

    # Dicts to avoid duplicates in SFA database
    # create dict keyed by (type,hrn)
    self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                     for record in all_records])
    # create dict keyed by (type,pointer)
    self.records_by_type_pointer = dict([((record.type, record.pointer), record)
                                         for record in all_records
                                         if record.pointer != -1])

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records:
        record.stale = True

    # Retrieve data from the CLab testbed and create dictionaries by id
    # SITE
    sites = [shell.get_testbed_info()]
    # USERS
    users = shell.get_users({})
    #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
    # KEYS
    # auth_tokens of the users. Dict (user_id:[keys])
    # NODES
    nodes = shell.get_nodes({})
    # SLICES
    slices = shell.get_slices({})

    # Import records to the SFA registry
    # SITE
    for site in sites:
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        site_record = self.locate_by_type_hrn('authority', site_hrn)
        if not site_record:
            # Create/Import record for the site authority
            try:
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                # Create record for the site authority and add it to the Registry
                site_record = RegAuthority(hrn=site_hrn,
                                           gid=auth_info.get_gid_object(),
                                           pointer=-1,
                                           authority=get_authority(site_hrn))
                site_record.just_created()
                global_dbsession.add(site_record)
                global_dbsession.commit()
                self.logger.info(
                    "CLabImporter: imported authority (site) : %s" % site_hrn)
                self.remember_record(site_record)
            except:
                # if the site import fails then there is no point in trying to import the
                # site's child records (node, slices, persons), so skip them.
                self.logger.log_exc(
                    "CLabImporter: failed to import site. Skipping child records")
                continue
        else:
            # Authority record already in the SFA registry. Update?
            pass

        # Fresh record in SFA Registry
        site_record.stale = False

        # DEBUG
        #print '*********** ALL RECORDS ***********'
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records:
        #    print record

        # For the current site authority, import child entities/records
        # NODES
        for node in nodes:
            # Obtain parameters of the node: site_auth, site_name and hrn of the node
            site_auth = get_authority(site_hrn)
            site_name = site['name']
            node_hrn = hostname_to_hrn(site_hrn, node['name'])
            # Reduce hrn up to 64 characters
            if len(node_hrn) > 64:
                node_hrn = node_hrn[:64]
            # Try to locate the node_hrn in the SFA records
            node_record = self.locate_by_type_hrn('node', node_hrn)
            if not node_record:
                # Create/Import record for the node
                try:
                    # Create a keypair for the node
                    pkey = Keypair(create=True)
                    # Obtain parameters
                    urn = hrn_to_urn(node_hrn, 'node')
                    node_gid = self.auth_hierarchy.create_gid(
                        urn, create_uuid(), pkey)
                    # Create record for the node and add it to the Registry
                    node_record = RegNode(hrn=node_hrn,
                                          gid=node_gid,
                                          pointer=node['id'],
                                          authority=get_authority(node_hrn))
                    node_record.just_created()
                    global_dbsession.add(node_record)
                    global_dbsession.commit()
                    self.logger.info("CLabImporter: imported node: %s"
                                     % node_hrn)
                    self.remember_record(node_record)
                except:
                    self.logger.log_exc(
                        "CLabImporter: failed to import node")
            else:
                # Node record already in the SFA registry. Update?
                pass

            # Fresh record in SFA Registry
            node_record.stale = False

        # DEBUG
        #print '*********** ALL RECORDS ***********'
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records:
        #    print record

        # USERS
        for user in users:
            # dummyimporter uses email... but Clab can use user['name']
            user_hrn = username_to_hrn(site_hrn, user['name'])
            # Reduce hrn up to 64 characters
            if len(user_hrn) > 64:
                user_hrn = user_hrn[:64]
            user_urn = hrn_to_urn(user_hrn, 'user')
            # Try to locate the user_hrn in the SFA records
            user_record = self.locate_by_type_hrn('user', user_hrn)

            # Auxiliary function to get the keypair of the user from the testbed database
            # If multiple keys, randomly pick the first key in the set
            # If no keys, generate a new keypair for the user's gird
            def init_user_key(user):
                pubkey = None
                pkey = None
                if user['auth_tokens']:
                    # randomly pick first key in set
                    for key in user['auth_tokens']:
                        pubkey = key
                        try:
                            pkey = convert_public_key(pubkey)
                            break
                        except:
                            continue
                    if not pkey:
                        self.logger.warn(
                            'CLabImporter: unable to convert public key for %s' % user_hrn)
                        pkey = Keypair(create=True)
                else:
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn(
                        "CLabImporter: user %s does not have a CLab public key" % user_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)
            ###########################

            try:
                if not user_record:
                    # Create/Import record for the user
                    # Create a keypair for the node
                    (pubkey, pkey) = init_user_key(user)
                    # Obtain parameters
                    user_gid = self.auth_hierarchy.create_gid(
                        user_urn, create_uuid(), pkey)
                    user_gid.set_email("*****@*****.**" % (user['name']))
                    # Create record for the node and add it to the Registry
                    user_record = RegUser(hrn=user_hrn,
                                          gid=user_gid,
                                          pointer=user['id'],
                                          authority=get_authority(user_hrn),
                                          email="*****@*****.**" % (user['name']))
                    if pubkey:
                        user_record.reg_keys = [RegKey(pubkey)]
                    else:
                        self.logger.warning("No key found for user %s"
                                            % (user_hrn))
                    user_record.just_created()
                    global_dbsession.add(user_record)
                    global_dbsession.commit()
                    self.logger.info("ClabImporter: imported person: %s"
                                     % (user_hrn))
                    self.remember_record(user_record)
                else:
                    # update the record ?
                    # if user's primary key has changed then we need to update the
                    # users gid by forcing an update here
                    sfa_keys = user_record.reg_keys

                    def key_in_list(key, sfa_keys):
                        # True when the raw key already matches one of
                        # the registered RegKey records
                        for reg_key in sfa_keys:
                            if reg_key.key == key:
                                return True
                        return False

                    # is there a new key in Dummy TB ?
                    new_keys = False
                    for key in user['auth_tokens']:
                        if not key_in_list(key, sfa_keys):
                            new_keys = True
                    if new_keys:
                        (pubkey, pkey) = init_user_key(user)
                        user_gid = self.auth_hierarchy.create_gid(
                            user_urn, create_uuid(), pkey)
                        if not pubkey:
                            user_record.reg_keys = []
                        else:
                            user_record.reg_keys = [RegKey(pubkey)]
                        self.logger.info("CLabImporter: updated person: %s"
                                         % (user_hrn))
                    user_record.email = "*****@*****.**" % (user['name'])
                    global_dbsession.commit()

                # Fresh record in SFA Registry
                user_record.stale = False
            except:
                self.logger.log_exc(
                    "CLabImporter: failed to import user %d %s"
                    % (user['id'], user['name']))

        # DEBUG
        #print '*********** ALL RECORDS ***********'
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records:
        #    print record

        # SLICES
        for slice in slices:
            # Obtain parameters of the node: site_auth, site_name and hrn of the slice
            slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
            # Try to locate the slice_hrn in the SFA records
            slice_record = self.locate_by_type_hrn('slice', slice_hrn)
            if not slice_record:
                # Create/Import record for the slice
                try:
                    # Create a keypair for the slice
                    pkey = Keypair(create=True)
                    # Obtain parameters
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(
                        urn, create_uuid(), pkey)
                    # Create record for the slice and add it to the Registry
                    slice_record = RegSlice(hrn=slice_hrn,
                                            gid=slice_gid,
                                            pointer=slice['id'],
                                            authority=get_authority(slice_hrn))
                    slice_record.just_created()
                    global_dbsession.add(slice_record)
                    global_dbsession.commit()
                    self.logger.info("CLabImporter: imported slice: %s"
                                     % slice_hrn)
                    self.remember_record(slice_record)
                except:
                    self.logger.log_exc(
                        "CLabImporter: failed to import slice")
            else:
                # Slice record already in the SFA registry. Update?
                self.logger.warning(
                    "Slice already existing in SFA Registry")
                pass

            # Get current users associated with the slice
            users_of_slice = shell.get_users_by_slice(slice)
            # record current users associated with the slice
            slice_record.reg_researchers = \
                [ self.locate_by_type_pointer ('user',user['id'])
                  for user in users_of_slice]
            global_dbsession.commit()

            # Fresh record in SFA Registry
            slice_record.stale = False

    # Remove stale records. Old/non-fresh records that were in the SFA Registry
    # Preserve special records
    system_hrns = [ interface_hrn, root_auth,
                    interface_hrn + '.slicemanager' ]
    for record in all_records:
        if record.hrn in system_hrns:
            record.stale = False
        if record.peer_authority:
            record.stale = False

    # Remove all the records that do not have its stale parameter set to False
    for record in all_records:
        try:
            stale = record.stale
        except:
            stale = True
            self.logger.warning("stale not found with %s" % record)
        if stale:
            self.logger.info("CLabImporter: deleting stale record: %s"
                             % record)
            global_dbsession.delete(record)
            global_dbsession.commit()

    # DEBUG
    print 'SFA REGISTRY - Result of Import:'
    all_records = global_dbsession.query(RegRecord).all()
    for record in all_records:
        print record
def run (self, options): config = Config () interface_hrn = config.SFA_INTERFACE_HRN root_auth = config.SFA_REGISTRY_ROOT_AUTH shell = DummyShell (config) ######## retrieve all existing SFA objects all_records = global_dbsession.query(RegRecord).all() # create hash by (type,hrn) # we essentially use this to know if a given record is already known to SFA self.records_by_type_hrn = \ dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] ) # create hash by (type,pointer) self.records_by_type_pointer = \ dict ( [ ( (record.type, record.pointer) , record ) for record in all_records if record.pointer != -1] ) # initialize record.stale to True by default, then mark stale=False on the ones that are in use for record in all_records: record.stale=True # DEBUG #all_records = global_dbsession.query(RegRecord).all() #for record in all_records: print record ######## retrieve Dummy TB data # Get all plc sites # retrieve only required stuf sites = [shell.GetTestbedInfo()] print "sites: " + sites # create a hash of sites by login_base # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] ) # Get all dummy TB users users = shell.GetUsers() # create a hash of users by user_id users_by_id = dict ( [ ( user['user_id'], user) for user in users ] ) # Get all dummy TB public keys keys = [] for user in users: if 'keys' in user: keys.extend(user['keys']) # create a dict user_id -> [ keys ] keys_by_person_id = {} for user in users: if 'keys' in user: keys_by_person_id[user['user_id']] = user['keys'] # Get all dummy TB nodes nodes = shell.GetNodes() # create hash by node_id nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] ) # Get all dummy TB slices slices = shell.GetSlices() # create hash by slice_id slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] ) # start importing print " STARTING FOR SITES" for site in sites: site_hrn = _get_site_hrn(interface_hrn, site) # import if hrn is not in list of 
existing hrns or if the hrn exists # but its not a site record site_record=self.locate_by_type_hrn ('authority', site_hrn) print site_hrn print site_record if not site_record: try: print "TRY TO CREATE SITE RECORD" urn = hrn_to_urn(site_hrn, 'authority') if not self.auth_hierarchy.auth_exists(urn): print "create auth "+urn self.auth_hierarchy.create_auth(urn) auth_info = self.auth_hierarchy.get_auth_info(urn) site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(), pointer= -1, authority=get_authority(site_hrn)) site_record.just_created() print "urn: "+urn print "auth_info: " + auth_info print site_record global_dbsession.add(site_record) global_dbsession.commit() self.logger.info("DummyImporter: imported authority (site) : %s" % site_record) self.remember_record (site_record) except: # if the site import fails then there is no point in trying to import the # site's child records (node, slices, persons), so skip them. self.logger.log_exc("DummyImporter: failed to import site. Skipping child records") continue else: # xxx update the record ... pass site_record.stale=False # import node records for node in nodes: site_auth = get_authority(site_hrn) site_name = site['name'] node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname']) # xxx this sounds suspicious if len(node_hrn) > 64: node_hrn = node_hrn[:64] node_record = self.locate_by_type_hrn ( 'node', node_hrn ) if not node_record: try: pkey = Keypair(create=True) urn = hrn_to_urn(node_hrn, 'node') node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey) node_record = RegNode (hrn=node_hrn, gid=node_gid, pointer =node['node_id'], authority=get_authority(node_hrn)) node_record.just_created() global_dbsession.add(node_record) global_dbsession.commit() self.logger.info("DummyImporter: imported node: %s" % node_record) self.remember_record (node_record) except: self.logger.log_exc("DummyImporter: failed to import node") else: # xxx update the record ... 
pass node_record.stale=False all_records = global_dbsession.query(RegRecord).all() for record in all_records: print record site_pis=[] # import users for user in users: user_hrn = email_to_hrn(site_hrn, user['email']) # xxx suspicious again if len(user_hrn) > 64: user_hrn = user_hrn[:64] user_urn = hrn_to_urn(user_hrn, 'user') user_record = self.locate_by_type_hrn ( 'user', user_hrn) # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object) def init_user_key (user): pubkey = None pkey = None if user['keys']: # randomly pick first key in set for key in user['keys']: pubkey = key try: pkey = convert_public_key(pubkey) break except: continue if not pkey: self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn) pkey = Keypair(create=True) else: # the user has no keys. Creating a random keypair for the user's gid self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn) pkey = Keypair(create=True) return (pubkey, pkey) # new user try: if not user_record: (pubkey,pkey) = init_user_key (user) user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey) user_gid.set_email(user['email']) user_record = RegUser (hrn=user_hrn, gid=user_gid, pointer=user['user_id'], authority=get_authority(user_hrn), email=user['email']) if pubkey: user_record.reg_keys=[RegKey (pubkey)] else: self.logger.warning("No key found for user %s"%user_record) user_record.just_created() global_dbsession.add (user_record) global_dbsession.commit() self.logger.info("DummyImporter: imported person: %s" % user_record) self.remember_record ( user_record ) else: # update the record ? # if user's primary key has changed then we need to update the # users gid by forcing an update here sfa_keys = user_record.reg_keys def key_in_list (key,sfa_keys): for reg_key in sfa_keys: if reg_key.key==key: return True return False # is there a new key in Dummy TB ? 
new_keys=False for key in user['keys']: if not key_in_list (key,sfa_keys): new_keys = True if new_keys: (pubkey,pkey) = init_user_key (user) user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey) if not pubkey: user_record.reg_keys=[] else: user_record.reg_keys=[ RegKey (pubkey)] self.logger.info("DummyImporter: updated person: %s" % user_record) user_record.email = user['email'] global_dbsession.commit() user_record.stale=False except: self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email'])) # import slices for slice in slices: slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name']) slice_record = self.locate_by_type_hrn ('slice', slice_hrn) if not slice_record: try: pkey = Keypair(create=True) urn = hrn_to_urn(slice_hrn, 'slice') slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey) slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, pointer=slice['slice_id'], authority=get_authority(slice_hrn)) slice_record.just_created() global_dbsession.add(slice_record) global_dbsession.commit() self.logger.info("DummyImporter: imported slice: %s" % slice_record) self.remember_record ( slice_record ) except: self.logger.log_exc("DummyImporter: failed to import slice") else: # xxx update the record ... 
self.logger.warning ("Slice update not yet implemented") pass # record current users affiliated with the slice slice_record.reg_researchers = \ [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ] global_dbsession.commit() slice_record.stale=False ### remove stale records # special records must be preserved system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager'] for record in all_records: if record.hrn in system_hrns: record.stale=False if record.peer_authority: record.stale=False for record in all_records: try: stale=record.stale except: stale=True self.logger.warning("stale not found with %s"%record) if stale: self.logger.info("DummyImporter: deleting stale record: %s" % record) global_dbsession.delete(record) global_dbsession.commit()
def Update(self, api, record_dict):
    """Update an existing SFA registry record from a caller-supplied dict.

    Normalizes the input, locates the existing record by (type, hrn), and:
      - for 'user' records with a new public key, regenerates the gid;
      - refreshes native relations (researchers for slices, PIs for authorities);
      - delegates testbed-side updates to the driver;
      - commits the changes and updates driver-side relations.

    :param api: registry API object providing dbsession(), auth hierarchy and driver.
    :param record_dict: dict describing the record; must contain 'type'.
    :returns: 1 on success.
    :raises RecordNotFound: if no record matches (type, hrn).
    """
    logger.debug("Update: entering with record_dict=%s"%printable(record_dict))
    normalize_input_record (record_dict)
    logger.debug("Update: normalized record_dict=%s"%printable(record_dict))
    dbsession = api.dbsession()
    assert ('type' in record_dict)
    new_record = make_record(dict=record_dict)
    (type, hrn) = (new_record.type, new_record.hrn)

    # make sure the record exists
    record = dbsession.query(RegRecord).filter_by(type=type, hrn=hrn).first()
    if not record:
        raise RecordNotFound("hrn=%s, type=%s"%(hrn, type))
    record.just_updated()

    # Use the pointer from the existing record, not the one that the user
    # gave us. This prevents the user from inserting a forged pointer
    pointer = record.pointer

    # is there a change in keys ?
    new_key = None
    if type == 'user':
        if getattr(new_record, 'keys', None):
            new_key = new_record.keys
            # callers may pass either a single key or a list of keys;
            # only the first one is used (Python 2 types.ListType == list)
            if isinstance (new_key, types.ListType):
                new_key = new_key[0]

    # take new_key into account
    if new_key:
        # update the openssl key and gid; email is taken from the incoming
        # record when present, otherwise preserved from the stored record
        pkey = convert_public_key(new_key)
        uuid = create_uuid()
        urn = hrn_to_urn(hrn, type)
        email = getattr(new_record, 'email', None)
        if email is None:
            email = getattr(record, 'email', None)
        gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey, email = email)
        gid = gid_object.save_to_string(save_parents=True)

    # xxx should do side effects from new_record to record
    # not too sure how to do that
    # not too big a deal with planetlab as the driver is authoritative, but...

    # update native relations
    if isinstance (record, RegSlice):
        # NOTE: 'reg-researchers' (hyphenated) is an attribute set via setattr
        # on incoming records, not a regular identifier
        researcher_hrns = getattr(new_record, 'reg-researchers', None)
        if researcher_hrns is not None:
            record.update_researchers (researcher_hrns, dbsession)
    elif isinstance (record, RegAuthority):
        pi_hrns = getattr(new_record, 'reg-pis', None)
        if pi_hrns is not None:
            record.update_pis (pi_hrns, dbsession)

    # update the PLC information that was specified with the record
    # xxx oddly enough, without this useless statement,
    # record.__dict__ as received by the driver seems to be off
    # anyway the driver should receive an object
    # (and then extract __dict__ itself if needed)
    print "DO NOT REMOVE ME before driver.update, record=%s"%record

    # driver update is best-effort: failures leave new_key_pointer at -1
    # so the registry-side key/gid update below still proceeds
    new_key_pointer = -1
    try:
        (pointer, new_key_pointer) = api.driver.update (record.__dict__, new_record.__dict__, hrn, new_key)
    except:
        pass
    if new_key and new_key_pointer:
        record.reg_keys = [ RegKey (new_key, new_key_pointer)]
        record.gid = gid

    dbsession.commit()
    # update membership for researchers, pis, owners, operators
    self.update_driver_relations (api, record, new_record)

    return 1
def run (self, options):
    """Import the NITOS testbed contents into the SFA registry.

    Phases:
      1. Load all existing SFA records, index them by (type, hrn) and
         (type, pointer), and mark every record stale.
      2. Fetch site, users, nodes and slices from the NITOS shell.
      3. Create missing registry records (authority, node, user, slice),
         refreshing user keys/gids when they changed; records seen are
         marked non-stale.
      4. Delete records still stale (gone from NITOS), preserving system
         records.

    :param options: importer command-line options (not used directly here).
    Side effects: commits through the module-level dbsession.
    """
    config = Config ()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = NitosShell (config)

    ######## retrieve all existing SFA objects
    all_records = dbsession.query(RegRecord).all()

    # create hash by (type,hrn)
    # we essentially use this to know if a given record is already known to SFA
    self.records_by_type_hrn = \
        dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
    # create hash by (type,pointer)
    self.records_by_type_pointer = \
        dict ( [ ( (record.type, record.pointer) , record ) for record in all_records if record.pointer != -1] )

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records:
        record.stale = True

    ######## retrieve NITOS data
    # Get site info
    # retrieve only required stuf
    site = shell.getTestbedInfo()
    sites = [site]
    # create a hash of sites by login_base
    # # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )

    # Get all NITOS users
    users = shell.getUsers()
    # create a hash of users by user_id
    users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )

    # Get all NITOS public keys
    # accumulate key ids for keys retrieval
    # key_ids = []
    # for person in persons:
    #     key_ids.extend(person['key_ids'])
    # keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids, 'key_type': 'ssh'} )
    # # create a hash of keys by key_id
    # keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] )
    # create a dict user_id -> [ (nitos)keys ]
    keys_by_user_id = dict ( [ ( user['user_id'], user['keys']) for user in users ] )

    # Get all nitos nodes
    nodes = shell.getNodes({}, [])
    # create hash by node_id
    nodes_by_id = dict ( [ (node['node_id'], node) for node in nodes ] )

    # Get all nitos slices
    slices = shell.getSlices({}, [])
    # create hash by slice_id
    slices_by_id = dict ( [ (slice['slice_id'], slice) for slice in slices ] )

    # start importing
    for site in sites:
    #for i in [0]:
        site_hrn = _get_site_hrn(interface_hrn, site)
        # import if hrn is not in list of existing hrns or if the hrn exists
        # but its not a site record
        site_record = self.locate_by_type_hrn ('authority', site_hrn)
        if not site_record:
            try:
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                           pointer=0,
                                           authority=get_authority(site_hrn))
                site_record.just_created()
                dbsession.add(site_record)
                dbsession.commit()
                self.logger.info("NitosImporter: imported authority (site) : %s" % site_record)
                self.remember_record (site_record)
            except:
                # if the site import fails then there is no point in trying to import the
                # site's child records (node, slices, persons), so skip them.
                self.logger.log_exc("NitosImporter: failed to import site. Skipping child records")
                continue
        else:
            # xxx update the record ...
            pass
        site_record.stale = False

        # import node records
        for node in nodes:
            site_auth = get_authority(site_hrn)
            site_name = site['name']
            node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
            # xxx this sounds suspicious
            if len(node_hrn) > 64: node_hrn = node_hrn[:64]
            node_record = self.locate_by_type_hrn ( 'node', node_hrn )
            if not node_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(node_hrn, 'node')
                    node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    node_record = RegNode (hrn=node_hrn, gid=node_gid,
                                           pointer=node['node_id'],
                                           authority=get_authority(node_hrn))
                    node_record.just_created()
                    dbsession.add(node_record)
                    dbsession.commit()
                    self.logger.info("NitosImporter: imported node: %s" % node_record)
                    self.remember_record (node_record)
                except:
                    self.logger.log_exc("NitosImporter: failed to import node")
            else:
                # xxx update the record ...
                pass
            node_record.stale = False

        # import users
        for user in users:
            user_hrn = username_to_hrn(interface_hrn, site['name'], user['username'])
            # xxx suspicious again
            if len(user_hrn) > 64: user_hrn = user_hrn[:64]
            user_urn = hrn_to_urn(user_hrn, 'user')
            user_record = self.locate_by_type_hrn ( 'user', user_hrn)

            # return a tuple pubkey (a nitos key object) and pkey (a Keypair object)
            def init_user_key (user):
                pubkey = None
                pkey = None
                if user['keys']:
                    # randomly pick first key in set
                    for key in user['keys']:
                        pubkey = key
                        try:
                            pkey = convert_public_key(pubkey)
                            break
                        except:
                            continue
                    if not pkey:
                        self.logger.warn('NitosImporter: unable to convert public key for %s' % user_hrn)
                        pkey = Keypair(create=True)
                else:
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn("NitosImporter: user %s does not have a NITOS public key"%user_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)

            # new user
            try:
                if not user_record:
                    (pubkey, pkey) = init_user_key (user)
                    user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                    user_gid.set_email(user['email'])
                    user_record = RegUser (hrn=user_hrn, gid=user_gid,
                                           pointer=user['user_id'],
                                           authority=get_authority(user_hrn),
                                           email=user['email'])
                    if pubkey:
                        user_record.reg_keys = [RegKey (pubkey)]
                    else:
                        self.logger.warning("No key found for user %s"%user_record)
                    user_record.just_created()
                    dbsession.add (user_record)
                    dbsession.commit()
                    self.logger.info("NitosImporter: imported user: %s" % user_record)
                    self.remember_record ( user_record )
                else:
                    # update the record ?
                    # if user's primary key has changed then we need to update the
                    # users gid by forcing an update here
                    sfa_keys = user_record.reg_keys
                    def sfa_key_in_list (sfa_key, nitos_user_keys):
                        for nitos_key in nitos_user_keys:
                            if nitos_key == sfa_key: return True
                        return False
                    # are all the SFA keys known to nitos ?
                    new_keys = False
                    if not sfa_keys and user['keys']:
                        new_keys = True
                    else:
                        for sfa_key in sfa_keys:
                            if not sfa_key_in_list (sfa_key.key, user['keys']):
                                new_keys = True
                    if new_keys:
                        (pubkey, pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        if not pubkey:
                            user_record.reg_keys = []
                        else:
                            user_record.reg_keys = [ RegKey (pubkey)]
                        user_record.gid = user_gid
                        user_record.just_updated()
                        self.logger.info("NitosImporter: updated user: %s" % user_record)
                    user_record.email = user['email']
                    dbsession.commit()
                user_record.stale = False
            except:
                self.logger.log_exc("NitosImporter: failed to import user %s %s"%(user['user_id'],user['email']))

        # import slices
        for slice in slices:
            slice_hrn = slicename_to_hrn(interface_hrn, site['name'], slice['slice_name'])
            slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
            if not slice_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                             pointer=slice['slice_id'],
                                             authority=get_authority(slice_hrn))
                    slice_record.just_created()
                    dbsession.add(slice_record)
                    dbsession.commit()
                    self.logger.info("NitosImporter: imported slice: %s" % slice_record)
                    self.remember_record ( slice_record )
                except:
                    self.logger.log_exc("NitosImporter: failed to import slice")
            else:
                # xxx update the record ...
                self.logger.warning ("Slice update not yet implemented")
                pass
            # record current users affiliated with the slice
            slice_record.reg_researchers = \
                [ self.locate_by_type_pointer ('user', int(user_id)) for user_id in slice['user_ids'] ]
            dbsession.commit()
            slice_record.stale = False

    ### remove stale records
    # special records must be preserved
    system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
    for record in all_records:
        if record.hrn in system_hrns:
            record.stale = False
        if record.peer_authority:
            record.stale = False

    for record in all_records:
        try:
            stale = record.stale
        except:
            stale = True
            self.logger.warning("stale not found with %s"%record)
        if stale:
            self.logger.info("NitosImporter: deleting stale record: %s" % record)
            dbsession.delete(record)
            dbsession.commit()
def run (self, options):
    """Import the C-Lab testbed contents into the SFA registry.

    Phases:
      1. Load all existing SFA records, index them by (type, hrn) and
         (type, pointer), and mark every record stale.
      2. Fetch site, users, nodes and slices from the C-Lab shell.
      3. Create missing registry records (authority, node, user, slice),
         refreshing user keys/gids when they changed; records seen are
         marked non-stale.
      4. Delete records still stale (gone from C-Lab), preserving system
         records, then print the resulting registry content.

    :param options: importer command-line options (not used directly here).
    Side effects: commits to global_dbsession; prints a final debug dump.
    """
    config = Config ()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = ClabShell (config)

    # retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()

    # Delete all default records
    #for record in all_records:
    #    global_dbsession.delete(record)
    #    global_dbsession.commit()
    #all_records = global_dbsession.query(RegRecord).all()

    # Dicts to avoid duplicates in SFA database
    # create dict keyed by (type,hrn)
    self.records_by_type_hrn = dict([((record.type, record.hrn), record) for record in all_records ] )
    # create dict keyed by (type,pointer)
    self.records_by_type_pointer = dict([((record.type, record.pointer), record) for record in all_records if record.pointer != -1])

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records:
        record.stale = True

    # Retrieve data from the CLab testbed and create dictionaries by id
    # SITE
    sites = [shell.get_testbed_info()]
    # USERS
    users = shell.get_users({})
    #users_by_id = dict ( [ ( user['id'], user) for user in users ] )
    # KEYS
    # auth_tokens of the users. Dict (user_id:[keys])
    # NODES
    nodes = shell.get_nodes({})
    # SLICES
    slices = shell.get_slices({})

    # Import records to the SFA registry
    # SITE
    for site in sites:
        # Get hrn of the site (authority)
        site_hrn = _get_site_hrn(interface_hrn, site)
        # Try to locate the site_hrn in the SFA records
        site_record = self.locate_by_type_hrn ('authority', site_hrn)
        if not site_record:
            # Create/Import record for the site authority
            try:
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                # Create record for the site authority and add it to the Registry
                site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                           pointer= -1,
                                           authority=get_authority(site_hrn))
                site_record.just_created()
                global_dbsession.add(site_record)
                global_dbsession.commit()
                self.logger.info("CLabImporter: imported authority (site) : %s" % site_hrn)
                self.remember_record (site_record)
            except:
                # if the site import fails then there is no point in trying to import the
                # site's child records (node, slices, persons), so skip them.
                self.logger.log_exc("CLabImporter: failed to import site. Skipping child records")
                continue
        else:
            # Authority record already in the SFA registry. Update?
            pass
        # Fresh record in SFA Registry
        site_record.stale = False

        # DEBUG
        #print '*********** ALL RECORDS ***********'
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records:
        #    print record

        # For the current site authority, import child entities/records
        # NODES
        for node in nodes:
            # Obtain parameters of the node: site_auth, site_name and hrn of the node
            site_auth = get_authority(site_hrn)
            site_name = site['name']
            node_hrn = hostname_to_hrn(site_hrn, node['name'])
            # Reduce hrn up to 64 characters
            if len(node_hrn) > 64: node_hrn = node_hrn[:64]
            # Try to locate the node_hrn in the SFA records
            node_record = self.locate_by_type_hrn ('node', node_hrn )
            if not node_record:
                # Create/Import record for the node
                try:
                    # Create a keypair for the node
                    pkey = Keypair(create=True)
                    # Obtain parameters
                    urn = hrn_to_urn(node_hrn, 'node')
                    node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    # Create record for the node and add it to the Registry
                    node_record = RegNode (hrn=node_hrn, gid=node_gid,
                                           pointer=node['id'],
                                           authority=get_authority(node_hrn))
                    node_record.just_created()
                    global_dbsession.add(node_record)
                    global_dbsession.commit()
                    self.logger.info("CLabImporter: imported node: %s" %node_hrn)
                    self.remember_record (node_record)
                except:
                    self.logger.log_exc("CLabImporter: failed to import node")
            else:
                # Node record already in the SFA registry. Update?
                pass
            # Fresh record in SFA Registry
            node_record.stale = False

        # DEBUG
        #print '*********** ALL RECORDS ***********'
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records:
        #    print record

        # USERS
        for user in users:
            # dummyimporter uses email... but Clab can use user['name']
            user_hrn = username_to_hrn (site_hrn, user['name'])
            # Reduce hrn up to 64 characters
            if len(user_hrn) > 64: user_hrn = user_hrn[:64]
            user_urn = hrn_to_urn(user_hrn, 'user')
            # Try to locate the user_hrn in the SFA records
            user_record = self.locate_by_type_hrn ('user', user_hrn)

            # Auxiliary function to get the keypair of the user from the testbed database
            # If multiple keys, randomly pick the first key in the set
            # If no keys, generate a new keypair for the user's gird
            def init_user_key (user):
                pubkey = None
                pkey = None
                if user['auth_tokens']:
                    # randomly pick first key in set
                    for key in user['auth_tokens']:
                        pubkey = key
                        try:
                            pkey = convert_public_key(pubkey)
                            break
                        except:
                            continue
                    if not pkey:
                        self.logger.warn('CLabImporter: unable to convert public key for %s' % user_hrn)
                        pkey = Keypair(create=True)
                else:
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn("CLabImporter: user %s does not have a CLab public key"%user_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)
            ###########################

            try:
                if not user_record:
                    # Create/Import record for the user
                    # Create a keypair for the node
                    (pubkey, pkey) = init_user_key (user)
                    # Obtain parameters
                    user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                    # NOTE(review): email template appears redacted in this source;
                    # as written, applying % to a string with no conversion
                    # specifier would raise TypeError — confirm original format string
                    user_gid.set_email("*****@*****.**"%(user['name']))
                    # Create record for the node and add it to the Registry
                    user_record = RegUser (hrn=user_hrn, gid=user_gid,
                                           pointer=user['id'],
                                           authority=get_authority(user_hrn),
                                           email="*****@*****.**"%(user['name']))
                    if pubkey:
                        user_record.reg_keys = [RegKey (pubkey)]
                    else:
                        self.logger.warning("No key found for user %s"%user_hrn)
                    user_record.just_created()
                    global_dbsession.add (user_record)
                    global_dbsession.commit()
                    self.logger.info("ClabImporter: imported person: %s" % user_hrn)
                    self.remember_record ( user_record )
                else:
                    # update the record ?
                    # if user's primary key has changed then we need to update the
                    # users gid by forcing an update here
                    sfa_keys = user_record.reg_keys
                    def key_in_list (key, sfa_keys):
                        for reg_key in sfa_keys:
                            if reg_key.key == key: return True
                        return False
                    # is there a new key in Dummy TB ?
                    new_keys = False
                    for key in user['auth_tokens']:
                        if not key_in_list (key, sfa_keys):
                            new_keys = True
                    if new_keys:
                        (pubkey, pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        if not pubkey:
                            user_record.reg_keys = []
                        else:
                            user_record.reg_keys = [ RegKey (pubkey)]
                        self.logger.info("CLabImporter: updated person: %s" % user_hrn)
                    user_record.email = "*****@*****.**"%(user['name'])
                    global_dbsession.commit()
                # Fresh record in SFA Registry
                user_record.stale = False
            except:
                self.logger.log_exc("CLabImporter: failed to import user %d %s"%(user['id'],user['name']))

        # DEBUG
        #print '*********** ALL RECORDS ***********'
        #all_records = global_dbsession.query(RegRecord).all()
        #for record in all_records:
        #    print record

        # SLICES
        for slice in slices:
            # Obtain parameters of the node: site_auth, site_name and hrn of the slice
            slice_hrn = slicename_to_hrn(site_hrn, slice['name'])
            # Try to locate the slice_hrn in the SFA records
            slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
            if not slice_record:
                # Create/Import record for the slice
                try:
                    #Create a keypair for the slice
                    pkey = Keypair(create=True)
                    # Obtain parameters
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    # Create record for the slice and add it to the Registry
                    slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                             pointer=slice['id'],
                                             authority=get_authority(slice_hrn))
                    slice_record.just_created()
                    global_dbsession.add(slice_record)
                    global_dbsession.commit()
                    self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)
                    self.remember_record ( slice_record )
                except:
                    self.logger.log_exc("CLabImporter: failed to import slice")
            else:
                # Slice record already in the SFA registry. Update?
                self.logger.warning ("Slice already existing in SFA Registry")
                pass
            # Get current users associated with the slice
            users_of_slice = shell.get_users_by_slice(slice)
            # record current users associated with the slice
            slice_record.reg_researchers = \
                [ self.locate_by_type_pointer ('user', user['id']) for user in users_of_slice]
            global_dbsession.commit()
            # Fresh record in SFA Registry
            slice_record.stale = False

    # Remove stale records. Old/non-fresh records that were in the SFA Registry
    # Preserve special records
    system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
    for record in all_records:
        if record.hrn in system_hrns:
            record.stale = False
        if record.peer_authority:
            record.stale = False

    # Remove all the records that do not have its stale parameter set to False
    for record in all_records:
        try:
            stale = record.stale
        except:
            stale = True
            self.logger.warning("stale not found with %s"%record)
        if stale:
            self.logger.info("CLabImporter: deleting stale record: %s" % record)
            global_dbsession.delete(record)
            global_dbsession.commit()

    # DEBUG
    print 'SFA REGISTRY - Result of Import:'
    all_records = global_dbsession.query(RegRecord).all()
    for record in all_records:
        print record
def import_single_node(self, nodename):
    '''
    Import one node, identified by name, from the testbed database
    into the SFA Registry.

    Used by the verify_node method (clab_slices.py) when a node is
    automatically created in the testbed database.

    :param nodename: name of the node being imported
    :type string
    '''
    config = Config()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = ClabShell(config)

    self.logger.debug("Import Single node: %s" % nodename)

    # Snapshot of every record currently known to the SFA registry
    existing = global_dbsession.query(RegRecord).all()

    # Duplicate-avoidance lookup tables, keyed by (type, hrn) and
    # (type, pointer) respectively
    self.records_by_type_hrn = dict(
        ((rec.type, rec.hrn), rec) for rec in existing)
    self.records_by_type_pointer = dict(
        ((rec.type, rec.pointer), rec)
        for rec in existing if rec.pointer != -1)

    # Testbed data: the site and the node to be imported
    # SITE
    site = shell.get_testbed_info()
    # NODES
    node = shell.get_node_by(node_name=nodename)

    # hrn of the authority (site) the node belongs to
    site_hrn = _get_site_hrn(interface_hrn, site)

    # Node parameters: site_auth, site_name and hrn of the node
    site_auth = get_authority(site_hrn)
    site_name = site['name']
    node_hrn = hostname_to_hrn(site_hrn, node['name'])
    # hrns are capped at 64 characters
    if len(node_hrn) > 64:
        node_hrn = node_hrn[:64]

    if self.locate_by_type_hrn('node', node_hrn):
        # Node record already in the SFA registry. Update?
        return

    # Create/Import record for the node
    try:
        # Fresh keypair and gid for the node
        keypair = Keypair(create=True)
        urn = hrn_to_urn(node_hrn, 'node')
        node_gid = self.auth_hierarchy.create_gid(
            urn, create_uuid(), keypair)
        # Registry record for the node
        node_record = RegNode(hrn=node_hrn, gid=node_gid,
                              pointer=node['id'],
                              authority=get_authority(node_hrn))
        node_record.just_created()
        global_dbsession.add(node_record)
        global_dbsession.commit()
        self.logger.info("CLabImporter: imported node: %s" % node_hrn)
        self.remember_record(node_record)
    except:
        self.logger.log_exc("CLabImporter: failed to import node")
def import_persons_and_slices(self, testbed_shell):
    """
    Gets user data from LDAP, process the information.
    Creates hrn for the user's slice, the user's gid, creates
    the RegUser record associated with user. Creates the RegKey record
    associated with the user's key. Saves those records into the SFA DB.
    Imports the user's slice onto the database as well by calling
    import_slice.

    :param testbed_shell: IotlabDriver object, used to have access to
        testbed_shell attributes.
    :type testbed_shell: IotlabDriver

    .. warning:: does not support multiple keys per user
    """
    ldap_person_listdict = testbed_shell.GetPersons()
    self.logger.info("IOTLABIMPORT \t ldap_person_listdict %s \r\n"
                     % (ldap_person_listdict))

    # import persons
    for person in ldap_person_listdict:
        # BUG FIX: the original format string had no %s placeholder, so
        # the person dict was never interpolated into the log message.
        self.logger.info("IotlabImporter: person : %s" % (person))
        if 'ssh-rsa' not in person['pkey']:
            # people with invalid ssh key (ssh-dss, empty, bullshit keys...)
            # won't be imported
            continue
        person_hrn = person['hrn']
        slice_hrn = self.slicename_to_hrn(person['hrn'])

        # xxx suspicious again: hrns are capped at 64 characters
        if len(person_hrn) > 64:
            person_hrn = person_hrn[:64]
        person_urn = hrn_to_urn(person_hrn, 'user')

        self.logger.info("IotlabImporter: users_rec_by_email %s "
                         % (self.users_rec_by_email))

        # Check if a user with person['email'] from LDAP is already
        # registered in SFA. One email = one person. In this case, do not
        # create another record for this person.
        # person_hrn returned by GetPerson is based on iotlab root auth
        # + uid ldap
        user_record = self.find_record_by_type_hrn('user', person_hrn)
        if not user_record and person['email'] in self.users_rec_by_email:
            user_record = self.users_rec_by_email[person['email']]
            person_hrn = user_record.hrn
            person_urn = hrn_to_urn(person_hrn, 'user')

        slice_record = self.find_record_by_type_hrn('slice', slice_hrn)

        iotlab_key = person['pkey']
        # new person
        if not user_record:
            (pubkey, pkey) = self.init_person_key(person, iotlab_key)
            if pubkey is not None and pkey is not None:
                person_gid = self.auth_hierarchy.create_gid(
                    person_urn, create_uuid(), pkey)
                if person['email']:
                    self.logger.debug(
                        "IOTLAB IMPORTER PERSON EMAIL OK email %s "
                        % (person['email']))
                    person_gid.set_email(person['email'])
                    user_record = RegUser(
                        hrn=person_hrn, gid=person_gid, pointer='-1',
                        authority=get_authority(person_hrn),
                        email=person['email'])
                else:
                    user_record = RegUser(
                        hrn=person_hrn, gid=person_gid, pointer='-1',
                        authority=get_authority(person_hrn))

                if pubkey:
                    user_record.reg_keys = [RegKey(pubkey)]
                else:
                    self.logger.warning("No key found for user %s"
                                        % (user_record))

                try:
                    user_record.just_created()
                    global_dbsession.add(user_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported person %s"
                                     % (user_record))
                    self.update_just_added_records_dict(user_record)
                except SQLAlchemyError:
                    self.logger.log_exc(
                        "IotlabImporter: failed to import person %s"
                        % (person))
        else:
            # update the record ?
            # if user's primary key has changed then we need to update
            # the user's gid by forcing an update here
            sfa_keys = user_record.reg_keys

            new_key = False
            # BUG FIX: the original test was `iotlab_key is not sfa_keys`,
            # an identity comparison between an LDAP key string and a list
            # of RegKey objects, which is always True and forced a gid/key
            # regeneration on every import run. Only flag a new key when
            # the LDAP key is not already among the stored SFA keys.
            if not any(reg_key.key == iotlab_key for reg_key in sfa_keys):
                new_key = True

            if new_key:
                self.logger.info("IotlabImporter: \t \t USER UPDATE "
                                 "person: %s" % (person['hrn']))
                (pubkey, pkey) = self.init_person_key(person, iotlab_key)
                person_gid = self.auth_hierarchy.create_gid(
                    person_urn, create_uuid(), pkey)
                if not pubkey:
                    user_record.reg_keys = []
                else:
                    user_record.reg_keys = [RegKey(pubkey)]
                self.logger.info("IotlabImporter: updated person: %s"
                                 % (user_record))

            if person['email']:
                user_record.email = person['email']

        try:
            global_dbsession.commit()
            # Fresh record in SFA Registry
            # NOTE(review): if the new-person branch above failed to build
            # user_record (invalid key), user_record is still None here and
            # this raises AttributeError, not SQLAlchemyError — confirm
            # intended behavior with upstream.
            user_record.stale = False
        except SQLAlchemyError:
            self.logger.log_exc(
                "IotlabImporter: failed to update person %s" % (person))

        self.import_slice(slice_hrn, slice_record, user_record)
def run (self, options):
    """
    Import PLC data (sites, nodes, persons, slices) into the SFA registry.

    Records already present in the registry are marked stale up-front;
    every record found in PLC clears its stale flag, and whatever is
    still stale at the end is deleted from the registry.

    :param options: command-line options (unused in this body)
    """
    config = Config ()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = PlShell (config)

    ######## retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()

    # create hash by (type,hrn)
    # we essentially use this to know if a given record is already known to SFA
    self.records_by_type_hrn = \
        dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
    # create hash by (type,pointer)
    self.records_by_type_pointer = \
        dict ( [ ( (record.type, record.pointer) , record ) for record in all_records if record.pointer != -1] )

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records:
        record.stale=True

    ######## retrieve PLC data
    # Get all plc sites
    # retrieve only required stuf
    sites = shell.GetSites({'peer_id': None, 'enabled' : True},
                           ['site_id','login_base','node_ids','slice_ids','person_ids', 'name', 'hrn'])
    # create a hash of sites by login_base
    #sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
    # Get all plc users
    persons = shell.GetPersons({'peer_id': None, 'enabled': True},
                               ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids', 'hrn'])
    # create a hash of persons by person_id
    persons_by_id = dict ( [ ( person['person_id'], person) for person in persons ] )
    # also gather non-enabled user accounts so as to issue relevant warnings
    disabled_persons = shell.GetPersons({'peer_id': None, 'enabled': False}, ['person_id'])
    disabled_person_ids = [ person['person_id'] for person in disabled_persons ]
    # Get all plc public keys
    # accumulate key ids for keys retrieval
    key_ids = []
    for person in persons:
        key_ids.extend(person['key_ids'])
    keys = shell.GetKeys( {'peer_id': None, 'key_id': key_ids, 'key_type': 'ssh'} )
    # create a hash of keys by key_id
    keys_by_id = dict ( [ ( key['key_id'], key ) for key in keys ] )
    # create a dict person_id -> [ (plc)keys ]
    keys_by_person_id = {}
    for person in persons:
        pubkeys = []
        for key_id in person['key_ids']:
            # by construction all the keys we fetched are ssh keys
            # so gpg keys won't be in there
            try:
                key = keys_by_id[key_id]
                pubkeys.append(key)
            except:
                self.logger.warning("Could not spot key %d - probably non-ssh"%key_id)
        keys_by_person_id[person['person_id']] = pubkeys
    # Get all plc nodes
    nodes = shell.GetNodes( {'peer_id': None}, ['node_id', 'hostname', 'site_id'])
    # create hash by node_id
    nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
    # Get all plc slices
    slices = shell.GetSlices( {'peer_id': None}, ['slice_id', 'name', 'person_ids', 'hrn'])
    # create hash by slice_id
    slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )

    # isolate special vini case in separate method
    self.create_special_vini_record (interface_hrn)

    # Get top authority record
    top_auth_record=self.locate_by_type_hrn ('authority', root_auth)
    # accumulates PLC admins (role_id 10), set as PIs of the top authority at the end
    admins = []

    # start importing
    for site in sites:
        # sites flagged as sfa-created are skipped: they originated in SFA itself
        try:
            site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
        except:
            site_sfa_created = None
        if site['name'].startswith('sfa:') or site_sfa_created == 'True':
            continue

        #site_hrn = _get_site_hrn(interface_hrn, site)
        site_hrn = site['hrn']
        # import if hrn is not in list of existing hrns or if the hrn exists
        # but its not a site record
        site_record=self.locate_by_type_hrn ('authority', site_hrn)
        if not site_record:
            try:
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                           pointer=site['site_id'],
                                           authority=get_authority(site_hrn))
                site_record.just_created()
                global_dbsession.add(site_record)
                global_dbsession.commit()
                self.logger.info("PlImporter: imported authority (site) : %s" % site_record)
                self.remember_record (site_record)
            except:
                # if the site import fails then there is no point in trying to import the
                # site's child records (node, slices, persons), so skip them.
                self.logger.log_exc("PlImporter: failed to import site %s. Skipping child records"%site_hrn)
                continue
        else:
            # xxx update the record ...
            pass
        site_record.stale=False

        # import node records
        for node_id in site['node_ids']:
            try:
                node = nodes_by_id[node_id]
            except:
                self.logger.warning ("PlImporter: cannot find node_id %s - ignored"%node_id)
                continue
            site_auth = get_authority(site_hrn)
            site_name = site['login_base']
            node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
            # xxx this sounds suspicious -- hrns are capped at 64 characters
            if len(node_hrn) > 64:
                node_hrn = node_hrn[:64]
            node_record = self.locate_by_type_hrn ( 'node', node_hrn )
            if not node_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(node_hrn, 'node')
                    node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    node_record = RegNode (hrn=node_hrn, gid=node_gid,
                                           pointer =node['node_id'],
                                           authority=get_authority(node_hrn))
                    node_record.just_created()
                    global_dbsession.add(node_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported node: %s" % node_record)
                    self.remember_record (node_record)
                except:
                    self.logger.log_exc("PlImporter: failed to import node %s"%node_hrn)
                    continue
            else:
                # xxx update the record ...
                pass
            node_record.stale=False

        site_pis=[]
        # import persons
        for person_id in site['person_ids']:
            proceed=False
            if person_id in persons_by_id:
                person=persons_by_id[person_id]
                proceed=True
            elif person_id in disabled_person_ids:
                # disabled accounts are silently skipped, no warning
                pass
            else:
                self.logger.warning ("PlImporter: cannot locate person_id %s in site %s - ignored"%(person_id,site_hrn))
            # make sure to NOT run this if anything is wrong
            if not proceed:
                continue

            #person_hrn = email_to_hrn(site_hrn, person['email'])
            person_hrn = person['hrn']
            if person_hrn is None:
                self.logger.warn("Person %s has no hrn - skipped"%person['email'])
                continue
            # xxx suspicious again
            if len(person_hrn) > 64:
                person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')

            user_record = self.locate_by_type_hrn ( 'user', person_hrn)

            # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
            def init_person_key (person, plc_keys):
                pubkey=None
                if person['key_ids']:
                    # randomly pick first key in set
                    pubkey = plc_keys[0]
                    try:
                        pkey = convert_public_key(pubkey['key'])
                    except:
                        self.logger.warn('PlImporter: unable to convert public key for %s' % person_hrn)
                        pkey = Keypair(create=True)
                else:
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn("PlImporter: person %s does not have a PL public key"%person_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)

            # new person
            try:
                plc_keys = keys_by_person_id.get(person['person_id'],[])
                if not user_record:
                    (pubkey,pkey) = init_person_key (person, plc_keys )
                    person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey, email=person['email'])
                    user_record = RegUser (hrn=person_hrn, gid=person_gid,
                                           pointer=person['person_id'],
                                           authority=get_authority(person_hrn),
                                           email=person['email'])
                    if pubkey:
                        user_record.reg_keys=[RegKey (pubkey['key'], pubkey['key_id'])]
                    else:
                        self.logger.warning("No key found for user %s"%user_record)
                    user_record.just_created()
                    global_dbsession.add (user_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported person: %s" % user_record)
                    self.remember_record ( user_record )
                else:
                    # update the record ?
                    #
                    # if a user key has changed then we need to update the
                    # users gid by forcing an update here
                    #
                    # right now, SFA only has *one* key attached to a user, and this is
                    # the key that the GID was made with
                    # so the logic here is, we consider that things are OK (unchanged) if
                    # all the SFA keys are present as PLC keys
                    # otherwise we trigger the creation of a new gid from *some* plc key
                    # and record this on the SFA side
                    # it would make sense to add a feature in PLC so that one could pick a 'primary'
                    # key but this is not available on the myplc side for now
                    # = or = it would be much better to support several keys in SFA but that
                    # does not seem doable without a major overhaul in the data model as
                    # a GID is attached to a hrn, but it's also linked to a key, so...
                    # NOTE: with this logic, the first key entered in PLC remains the one
                    # current in SFA until it is removed from PLC
                    sfa_keys = user_record.reg_keys
                    def sfa_key_in_list (sfa_key,plc_keys):
                        for plc_key in plc_keys:
                            if plc_key['key']==sfa_key.key:
                                return True
                        return False
                    # are all the SFA keys known to PLC ?
                    new_keys=False
                    if not sfa_keys and plc_keys:
                        new_keys=True
                    else:
                        for sfa_key in sfa_keys:
                            if not sfa_key_in_list (sfa_key,plc_keys):
                                new_keys = True
                    if new_keys:
                        (pubkey,pkey) = init_person_key (person, plc_keys)
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        person_gid.set_email(person['email'])
                        if not pubkey:
                            user_record.reg_keys=[]
                        else:
                            user_record.reg_keys=[ RegKey (pubkey['key'], pubkey['key_id'])]
                        user_record.gid = person_gid
                        user_record.just_updated()
                        self.logger.info("PlImporter: updated person: %s" % user_record)
                user_record.email = person['email']
                global_dbsession.commit()
                user_record.stale=False
                # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                # this is valid for all sites she is in..
                # PI is coded with role_id==20
                if 20 in person['role_ids']:
                    site_pis.append (user_record)
                # PL Admins need to marked as PI of the top authority record
                if 10 in person['role_ids'] and user_record not in top_auth_record.reg_pis:
                    admins.append(user_record)
            except:
                self.logger.log_exc("PlImporter: failed to import person %d %s"%(person['person_id'],person['email']))

        # maintain the list of PIs for a given site
        # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
        # site_pis = list(set(site_pis))
        # this was likely due to a bug in the above logic, that had to do with disabled persons
        # being improperly handled, and where the whole loop on persons
        # could be performed twice with the same person...
        # so hopefully we do not need to eliminate duplicates explicitly here anymore
        site_record.reg_pis = list(set(site_pis))
        global_dbsession.commit()

        # import slices
        for slice_id in site['slice_ids']:
            try:
                slice = slices_by_id[slice_id]
            except:
                self.logger.warning ("PlImporter: cannot locate slice_id %s - ignored"%slice_id)
                continue
            #slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
            slice_hrn = slice['hrn']
            if slice_hrn is None:
                self.logger.warning("Slice %s has no hrn - skipped"%slice['name'])
                continue
            slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
            if not slice_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                             pointer=slice['slice_id'],
                                             authority=get_authority(slice_hrn))
                    slice_record.just_created()
                    global_dbsession.add(slice_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported slice: %s" % slice_record)
                    self.remember_record ( slice_record )
                except:
                    self.logger.log_exc("PlImporter: failed to import slice %s (%s)"%(slice_hrn,slice['name']))
            else:
                # xxx update the record ...
                # given that we record the current set of users anyways, there does not seem to be much left to do here
                # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                pass
            # record current users affiliated with the slice
            slice_record.reg_researchers = \
                [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
            global_dbsession.commit()
            slice_record.stale=False

    # Set PL Admins as PI's of the top authority
    if admins:
        top_auth_record.reg_pis = list(set(admins))
        global_dbsession.commit()
        self.logger.info('PlImporter: set PL admins %s as PIs of %s'%(admins,top_auth_record.hrn))

    ### remove stale records
    # special records must be preserved
    system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
    for record in all_records:
        if record.hrn in system_hrns:
            record.stale=False
        if record.peer_authority:
            record.stale=False
        if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
            record.hrn.endswith("internet2"):
            record.stale=False

    for record in all_records:
        try:
            stale=record.stale
        except:
            stale=True
            self.logger.warning("stale not found with %s"%record)
        if stale:
            self.logger.info("PlImporter: deleting stale record: %s" % record)
            global_dbsession.delete(record)
            global_dbsession.commit()
def get_key_from_incoming_ip(self, api):
    """
    Regenerate a keypair and gid for the node whose IP address is making
    this call, store the new gid in the registry, and scp key + gid onto
    the node.

    :param api: registry API instance providing dbsession(), remote_addr,
        driver shell and auth hierarchy
    :returns: 1 on success
    :raises NonExistingRecord: if the caller's IP matches no interface/node
    :raises RecordNotFound: if the node has no SFA record
    :raises Exception: if an scp command fails (with its output)
    """
    dbsession = api.dbsession()
    # verify that the callers's ip address exist in the db and is an interface
    # for a node in the db
    (ip, port) = api.remote_addr
    interfaces = api.driver.shell.GetInterfaces({'ip': ip}, ['node_id'])
    if not interfaces:
        raise NonExistingRecord("no such ip %(ip)s" % locals())
    nodes = api.driver.shell.GetNodes([interfaces[0]['node_id']],
                                      ['node_id', 'hostname'])
    if not nodes:
        raise NonExistingRecord("no such node using ip %(ip)s" % locals())
    node = nodes[0]

    # look up the sfa record
    record = dbsession.query(RegRecord).filter_by(
        type='node', pointer=node['node_id']).first()
    if not record:
        raise RecordNotFound("node with pointer %s" % node['node_id'])

    # generate a new keypair and gid
    uuid = create_uuid()
    pkey = Keypair(create=True)
    urn = hrn_to_urn(record.hrn, record.type)
    gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey)
    gid = gid_object.save_to_string(save_parents=True)
    record.gid = gid

    # update the record
    dbsession.commit()

    # attempt to scp the key and gid onto the node
    # this will only work for planetlab based components
    (kfd, key_filename) = tempfile.mkstemp()
    (gfd, gid_filename) = tempfile.mkstemp()
    # BUG FIX: close the raw descriptors returned by mkstemp right away;
    # save_to_file() reopens the files by name, and the original code
    # leaked both fds on every call.
    os.close(kfd)
    os.close(gfd)
    try:
        pkey.save_to_file(key_filename)
        gid_object.save_to_file(gid_filename, save_parents=True)
        host = node['hostname']
        key_dest = "/etc/sfa/node.key"
        gid_dest = "/etc/sfa/node.gid"
        scp = "/usr/bin/scp"
        #identity = "/etc/planetlab/root_ssh_key.rsa"
        identity = "/etc/sfa/root_ssh_key"
        scp_options = " -i %(identity)s " % locals()
        # (the original applied "% locals()" to this literal too; it has no
        # conversion specifiers so the formatting was a no-op)
        scp_options += "-o StrictHostKeyChecking=no "
        scp_key_command = "%(scp)s %(scp_options)s %(key_filename)s root@%(host)s:%(key_dest)s" %\
            locals()
        scp_gid_command = "%(scp)s %(scp_options)s %(gid_filename)s root@%(host)s:%(gid_dest)s" %\
            locals()

        all_commands = [scp_key_command, scp_gid_command]
        for command in all_commands:
            (status, output) = commands.getstatusoutput(command)
            if status:
                # parenthesized raise form works on both python 2 and 3
                # (original used the py2-only "raise Exception, output")
                raise Exception(output)
    finally:
        # BUG FIX: remove the temp files even when an scp fails; the
        # original unlinked them only on the success path and leaked
        # them on error.
        for filename in [key_filename, gid_filename]:
            os.unlink(filename)
    return 1
def run(self, options):
    """
    Import PLC data (sites, nodes, persons, slices) into the SFA registry.

    Records already present in the registry are marked stale up-front;
    every record found in PLC clears its stale flag, and whatever is
    still stale at the end is deleted from the registry.

    :param options: command-line options (unused in this body)
    """
    config = Config()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = PlShell(config)

    ######## retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()

    # create hash by (type,hrn)
    # we essentially use this to know if a given record is already known to SFA
    self.records_by_type_hrn = \
        dict([((record.type, record.hrn), record) for record in all_records])
    # create hash by (type,pointer)
    self.records_by_type_pointer = \
        dict([((record.type, record.pointer), record)
              for record in all_records if record.pointer != -1])

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records:
        record.stale = True

    ######## retrieve PLC data
    # Get all plc sites
    # retrieve only required stuf
    sites = shell.GetSites({
        'peer_id': None,
        'enabled': True
    }, [
        'site_id', 'login_base', 'node_ids', 'slice_ids', 'person_ids', 'name'
    ])
    # create a hash of sites by login_base
    #sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
    # Get all plc users
    persons = shell.GetPersons({
        'peer_id': None,
        'enabled': True
    }, ['person_id', 'email', 'key_ids', 'site_ids', 'role_ids'])
    # create a hash of persons by person_id
    persons_by_id = dict([(person['person_id'], person)
                          for person in persons])
    # also gather non-enabled user accounts so as to issue relevant warnings
    disabled_persons = shell.GetPersons({
        'peer_id': None,
        'enabled': False
    }, ['person_id'])
    disabled_person_ids = [
        person['person_id'] for person in disabled_persons
    ]
    # Get all plc public keys
    # accumulate key ids for keys retrieval
    key_ids = []
    for person in persons:
        key_ids.extend(person['key_ids'])
    keys = shell.GetKeys({
        'peer_id': None,
        'key_id': key_ids,
        'key_type': 'ssh'
    })
    # create a hash of keys by key_id
    keys_by_id = dict([(key['key_id'], key) for key in keys])
    # create a dict person_id -> [ (plc)keys ]
    keys_by_person_id = {}
    for person in persons:
        pubkeys = []
        for key_id in person['key_ids']:
            # by construction all the keys we fetched are ssh keys
            # so gpg keys won't be in there
            try:
                key = keys_by_id[key_id]
                pubkeys.append(key)
            except:
                self.logger.warning(
                    "Could not spot key %d - probably non-ssh" % key_id)
        keys_by_person_id[person['person_id']] = pubkeys
    # Get all plc nodes
    nodes = shell.GetNodes({'peer_id': None},
                           ['node_id', 'hostname', 'site_id'])
    # create hash by node_id
    nodes_by_id = dict([(
        node['node_id'],
        node,
    ) for node in nodes])
    # Get all plc slices
    slices = shell.GetSlices({'peer_id': None},
                             ['slice_id', 'name', 'person_ids'])
    # create hash by slice_id
    slices_by_id = dict([(slice['slice_id'], slice) for slice in slices])

    # isolate special vini case in separate method
    self.create_special_vini_record(interface_hrn)

    # start importing
    for site in sites:
        # sites flagged as sfa-created are skipped: they originated in SFA itself
        try:
            site_sfa_created = shell.GetSiteSfaCreated(site['site_id'])
        except:
            site_sfa_created = None
        if site['name'].startswith('sfa:') or site_sfa_created == 'True':
            continue

        site_hrn = _get_site_hrn(interface_hrn, site)

        # import if hrn is not in list of existing hrns or if the hrn exists
        # but its not a site record
        site_record = self.locate_by_type_hrn('authority', site_hrn)
        if not site_record:
            try:
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                site_record = RegAuthority(
                    hrn=site_hrn,
                    gid=auth_info.get_gid_object(),
                    pointer=site['site_id'],
                    authority=get_authority(site_hrn))
                site_record.just_created()
                global_dbsession.add(site_record)
                global_dbsession.commit()
                self.logger.info(
                    "PlImporter: imported authority (site) : %s" %
                    site_record)
                self.remember_record(site_record)
            except:
                # if the site import fails then there is no point in trying to import the
                # site's child records (node, slices, persons), so skip them.
                self.logger.log_exc(
                    "PlImporter: failed to import site %s. Skipping child records"
                    % site_hrn)
                continue
        else:
            # xxx update the record ...
            pass
        site_record.stale = False

        # import node records
        for node_id in site['node_ids']:
            try:
                node = nodes_by_id[node_id]
            except:
                self.logger.warning(
                    "PlImporter: cannot find node_id %s - ignored" %
                    node_id)
                continue
            site_auth = get_authority(site_hrn)
            site_name = site['login_base']
            node_hrn = hostname_to_hrn(site_auth, site_name,
                                       node['hostname'])
            # xxx this sounds suspicious -- hrns are capped at 64 characters
            if len(node_hrn) > 64:
                node_hrn = node_hrn[:64]
            node_record = self.locate_by_type_hrn('node', node_hrn)
            if not node_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(node_hrn, 'node')
                    node_gid = self.auth_hierarchy.create_gid(
                        urn, create_uuid(), pkey)
                    node_record = RegNode(
                        hrn=node_hrn,
                        gid=node_gid,
                        pointer=node['node_id'],
                        authority=get_authority(node_hrn))
                    node_record.just_created()
                    global_dbsession.add(node_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported node: %s" %
                                     node_record)
                    self.remember_record(node_record)
                except:
                    self.logger.log_exc(
                        "PlImporter: failed to import node %s" % node_hrn)
                    continue
            else:
                # xxx update the record ...
                pass
            node_record.stale = False

        site_pis = []
        # import persons
        for person_id in site['person_ids']:
            proceed = False
            if person_id in persons_by_id:
                person = persons_by_id[person_id]
                proceed = True
            elif person_id in disabled_person_ids:
                # disabled accounts are silently skipped, no warning
                pass
            else:
                self.logger.warning(
                    "PlImporter: cannot locate person_id %s in site %s - ignored"
                    % (person_id, site_hrn))
            # make sure to NOT run this if anything is wrong
            if not proceed:
                continue

            person_hrn = email_to_hrn(site_hrn, person['email'])
            # xxx suspicious again
            if len(person_hrn) > 64:
                person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')

            user_record = self.locate_by_type_hrn('user', person_hrn)

            # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
            def init_person_key(person, plc_keys):
                pubkey = None
                if person['key_ids']:
                    # randomly pick first key in set
                    pubkey = plc_keys[0]
                    try:
                        pkey = convert_public_key(pubkey['key'])
                    except:
                        self.logger.warn(
                            'PlImporter: unable to convert public key for %s'
                            % person_hrn)
                        pkey = Keypair(create=True)
                else:
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn(
                        "PlImporter: person %s does not have a PL public key"
                        % person_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)

            # new person
            try:
                plc_keys = keys_by_person_id.get(person['person_id'], [])
                if not user_record:
                    (pubkey, pkey) = init_person_key(person, plc_keys)
                    person_gid = self.auth_hierarchy.create_gid(
                        person_urn, create_uuid(), pkey,
                        email=person['email'])
                    user_record = RegUser(
                        hrn=person_hrn,
                        gid=person_gid,
                        pointer=person['person_id'],
                        authority=get_authority(person_hrn),
                        email=person['email'])
                    if pubkey:
                        user_record.reg_keys = [
                            RegKey(pubkey['key'], pubkey['key_id'])
                        ]
                    else:
                        self.logger.warning("No key found for user %s" %
                                            user_record)
                    user_record.just_created()
                    global_dbsession.add(user_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported person: %s" %
                                     user_record)
                    self.remember_record(user_record)
                else:
                    # update the record ?
                    #
                    # if a user key has changed then we need to update the
                    # users gid by forcing an update here
                    #
                    # right now, SFA only has *one* key attached to a user, and this is
                    # the key that the GID was made with
                    # so the logic here is, we consider that things are OK (unchanged) if
                    # all the SFA keys are present as PLC keys
                    # otherwise we trigger the creation of a new gid from *some* plc key
                    # and record this on the SFA side
                    # it would make sense to add a feature in PLC so that one could pick a 'primary'
                    # key but this is not available on the myplc side for now
                    # = or = it would be much better to support several keys in SFA but that
                    # does not seem doable without a major overhaul in the data model as
                    # a GID is attached to a hrn, but it's also linked to a key, so...
                    # NOTE: with this logic, the first key entered in PLC remains the one
                    # current in SFA until it is removed from PLC
                    sfa_keys = user_record.reg_keys

                    def sfa_key_in_list(sfa_key, plc_keys):
                        for plc_key in plc_keys:
                            if plc_key['key'] == sfa_key.key:
                                return True
                        return False

                    # are all the SFA keys known to PLC ?
                    new_keys = False
                    if not sfa_keys and plc_keys:
                        new_keys = True
                    else:
                        for sfa_key in sfa_keys:
                            if not sfa_key_in_list(sfa_key, plc_keys):
                                new_keys = True
                    if new_keys:
                        (pubkey, pkey) = init_person_key(person, plc_keys)
                        person_gid = self.auth_hierarchy.create_gid(
                            person_urn, create_uuid(), pkey)
                        person_gid.set_email(person['email'])
                        if not pubkey:
                            user_record.reg_keys = []
                        else:
                            user_record.reg_keys = [
                                RegKey(pubkey['key'], pubkey['key_id'])
                            ]
                        user_record.gid = person_gid
                        user_record.just_updated()
                        self.logger.info("PlImporter: updated person: %s" %
                                         user_record)
                user_record.email = person['email']
                global_dbsession.commit()
                user_record.stale = False
                # accumulate PIs - PLCAPI has a limitation that when someone has PI role
                # this is valid for all sites she is in..
                # PI is coded with role_id==20
                if 20 in person['role_ids']:
                    site_pis.append(user_record)
            except:
                self.logger.log_exc(
                    "PlImporter: failed to import person %d %s" %
                    (person['person_id'], person['email']))

        # maintain the list of PIs for a given site
        # for the record, Jordan had proposed the following addition as a welcome hotfix to a previous version:
        # site_pis = list(set(site_pis))
        # this was likely due to a bug in the above logic, that had to do with disabled persons
        # being improperly handled, and where the whole loop on persons
        # could be performed twice with the same person...
        # so hopefully we do not need to eliminate duplicates explicitly here anymore
        site_record.reg_pis = list(set(site_pis))
        global_dbsession.commit()

        # import slices
        for slice_id in site['slice_ids']:
            try:
                slice = slices_by_id[slice_id]
            except:
                self.logger.warning(
                    "PlImporter: cannot locate slice_id %s - ignored" %
                    slice_id)
                # BUG FIX: the original fell through after logging, so the
                # rest of the body ran with the *previous* iteration's
                # `slice` (or raised NameError on the first miss). Skip the
                # missing slice as the log message promises.
                continue
            slice_hrn = slicename_to_hrn(interface_hrn, slice['name'])
            slice_record = self.locate_by_type_hrn('slice', slice_hrn)
            if not slice_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(
                        urn, create_uuid(), pkey)
                    slice_record = RegSlice(
                        hrn=slice_hrn,
                        gid=slice_gid,
                        pointer=slice['slice_id'],
                        authority=get_authority(slice_hrn))
                    slice_record.just_created()
                    global_dbsession.add(slice_record)
                    global_dbsession.commit()
                    self.logger.info("PlImporter: imported slice: %s" %
                                     slice_record)
                    self.remember_record(slice_record)
                except:
                    self.logger.log_exc(
                        "PlImporter: failed to import slice %s (%s)" %
                        (slice_hrn, slice['name']))
            else:
                # xxx update the record ...
                # given that we record the current set of users anyways, there does not seem to be much left to do here
                # self.logger.warning ("Slice update not yet implemented on slice %s (%s)"%(slice_hrn,slice['name']))
                pass
            # record current users affiliated with the slice
            slice_record.reg_researchers = \
                [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['person_ids'] ]
            global_dbsession.commit()
            slice_record.stale = False

    ### remove stale records
    # special records must be preserved
    system_hrns = [
        interface_hrn, root_auth, interface_hrn + '.slicemanager'
    ]
    for record in all_records:
        if record.hrn in system_hrns:
            record.stale = False
        if record.peer_authority:
            record.stale = False
        if ".vini" in interface_hrn and interface_hrn.endswith('vini') and \
            record.hrn.endswith("internet2"):
            record.stale = False

    for record in all_records:
        try:
            stale = record.stale
        except:
            stale = True
            self.logger.warning("stale not found with %s" % record)
        if stale:
            self.logger.info("PlImporter: deleting stale record: %s" %
                             record)
            global_dbsession.delete(record)
            global_dbsession.commit()
def Register(self, api, record_dict):
    """
    Create a new registry record from record_dict.

    Rejects unknown types and already-registered hrns, mints a GID when the
    incoming record has none (embedding the first 'reg-keys' public key if
    provided), extends the authority tree for authority records, wires up
    PI/researcher/key relationship objects, lets the testbed driver register
    its own counterpart, and commits the record.

    Returns the record's GID serialized to a string (parent certs included).
    """
    logger.debug("Register: entering with record_dict=%s"%printable(record_dict))
    # canonicalize incoming field names/shapes before doing anything else
    normalize_input_record (record_dict)
    logger.debug("Register: normalized record_dict=%s"%printable(record_dict))
    dbsession=api.dbsession()
    hrn, type = record_dict['hrn'], record_dict['type']
    urn = hrn_to_urn(hrn,type)
    # validate the type
    if type not in ['authority', 'slice', 'node', 'user']:
        raise UnknownSfaType(type)
    # check if record_dict already exists
    existing_records = dbsession.query(RegRecord).filter_by(type=type,hrn=hrn).all()
    if existing_records:
        raise ExistingRecord(hrn)
    assert ('type' in record_dict)
    # returns the right type of RegRecord according to type in record
    record = make_record(dict=record_dict)
    record.just_created()
    record.authority = get_authority(record.hrn)
    # raises if the parent authority is unknown, so this also validates the hrn
    auth_info = api.auth.get_auth_info(record.authority)
    pub_key = None
    # make sure record has a gid
    if not record.gid:
        uuid = create_uuid()
        pkey = Keypair(create=True)
        # 'reg-keys' is a dynamic attribute (hyphenated, set via setattr upstream)
        pub_key=getattr(record,'reg-keys',None)
        if pub_key is not None:
            # use only first key in record
            if pub_key and isinstance(pub_key, types.ListType):
                pub_key = pub_key[0]
            pkey = convert_public_key(pub_key)
        email=getattr(record,'email',None)
        gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey, email = email)
        gid = gid_object.save_to_string(save_parents=True)
        record.gid = gid
    if isinstance (record, RegAuthority):
        # update the tree
        if not api.auth.hierarchy.auth_exists(hrn):
            api.auth.hierarchy.create_auth(hrn_to_urn(hrn,'authority'))
        # get the GID from the newly created authority
        # (refetched for hrn itself, not the parent authority)
        auth_info = api.auth.get_auth_info(hrn)
        gid = auth_info.get_gid_object()
        record.gid=gid.save_to_string(save_parents=True)
        # locate objects for relationships
        pi_hrns = getattr(record,'reg-pis',None)
        if pi_hrns is not None:
            record.update_pis (pi_hrns, dbsession)
    elif isinstance (record, RegSlice):
        researcher_hrns = getattr(record,'reg-researchers',None)
        if researcher_hrns is not None:
            record.update_researchers (researcher_hrns, dbsession)
    elif isinstance (record, RegUser):
        # create RegKey objects for incoming keys
        if hasattr(record,'reg-keys'):
            keys=getattr(record,'reg-keys')
            # some people send the key as a string instead of a list of strings
            if isinstance(keys,types.StringTypes):
                keys=[keys]
            logger.debug ("creating %d keys for user %s"%(len(keys),record.hrn))
            record.reg_keys = [ RegKey (key) for key in keys ]
    # update testbed-specific data if needed
    pointer = api.driver.register (record.__dict__, hrn, pub_key)
    record.pointer=pointer
    dbsession.add(record)
    dbsession.commit()
    # update membership for researchers, pis, owners, operators
    self.update_driver_relations (api, record, record)
    return record.get_gid_object().save_to_string(save_parents=True)
def Update(self, api, record_dict): dbsession = api.dbsession() assert ('type' in record_dict) new_record = make_record(dict=record_dict) (type, hrn) = (new_record.type, new_record.hrn) # make sure the record exists record = dbsession.query(RegRecord).filter_by(type=type, hrn=hrn).first() if not record: raise RecordNotFound("hrn=%s, type=%s" % (hrn, type)) record.just_updated() # Use the pointer from the existing record, not the one that the user # gave us. This prevents the user from inserting a forged pointer pointer = record.pointer # is there a change in keys ? new_key = None if type == 'user': if getattr(new_record, 'keys', None): new_key = new_record.keys if isinstance(new_key, types.ListType): new_key = new_key[0] # take new_key into account if new_key: # update the openssl key and gid pkey = convert_public_key(new_key) uuid = create_uuid() urn = hrn_to_urn(hrn, type) gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey) gid = gid_object.save_to_string(save_parents=True) # xxx should do side effects from new_record to record # not too sure how to do that # not too big a deal with planetlab as the driver is authoritative, but... 
# update native relations if isinstance(record, RegSlice): researcher_hrns = getattr(new_record, 'researcher', None) if researcher_hrns is not None: record.update_researchers(researcher_hrns, dbsession) elif isinstance(record, RegAuthority): pi_hrns = getattr(new_record, 'pi', None) if pi_hrns is not None: record.update_pis(pi_hrns, dbsession) # update the PLC information that was specified with the record # xxx oddly enough, without this useless statement, # record.__dict__ as received by the driver seems to be off # anyway the driver should receive an object # (and then extract __dict__ itself if needed) print "DO NOT REMOVE ME before driver.update, record=%s" % record new_key_pointer = -1 try: (pointer, new_key_pointer) = api.driver.update(record.__dict__, new_record.__dict__, hrn, new_key) except: pass if new_key and new_key_pointer: record.reg_keys = [RegKey(new_key, new_key_pointer)] record.gid = gid dbsession.commit() # update membership for researchers, pis, owners, operators self.update_driver_relations(api, record, new_record) return 1
def import_single_slice(self, slicename):
    '''
    Import one slice, identified by name, from the testbed database
    into the SFA Registry.

    Used by verify_slice (clab_slices.py) right after a slice has been
    automatically created in the testbed database.  On success the slice
    record's researcher list is refreshed from the testbed; if the import
    of a new slice fails we bail out instead of dereferencing a missing
    record.

    :param slicename: name of the slice being imported
    :type string
    '''
    config = Config()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = ClabShell(config)
    self.logger.debug("Import Single slice: %s" % slicename)

    # retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()
    # dicts to avoid duplicates in the SFA database
    # keyed by (type, hrn)
    self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                     for record in all_records])
    # keyed by (type, pointer); -1 means "no testbed counterpart"
    self.records_by_type_pointer = dict([((record.type, record.pointer), record)
                                         for record in all_records
                                         if record.pointer != -1])

    # retrieve data from the CLab testbed
    site = shell.get_testbed_info()
    slice = shell.get_slice_by(slice_name=slicename)

    # hrn of the site (authority), then of the slice
    site_hrn = _get_site_hrn(interface_hrn, site)
    slice_hrn = slicename_to_hrn(slice['name'], site_hrn)

    # try to locate the slice_hrn in the SFA records
    slice_record = self.locate_by_type_hrn('slice', slice_hrn)
    if not slice_record:
        # create/import record for the slice
        try:
            # create a keypair for the slice
            pkey = Keypair(create=True)
            urn = hrn_to_urn(slice_hrn, 'slice')
            slice_gid = self.auth_hierarchy.create_gid(
                urn, create_uuid(), pkey)
            # create record for the slice and add it to the Registry
            slice_record = RegSlice(hrn=slice_hrn, gid=slice_gid,
                                    pointer=slice['id'],
                                    authority=get_authority(slice_hrn))
            slice_record.just_created()
            global_dbsession.add(slice_record)
            global_dbsession.commit()
            self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)
            self.remember_record(slice_record)
        except Exception:
            self.logger.log_exc("CLabImporter: failed to import slice")
            # no record to attach researchers to; previously this fell
            # through and crashed with AttributeError on None below
            return
    else:
        # Slice record already in the SFA registry. Update?
        self.logger.warning("Slice already existing in SFA Registry")

    # record current users associated with the slice
    users_of_slice = shell.get_users_by_slice(slice)
    slice_record.reg_researchers = \
        [self.locate_by_type_pointer('user', user['id'])
         for user in users_of_slice]
    global_dbsession.commit()
def Register(self, api, record_dict):
    """
    Create a new registry record from record_dict.

    Refuses unknown types and already-registered hrns.  A GID is minted
    when the incoming record has none (embedding the first public key, if
    any), the authority tree is extended for authority records, relation
    objects (pis / researchers / keys) are wired up, and the testbed
    driver registers its own counterpart before the record is committed.

    Returns the record's GID serialized to a string (parents included).
    """
    db = api.dbsession()
    hrn, type = record_dict['hrn'], record_dict['type']
    urn = hrn_to_urn(hrn, type)

    # refuse unknown types outright
    if type not in ['authority', 'slice', 'node', 'user']:
        raise UnknownSfaType(type)

    # refuse duplicates
    duplicates = db.query(RegRecord).filter_by(type=type, hrn=hrn).all()
    if duplicates:
        raise ExistingRecord(hrn)

    assert ('type' in record_dict)
    # make_record picks the proper RegRecord subclass from 'type'
    record = make_record(dict=record_dict)
    record.just_created()
    record.authority = get_authority(record.hrn)
    # validates that the parent authority is known to us
    auth_info = api.auth.get_auth_info(record.authority)

    pub_key = None
    # mint a GID when the caller did not provide one
    if not record.gid:
        gid_uuid = create_uuid()
        keypair = Keypair(create=True)
        if getattr(record, 'keys', None):
            pub_key = record.keys
            # only the first key gets embedded in the GID
            if isinstance(record.keys, types.ListType):
                pub_key = record.keys[0]
            keypair = convert_public_key(pub_key)
        minted = api.auth.hierarchy.create_gid(urn, gid_uuid, keypair)
        record.gid = minted.save_to_string(save_parents=True)

    if isinstance(record, RegAuthority):
        # grow the authority tree when needed
        if not api.auth.hierarchy.auth_exists(hrn):
            api.auth.hierarchy.create_auth(hrn_to_urn(hrn, 'authority'))
        # the (possibly freshly created) authority owns the official GID
        auth_info = api.auth.get_auth_info(hrn)
        record.gid = auth_info.get_gid_object().save_to_string(save_parents=True)
        # locate objects for relationships
        pi_hrns = getattr(record, 'pi', None)
        if pi_hrns is not None:
            record.update_pis(pi_hrns, db)
    elif isinstance(record, RegSlice):
        researcher_hrns = getattr(record, 'researcher', None)
        if researcher_hrns is not None:
            record.update_researchers(researcher_hrns, db)
    elif isinstance(record, RegUser):
        # turn incoming keys into RegKey rows
        if hasattr(record, 'keys'):
            logger.debug("creating %d keys for user %s"
                         % (len(record.keys), record.hrn))
            record.reg_keys = [RegKey(key) for key in record.keys]

    # let the testbed driver register its own object; it hands back a pointer
    record.pointer = api.driver.register(record.__dict__, hrn, pub_key)
    db.add(record)
    db.commit()

    # propagate researcher/pi/owner/operator memberships to the driver
    self.update_driver_relations(api, record, record)

    return record.get_gid_object().save_to_string(save_parents=True)
class Hierarchy: ## # Create the hierarchy object. # # @param basedir the base directory to store the hierarchy in def __init__(self, basedir = None): self.config = Config() if not basedir: basedir = os.path.join(self.config.SFA_DATA_DIR, "authorities") self.basedir = basedir ## # Given a hrn, return the filenames of the GID, private key # files. # # @param xrn the human readable name of the authority (urn will be convertd to hrn) def get_auth_filenames(self, xrn): hrn, type = urn_to_hrn(xrn) if '\\' in hrn: hrn = hrn.replace('\\', '') leaf = hrn else: leaf = get_leaf(hrn) parent_hrn = get_authority(hrn) directory = os.path.join(self.basedir, hrn.replace(".", "/")) gid_filename = os.path.join(directory, leaf+".gid") privkey_filename = os.path.join(directory, leaf+".pkey") return (directory, gid_filename, privkey_filename) ## # Check to see if an authority exists. An authority exists if it's disk # files exist. # # @param the human readable name of the authority to check def auth_exists(self, xrn): hrn, type = urn_to_hrn(xrn) (directory, gid_filename, privkey_filename) = \ self.get_auth_filenames(hrn) return os.path.exists(gid_filename) and os.path.exists(privkey_filename) ## # Create an authority. A private key for the authority and the associated # GID are created and signed by the parent authority. 
# # @param xrn the human readable name of the authority to create (urn will be converted to hrn) # @param create_parents if true, also create the parents if they do not exist def create_auth(self, xrn, create_parents=False): hrn, type = urn_to_hrn(str(xrn)) logger.debug("Hierarchy: creating authority: %s"% hrn) # create the parent authority if necessary parent_hrn = get_authority(hrn) parent_urn = hrn_to_urn(parent_hrn, 'authority') if (parent_hrn) and (not self.auth_exists(parent_urn)) and (create_parents): self.create_auth(parent_urn, create_parents) (directory, gid_filename, privkey_filename,) = \ self.get_auth_filenames(hrn) # create the directory to hold the files try: os.makedirs(directory) # if the path already exists then pass except OSError, (errno, strerr): if errno == 17: pass if os.path.exists(privkey_filename): logger.debug("using existing key %r for authority %r"%(privkey_filename,hrn)) pkey = Keypair(filename = privkey_filename) else: pkey = Keypair(create = True) pkey.save_to_file(privkey_filename) gid = self.create_gid(xrn, create_uuid(), pkey) gid.save_to_file(gid_filename, save_parents=True)
def register(api, record):
    """
    Register a record (authority / slice / user / node) in the registry
    table AND in PLC through api.plshell.

    Rejects unknown types and already-registered hrns, mints a GID when
    needed, then creates or reuses the matching PLC object and stores its
    id as the record's pointer.

    Returns the record's GID serialized to a string (parents included).
    """
    hrn, type = record['hrn'], record['type']
    urn = hrn_to_urn(hrn,type)
    # validate the type
    if type not in ['authority', 'slice', 'node', 'user']:
        raise UnknownSfaType(type)

    # check if record already exists
    table = SfaTable()
    existing_records = table.find({'type': type, 'hrn': hrn})
    if existing_records:
        raise ExistingRecord(hrn)

    record = SfaRecord(dict = record)
    record['authority'] = get_authority(record['hrn'])
    type = record['type']
    hrn = record['hrn']
    # validates that the parent authority is known to us
    auth_info = api.auth.get_auth_info(record['authority'])
    pub_key = None

    # make sure record has a gid
    if 'gid' not in record:
        uuid = create_uuid()
        pkey = Keypair(create=True)
        if 'key' in record and record['key']:
            # use only the first key in the record
            if isinstance(record['key'], types.ListType):
                pub_key = record['key'][0]
            else:
                pub_key = record['key']
            pkey = convert_public_key(pub_key)
        gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey)
        gid = gid_object.save_to_string(save_parents=True)
        record['gid'] = gid
        record.set_gid(gid)

    if type in ["authority"]:
        # update the tree
        if not api.auth.hierarchy.auth_exists(hrn):
            api.auth.hierarchy.create_auth(hrn_to_urn(hrn,'authority'))
        # get the GID from the newly created authority.
        # Fix: refetch auth_info for hrn itself — the auth_info obtained
        # above belongs to the PARENT authority, so its gid is the wrong
        # one to store on this record (the sibling Register methods
        # refetch get_auth_info(hrn) here as well).
        auth_info = api.auth.get_auth_info(hrn)
        gid = auth_info.get_gid_object()
        record.set_gid(gid.save_to_string(save_parents=True))
        # create or reuse the PLC site behind this authority
        pl_record = api.sfa_fields_to_pl_fields(type, hrn, record)
        sites = api.plshell.GetSites(api.plauth, [pl_record['login_base']])
        if not sites:
            pointer = api.plshell.AddSite(api.plauth, pl_record)
        else:
            pointer = sites[0]['site_id']
        record.set_pointer(pointer)
        record['pointer'] = pointer

    elif (type == "slice"):
        # PLC only accepts a restricted set of slice fields
        acceptable_fields=['url', 'instantiation', 'name', 'description']
        pl_record = api.sfa_fields_to_pl_fields(type, hrn, record)
        for key in pl_record.keys():
            if key not in acceptable_fields:
                pl_record.pop(key)
        slices = api.plshell.GetSlices(api.plauth, [pl_record['name']])
        if not slices:
            pointer = api.plshell.AddSlice(api.plauth, pl_record)
        else:
            pointer = slices[0]['slice_id']
        record.set_pointer(pointer)
        record['pointer'] = pointer

    elif (type == "user"):
        persons = api.plshell.GetPersons(api.plauth, [record['email']])
        if not persons:
            pointer = api.plshell.AddPerson(api.plauth, dict(record))
        else:
            pointer = persons[0]['person_id']
        if 'enabled' in record and record['enabled']:
            api.plshell.UpdatePerson(api.plauth, pointer,
                                     {'enabled': record['enabled']})
        # add this person to the site only if he is being added for the
        # first time by sfa and does not already exist in plc
        if not persons or not persons[0]['site_ids']:
            login_base = get_leaf(record['authority'])
            api.plshell.AddPersonToSite(api.plauth, pointer, login_base)
        # What roles should this user have?
        api.plshell.AddRoleToPerson(api.plauth, 'user', pointer)
        # Add the user's key
        if pub_key:
            api.plshell.AddPersonKey(api.plauth, pointer,
                                     {'key_type' : 'ssh', 'key' : pub_key})

    elif (type == "node"):
        pl_record = api.sfa_fields_to_pl_fields(type, hrn, record)
        login_base = hrn_to_pl_login_base(record['authority'])
        nodes = api.plshell.GetNodes(api.plauth, [pl_record['hostname']])
        if not nodes:
            pointer = api.plshell.AddNode(api.plauth, login_base, pl_record)
        else:
            pointer = nodes[0]['node_id']
        record['pointer'] = pointer
        record.set_pointer(pointer)

    record_id = table.insert(record)
    record['record_id'] = record_id

    # update membership for researchers, pis, owners, operators
    api.update_membership(None, record)

    return record.get_gid_object().save_to_string(save_parents=True)
def import_nodes(self, site_node_ids, nodes_by_id, testbed_shell): """ Creates appropriate hostnames and RegNode records for each node in site_node_ids, based on the information given by the dict nodes_by_id that was made from data from OAR. Saves the records to the DB. :param site_node_ids: site's node ids :type site_node_ids: list of integers :param nodes_by_id: dictionary , key is the node id, value is the a dict with node information. :type nodes_by_id: dictionary :param testbed_shell: IotlabDriver object, used to have access to testbed_shell attributes. :type testbed_shell: IotlabDriver :returns: None :rtype: None """ for node_id in site_node_ids: try: node = nodes_by_id[node_id] except KeyError: self.logger.warning("IotlabImporter: cannot find node_id %s \ - ignored" % (node_id)) continue escaped_hrn = \ self.hostname_to_hrn_escaped(testbed_shell.root_auth, node['hostname']) self.logger.info("IOTLABIMPORTER node %s " % (node)) hrn = node['hrn'] # xxx this sounds suspicious if len(hrn) > 64: hrn = hrn[:64] node_record = self.find_record_by_type_hrn('node', hrn) if not node_record: pkey = Keypair(create=True) urn = hrn_to_urn(escaped_hrn, 'node') node_gid = \ self.auth_hierarchy.create_gid(urn, create_uuid(), pkey) def testbed_get_authority(hrn): """ Gets the authority part in the hrn. :param hrn: hrn whose authority we are looking for. :type hrn: string :returns: splits the hrn using the '.' separator and returns the authority part of the hrn. :rtype: string """ return hrn.split(".")[0] node_record = RegNode(hrn=hrn, gid=node_gid, pointer='-1', authority=testbed_get_authority(hrn)) try: node_record.just_created() global_dbsession.add(node_record) global_dbsession.commit() self.logger.info("IotlabImporter: imported node: %s" % node_record) self.update_just_added_records_dict(node_record) except SQLAlchemyError: self.logger.log_exc("IotlabImporter: failed to import node") else: #TODO: xxx update the record ... pass node_record.stale = False
def import_single_node(self, nodename):
    '''
    Import one node, identified by name, from the testbed database into
    the SFA Registry.

    Used by verify_node (clab_slices.py) right after a node has been
    automatically created in the testbed database.

    :param nodename: name of the node being imported
    :type string
    '''
    config = Config()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = ClabShell(config)
    self.logger.debug("Import Single node: %s" % nodename)

    # snapshot every record currently in the SFA registry
    all_records = global_dbsession.query(RegRecord).all()
    # (type, hrn) -> record, to avoid duplicates in the SFA database
    self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                     for record in all_records])
    # (type, pointer) -> record, skipping records without a pointer
    self.records_by_type_pointer = dict([((record.type, record.pointer), record)
                                         for record in all_records
                                         if record.pointer != -1])

    # testbed-side data: the site and the node being imported
    site = shell.get_testbed_info()
    node = shell.get_node_by(node_name=nodename)

    # hrn of the site (authority)
    site_hrn = _get_site_hrn(interface_hrn, site)

    # node parameters: site_auth, site_name and the node's hrn
    site_auth = get_authority(site_hrn)
    site_name = site['name']
    node_hrn = hostname_to_hrn(site_hrn, node['name'])
    # hrns are capped at 64 characters
    if len(node_hrn) > 64:
        node_hrn = node_hrn[:64]

    # is the node already known to the registry ?
    node_record = self.locate_by_type_hrn('node', node_hrn)
    if node_record:
        # Node record already in the SFA registry. Update?
        return

    # create/import a record for the node
    try:
        # a fresh keypair for the node's GID
        pkey = Keypair(create=True)
        urn = hrn_to_urn(node_hrn, 'node')
        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        node_record = RegNode(hrn=node_hrn, gid=node_gid,
                              pointer=node['id'],
                              authority=get_authority(node_hrn))
        node_record.just_created()
        global_dbsession.add(node_record)
        global_dbsession.commit()
        self.logger.info("CLabImporter: imported node: %s" % node_hrn)
        self.remember_record(node_record)
    except:
        self.logger.log_exc("CLabImporter: failed to import node")
def import_single_slice(self, slicename):
    '''
    Import one slice, identified by name, from the testbed database
    into the SFA Registry.

    Used by verify_slice (clab_slices.py) right after a slice has been
    automatically created in the testbed database.  On success the slice
    record's researcher list is refreshed from the testbed; if the import
    of a new slice fails we bail out instead of dereferencing a missing
    record.

    :param slicename: name of the slice being imported
    :type string
    '''
    config = Config()
    interface_hrn = config.SFA_INTERFACE_HRN
    root_auth = config.SFA_REGISTRY_ROOT_AUTH
    shell = ClabShell(config)
    self.logger.debug("Import Single slice: %s" % slicename)

    # retrieve all existing SFA objects
    all_records = global_dbsession.query(RegRecord).all()
    # dicts to avoid duplicates in the SFA database
    # keyed by (type, hrn)
    self.records_by_type_hrn = dict([((record.type, record.hrn), record)
                                     for record in all_records])
    # keyed by (type, pointer); -1 means "no testbed counterpart"
    self.records_by_type_pointer = dict([((record.type, record.pointer), record)
                                         for record in all_records
                                         if record.pointer != -1])

    # retrieve data from the CLab testbed
    site = shell.get_testbed_info()
    slice = shell.get_slice_by(slice_name=slicename)

    # hrn of the site (authority), then of the slice
    site_hrn = _get_site_hrn(interface_hrn, site)
    slice_hrn = slicename_to_hrn(slice['name'], site_hrn)

    # try to locate the slice_hrn in the SFA records
    slice_record = self.locate_by_type_hrn('slice', slice_hrn)
    if not slice_record:
        # create/import record for the slice
        try:
            # create a keypair for the slice
            pkey = Keypair(create=True)
            urn = hrn_to_urn(slice_hrn, 'slice')
            slice_gid = self.auth_hierarchy.create_gid(
                urn, create_uuid(), pkey)
            # create record for the slice and add it to the Registry
            slice_record = RegSlice(hrn=slice_hrn, gid=slice_gid,
                                    pointer=slice['id'],
                                    authority=get_authority(slice_hrn))
            slice_record.just_created()
            global_dbsession.add(slice_record)
            global_dbsession.commit()
            self.logger.info("CLabImporter: imported slice: %s" % slice_hrn)
            self.remember_record(slice_record)
        except Exception:
            self.logger.log_exc("CLabImporter: failed to import slice")
            # no record to attach researchers to; previously this fell
            # through and crashed with AttributeError on None below
            return
    else:
        # Slice record already in the SFA registry. Update?
        self.logger.warning("Slice already existing in SFA Registry")

    # record current users associated with the slice
    users_of_slice = shell.get_users_by_slice(slice)
    slice_record.reg_researchers = \
        [self.locate_by_type_pointer('user', user['id'])
         for user in users_of_slice]
    global_dbsession.commit()
def run (self, options):
    """
    Full import of the Senslab testbed into the SFA registry.

    Flow: ensure the testbed-specific 'slab_xp' table exists, snapshot all
    existing SFA records, mark them stale, then (re-)import sites, nodes,
    persons (from LDAP) and slices, un-staling everything that is still in
    use; finally delete whatever remained stale, preserving system records
    and federated (peer) records.
    """
    config = Config()
    slabdriver = SlabDriver(config)

    #Create special slice table for senslab
    if not slabdriver.db.exists('slab_xp'):
        slabdriver.db.createtable()
        self.logger.info ("SlabImporter.run: slab_xp table created ")

    #retrieve all existing SFA objects
    all_records = dbsession.query(RegRecord).all()

    #create hash by (type,hrn)
    #used to know if a given record is already known to SFA
    self.records_by_type_hrn = \
        dict ( [ ( (record.type,record.hrn) , record ) for record in all_records ] )
    print>>sys.stderr,"\r\n SLABIMPORT \t all_records[0] %s all_records[0].email %s \r\n" %(all_records[0].type, all_records[0])
    # email -> user record, so LDAP persons can be matched by email
    self.users_rec_by_email = \
        dict ( [ (record.email, record) for record in all_records if record.type == 'user' ] )

    # create hash by (type,pointer)
    self.records_by_type_pointer = \
        dict ( [ ( (str(record.type),record.pointer) , record ) for record in all_records if record.pointer != -1] )

    # initialize record.stale to True by default, then mark stale=False on the ones that are in use
    for record in all_records:
        record.stale=True

    # fetch the testbed-side view: nodes, sites, LDAP persons, slices
    nodes_listdict  = slabdriver.GetNodes()
    nodes_by_id = dict([(node['node_id'],node) for node in nodes_listdict])
    sites_listdict  = slabdriver.GetSites()
    ldap_person_listdict = slabdriver.GetPersons()
    print>>sys.stderr,"\r\n SLABIMPORT \t ldap_person_listdict %s \r\n" %(ldap_person_listdict)
    slices_listdict = slabdriver.GetSlices()
    try:
        # one slice per researcher record_id
        slices_by_userid = dict ( [ (one_slice['reg_researchers']['record_id'], one_slice ) for one_slice in slices_listdict ] )
    except TypeError:
        self.logger.log_exc("SlabImporter: failed to create list of slices by user id.")
        pass

    for site in sites_listdict:
        site_hrn = _get_site_hrn(site)
        site_record = self.find_record_by_type_hrn ('authority', site_hrn)
        if not site_record:
            try:
                # new site: make sure the authority exists on disk, then
                # create its RegAuthority row
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                site_record = RegAuthority(hrn=site_hrn,
                                           gid=auth_info.get_gid_object(),
                                           pointer='-1',
                                           authority=get_authority(site_hrn))
                site_record.just_created()
                dbsession.add(site_record)
                dbsession.commit()
                self.logger.info("SlabImporter: imported authority (site) : %s" % site_record)
                self.update_just_added_records_dict(site_record)
            except SQLAlchemyError:
                # if the site import fails then there is no point in trying to import the
                # site's child records (node, slices, persons), so skip them.
                self.logger.log_exc("SlabImporter: failed to import site. Skipping child records")
                continue
        else:
            # xxx update the record ...
            pass
        site_record.stale=False

        # import node records in site
        for node_id in site['node_ids']:
            try:
                node = nodes_by_id[node_id]
            except:
                self.logger.warning ("SlabImporter: cannot find node_id %s - ignored"%node_id)
                continue
            site_auth = get_authority(site_hrn)
            site_name = site['name']
            escaped_hrn = self.hostname_to_hrn_escaped(slabdriver.root_auth,
                                                       node['hostname'])
            print>>sys.stderr, "\r\n \r\n SLABIMPORTER node %s " %(node)
            hrn =  node['hrn']
            # xxx this sounds suspicious
            if len(hrn) > 64:
                hrn = hrn[:64]
            node_record = self.find_record_by_type_hrn( 'node', hrn )
            if not node_record:
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(escaped_hrn, 'node')
                    node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)

                    def slab_get_authority(hrn):
                        # authority = first dotted component of the hrn
                        return hrn.split(".")[0]

                    node_record = RegNode (hrn=hrn, gid=node_gid,
                                           pointer = '-1',
                                           authority=slab_get_authority(hrn))
                    node_record.just_created()
                    dbsession.add(node_record)
                    dbsession.commit()
                    #self.logger.info("SlabImporter: imported node: %s" % node_record)
                    self.update_just_added_records_dict(node_record)
                except:
                    self.logger.log_exc("SlabImporter: failed to import node")
            else:
                # xxx update the record ...
                pass
            node_record.stale=False

        # import persons
        for person in ldap_person_listdict :
            print>>sys.stderr,"SlabImporter: person: %s" %(person['hrn'])
            if 'ssh-rsa' not in person['pkey']:
                #people with invalid ssh key (ssh-dss, empty, bullshit keys...)
                #won't be imported
                continue
            person_hrn = person['hrn']
            slice_hrn = self.slicename_to_hrn(person['hrn'])

            # xxx suspicious again
            if len(person_hrn) > 64:
                person_hrn = person_hrn[:64]
            person_urn = hrn_to_urn(person_hrn, 'user')

            print>>sys.stderr," \r\n SlabImporter: HEYYYYYYYYYY" , self.users_rec_by_email

            # Check if the user with person['email'] from LDAP is already
            # registered in SFA. One email = one person; in this case do not
            # create another record for this person.
            # person_hrn returned by GetPersons is based on senslab root auth + ldap uid
            user_record = self.find_record_by_type_hrn('user', person_hrn)
            if not user_record and person['email'] in self.users_rec_by_email:
                user_record = self.users_rec_by_email[person['email']]
                person_hrn = user_record.hrn
                person_urn = hrn_to_urn(person_hrn, 'user')

            slice_record = self.find_record_by_type_hrn ('slice', slice_hrn)

            # return a tuple pubkey (a plc key object) and pkey (a Keypair object)
            def init_person_key (person, slab_key):
                pubkey = None
                if person['pkey']:
                    # randomly pick first key in set
                    pubkey = slab_key
                    try:
                        pkey = convert_public_key(pubkey)
                    except TypeError:
                        #key not good. create another pkey
                        self.logger.warn('SlabImporter: unable to convert public key for %s' % person_hrn)
                        pkey = Keypair(create=True)
                else:
                    # the user has no keys. Creating a random keypair for the user's gid
                    self.logger.warn("SlabImporter: person %s does not have a public key"%person_hrn)
                    pkey = Keypair(create=True)
                return (pubkey, pkey)

            try:
                slab_key = person['pkey']
                # new person
                if not user_record:
                    (pubkey,pkey) = init_person_key (person, slab_key )
                    if pubkey is not None and pkey is not None :
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        if person['email']:
                            print>>sys.stderr, "\r\n \r\n SLAB IMPORTER PERSON EMAIL OK email %s " %(person['email'])
                            person_gid.set_email(person['email'])
                            user_record = RegUser (hrn=person_hrn, gid=person_gid,
                                                   pointer='-1',
                                                   authority=get_authority(person_hrn),
                                                   email=person['email'])
                        else:
                            user_record = RegUser (hrn=person_hrn, gid=person_gid,
                                                   pointer='-1',
                                                   authority=get_authority(person_hrn))
                        if pubkey:
                            user_record.reg_keys = [RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        dbsession.add (user_record)
                        dbsession.commit()
                        self.logger.info("SlabImporter: imported person: %s" % user_record)
                        self.update_just_added_records_dict( user_record )
                else:
                    # update the record ?
                    # if user's primary key has changed then we need to update the
                    # user's gid by forcing an update here
                    sfa_keys = user_record.reg_keys
                    new_key=False
                    # NOTE(review): 'is not' compares object identity, so a raw
                    # LDAP key string is virtually never the same object as the
                    # stored RegKey list — this is almost always True; looks
                    # like an inequality was intended. Left as-is.
                    if slab_key is not sfa_keys :
                        new_key = True
                    if new_key:
                        print>>sys.stderr,"SlabImporter: \t \t USER UPDATE person: %s" %(person['hrn'])
                        (pubkey,pkey) = init_person_key (person, slab_key)
                        person_gid = self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                        if not pubkey:
                            user_record.reg_keys=[]
                        else:
                            user_record.reg_keys=[ RegKey (pubkey)]
                        self.logger.info("SlabImporter: updated person: %s" % user_record)
                    if person['email']:
                        user_record.email = person['email']
                dbsession.commit()
                user_record.stale = False
            except:
                self.logger.log_exc("SlabImporter: failed to import person %s"%(person) )

            # look up the slice attached to this user (if any)
            try:
                slice = slices_by_userid[user_record.record_id]
            except:
                self.logger.warning ("SlabImporter: cannot locate slices_by_userid[user_record.record_id] %s - ignored"%user_record)
            if not slice_record :
                try:
                    pkey = Keypair(create=True)
                    urn = hrn_to_urn(slice_hrn, 'slice')
                    slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                    slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                             pointer='-1',
                                             authority=get_authority(slice_hrn))
                    slice_record.just_created()
                    dbsession.add(slice_record)
                    dbsession.commit()
                    #Serial id created after commit
                    #Get it
                    sl_rec = dbsession.query(RegSlice).filter(RegSlice.hrn.match(slice_hrn)).all()
                    #slab_slice = SenslabXP( slice_hrn = slice_hrn, record_id_slice=sl_rec[0].record_id, record_id_user= user_record.record_id)
                    #print>>sys.stderr, "\r\n \r\n SLAB IMPORTER SLICE IMPORT NOTslice_record %s \r\n slab_slice %s" %(sl_rec,slab_slice)
                    #slab_dbsession.add(slab_slice)
                    #slab_dbsession.commit()
                    #self.logger.info("SlabImporter: imported slice: %s" % slice_record)
                    self.update_just_added_records_dict ( slice_record )
                except:
                    self.logger.log_exc("SlabImporter: failed to import slice")
                #No slice update upon import in senslab
            else:
                # xxx update the record ...
                self.logger.warning ("Slice update not yet implemented")
                pass
            # record current users affiliated with the slice
            slice_record.reg_researchers =  [user_record]
            dbsession.commit()
            slice_record.stale=False

    ### remove stale records
    # special records must be preserved
    system_hrns = [slabdriver.hrn, slabdriver.root_auth,  slabdriver.hrn+ '.slicemanager']
    for record in all_records:
        if record.hrn in system_hrns:
            record.stale=False
        # federated (peer) records are never deleted by the importer
        if record.peer_authority:
            record.stale=False

    for record in all_records:
        if record.type == 'user':
            print>>sys.stderr,"SlabImporter: stale records: hrn %s %s" %(record.hrn,record.stale)
        try:
            stale=record.stale
        except:
            stale=True
            self.logger.warning("stale not found with %s"%record)
        if stale:
            self.logger.info("SlabImporter: deleting stale record: %s" % record)
            #if record.type == 'user':
                #rec = slab_dbsession.query(SenslabXP).filter_by(record_id_user = record.record_id).first()
                #slab_dbsession.delete(rec)
                #slab_dbsession.commit()
            dbsession.delete(record)
            dbsession.commit()