def get_leases(self, slice=None, options=None):
    """
    Return the testbed's current and future leases as RSpec Lease objects.

    :param slice: optional slice dict; when given, only leases belonging
        to that slice (matched by 'name') are returned.
    :param options: optional dict of extra options (currently unused).
    :returns: list of Lease objects with component_id, slice_id,
        start_time and duration (in grains) filled in.
    """
    # bugfix: 'options' used a mutable default ({}); also renamed the
    # local 'filter' which shadowed the builtin, and dropped the unused
    # 'return_fields' local.
    if options is None:
        options = {}
    now = int(time.time())
    # 'clip' trims the result to leases that have not already ended
    lease_filter = {'clip': now}
    if slice:
        lease_filter['name'] = slice['name']
    leases = self.driver.shell.GetLeases(lease_filter)
    grain = self.driver.shell.GetLeaseGranularity()
    # fetch the sites of all leased nodes in a single call
    site_ids = [lease['site_id'] for lease in leases]
    sites_dict = self.get_sites({'site_id': site_ids})
    rspec_leases = []
    for lease in leases:
        rspec_lease = Lease()
        # xxx how to retrieve site['login_base']
        site = sites_dict[lease['site_id']]
        rspec_lease['component_id'] = hrn_to_urn(
            self.driver.shell.GetNodeHrn(lease['hostname']), 'node')
        slice_hrn = self.driver.shell.GetSliceHrn(lease['slice_id'])
        rspec_lease['slice_id'] = hrn_to_urn(slice_hrn, 'slice')
        rspec_lease['start_time'] = lease['t_from']
        # duration is expressed in reservation grains, not seconds
        rspec_lease['duration'] = (lease['t_until'] - lease['t_from']) / grain
        rspec_leases.append(rspec_lease)
    return rspec_leases
def fill_record_sfa_info(self, records):
    """
    Annotate registry records in place with SFA-level fields.

    Depending on each record's 'type':
      - slice: sets 'geni_urn', 'PI' (left empty) and 'researcher'
        (hrns of the slice's users, resolved through the registry)
      - authority*: sets 'url' to None
      - node: sets 'dns' from 'hostname'
      - user: sets 'email', 'geni_urn' and 'geni_certificate'

    Mutates the dicts in `records`; returns None.
    """
    # NOTE(review): this local helper appears unused in the body —
    # confirm before removing
    def startswith(prefix, values):
        return [value for value in values if value.startswith(prefix)]
    # get user ids
    user_ids = []
    for record in records:
        user_ids.extend(record.get("user_ids", []))
    # get sfa records for all records associated with these records.
    # we'll replace pl ids (person_ids) with hrns from the sfa records
    # we obtain
    # get the registry records
    user_list, users = [], {}
    user_list = dbsession.query(RegRecord).filter(RegRecord.pointer.in_(user_ids))
    # create a hrns keyed on the sfa record's pointer.
    # Its possible for multiple records to have the same pointer so
    # the dict's value will be a list of hrns.
    users = defaultdict(list)
    for user in user_list:
        users[user.pointer].append(user)
    # get the dummy records
    # NOTE(review): dummy_user_list / dummy_users are built but never
    # read below — presumably left over from an earlier version; verify
    dummy_user_list, dummy_users = [], {}
    dummy_user_list = self.shell.GetUsers({"user_ids": user_ids})
    dummy_users = list_to_dict(dummy_user_list, "user_id")
    # fill sfa info
    for record in records:
        # skip records with no pl info (top level authorities)
        # if record['pointer'] == -1:
        #     continue
        sfa_info = {}
        type = record["type"]
        logger.info("fill_record_sfa_info - incoming record typed %s" % type)
        if type == "slice":
            # all slice users are researchers
            record["geni_urn"] = hrn_to_urn(record["hrn"], "slice")
            record["PI"] = []
            record["researcher"] = []
            for user_id in record.get("user_ids", []):
                hrns = [user.hrn for user in users[user_id]]
                record["researcher"].extend(hrns)
        elif type.startswith("authority"):
            record["url"] = None
            logger.info("fill_record_sfa_info - authority xherex")
        elif type == "node":
            sfa_info["dns"] = record.get("hostname", "")
            # xxx TODO: URI, LatLong, IP, DNS
        elif type == "user":
            logger.info("setting user.email")
            sfa_info["email"] = record.get("email", "")
            sfa_info["geni_urn"] = hrn_to_urn(record["hrn"], "user")
            sfa_info["geni_certificate"] = record["gid"]
            # xxx TODO: PostalAddress, Phone
        record.update(sfa_info)
def fill_record_sfa_info(self, records):
    """
    Annotate registry records in place with SFA-level fields
    (geni_urn / researcher for slices, dns for nodes, email /
    geni_urn / geni_certificate for users).

    Records whose 'pointer' is -1 (top-level authorities with no
    testbed counterpart) are skipped. Mutates `records`; returns None.
    """
    # NOTE(review): this local helper appears unused in the body —
    # confirm before removing
    def startswith(prefix, values):
        return [value for value in values if value.startswith(prefix)]
    # get user ids
    user_ids = []
    for record in records:
        user_ids.extend(record.get("user_ids", []))
    # get the registry records
    user_list, users = [], {}
    user_list = self.api.dbsession().query(RegRecord).filter(
        RegRecord.pointer.in_(user_ids)).all()
    # create a hrns keyed on the sfa record's pointer.
    # Its possible for multiple records to have the same pointer so
    # the dict's value will be a list of hrns.
    users = defaultdict(list)
    for user in user_list:
        users[user.pointer].append(user)
    # get the nitos records
    # NOTE(review): nitos_user_list / nitos_users are computed but never
    # read below — verify before removing
    nitos_user_list, nitos_users = [], {}
    nitos_all_users = self.convert_id(self.shell.getUsers())
    nitos_user_list = [
        user for user in nitos_all_users if user['user_id'] in user_ids
    ]
    nitos_users = list_to_dict(nitos_user_list, 'user_id')
    # fill sfa info
    for record in records:
        # skip records with no testbed counterpart
        if record['pointer'] == -1:
            continue
        sfa_info = {}
        type = record['type']
        logger.info("fill_record_sfa_info - incoming record typed %s" % type)
        if (type == "slice"):
            # all slice users are researchers
            record['geni_urn'] = hrn_to_urn(record['hrn'], 'slice')
            record['researcher'] = []
            for user_id in record.get('user_ids', []):
                hrns = [user.hrn for user in users[user_id]]
                record['researcher'].extend(hrns)
        elif (type == "node"):
            sfa_info['dns'] = record.get("hostname", "")
            # xxx TODO: URI, LatLong, IP, DNS
        elif (type == "user"):
            logger.info('setting user.email')
            sfa_info['email'] = record.get("email", "")
            sfa_info['geni_urn'] = hrn_to_urn(record['hrn'], 'user')
            sfa_info['geni_certificate'] = record['gid']
            # xxx TODO: PostalAddress, Phone
        record.update(sfa_info)
def fill_record_sfa_info(self, records):
    """
    Annotate registry records in place with SFA-level fields
    (geni_urn / researcher for slices, dns for nodes, email /
    geni_urn / geni_certificate for users).

    Records whose 'pointer' is -1 are skipped. Uses the module-level
    `dbsession` (unlike the per-api variant). Mutates `records`;
    returns None.
    """
    # NOTE(review): this local helper appears unused in the body —
    # confirm before removing
    def startswith(prefix, values):
        return [value for value in values if value.startswith(prefix)]
    # get user ids
    user_ids = []
    for record in records:
        user_ids.extend(record.get("user_ids", []))
    # get the registry records
    user_list, users = [], {}
    user_list = dbsession.query(RegRecord).filter(RegRecord.pointer.in_(user_ids)).all()
    # create a hrns keyed on the sfa record's pointer.
    # Its possible for multiple records to have the same pointer so
    # the dict's value will be a list of hrns.
    users = defaultdict(list)
    for user in user_list:
        users[user.pointer].append(user)
    # get the nitos records
    # NOTE(review): nitos_user_list / nitos_users are computed but never
    # read below — verify before removing
    nitos_user_list, nitos_users = [], {}
    nitos_all_users = self.convert_id(self.shell.getUsers())
    nitos_user_list = [user for user in nitos_all_users if user['user_id'] in user_ids]
    nitos_users = list_to_dict(nitos_user_list, 'user_id')
    # fill sfa info
    for record in records:
        # skip records with no testbed counterpart
        if record['pointer'] == -1:
            continue
        sfa_info = {}
        type = record['type']
        logger.info("fill_record_sfa_info - incoming record typed %s"%type)
        if (type == "slice"):
            # all slice users are researchers
            record['geni_urn'] = hrn_to_urn(record['hrn'], 'slice')
            record['researcher'] = []
            for user_id in record.get('user_ids', []):
                hrns = [user.hrn for user in users[user_id]]
                record['researcher'].extend(hrns)
        elif (type == "node"):
            sfa_info['dns'] = record.get("hostname", "")
            # xxx TODO: URI, LatLong, IP, DNS
        elif (type == "user"):
            logger.info('setting user.email')
            sfa_info['email'] = record.get("email", "")
            sfa_info['geni_urn'] = hrn_to_urn(record['hrn'], 'user')
            sfa_info['geni_certificate'] = record['gid']
            # xxx TODO: PostalAddress, Phone
        record.update(sfa_info)
def get_key_from_incoming_ip (self, api): dbsession=api.dbsession() # verify that the callers's ip address exist in the db and is an interface # for a node in the db (ip, port) = api.remote_addr interfaces = api.driver.shell.GetInterfaces({'ip': ip}, ['node_id']) if not interfaces: raise NonExistingRecord("no such ip %(ip)s" % locals()) nodes = api.driver.shell.GetNodes([interfaces[0]['node_id']], ['node_id', 'hostname']) if not nodes: raise NonExistingRecord("no such node using ip %(ip)s" % locals()) node = nodes[0] # look up the sfa record record=dbsession.query(RegRecord).filter_by(type='node',pointer=node['node_id']).first() if not record: raise RecordNotFound("node with pointer %s"%node['node_id']) # generate a new keypair and gid uuid = create_uuid() pkey = Keypair(create=True) urn = hrn_to_urn(record.hrn, record.type) gid_object = api.auth.hierarchy.create_gid(urn, uuid, pkey) gid = gid_object.save_to_string(save_parents=True) record.gid = gid # update the record dbsession.commit() # attempt the scp the key # and gid onto the node # this will only work for planetlab based components (kfd, key_filename) = tempfile.mkstemp() (gfd, gid_filename) = tempfile.mkstemp() pkey.save_to_file(key_filename) gid_object.save_to_file(gid_filename, save_parents=True) host = node['hostname'] key_dest="/etc/sfa/node.key" gid_dest="/etc/sfa/node.gid" scp = "/usr/bin/scp" #identity = "/etc/planetlab/root_ssh_key.rsa" identity = "/etc/sfa/root_ssh_key" scp_options=" -i %(identity)s " % locals() scp_options+="-o StrictHostKeyChecking=no " % locals() scp_key_command="%(scp)s %(scp_options)s %(key_filename)s root@%(host)s:%(key_dest)s" %\ locals() scp_gid_command="%(scp)s %(scp_options)s %(gid_filename)s root@%(host)s:%(gid_dest)s" %\ locals() all_commands = [scp_key_command, scp_gid_command] for command in all_commands: (status, output) = commands.getstatusoutput(command) if status: raise Exception, output for filename in [key_filename, gid_filename]: os.unlink(filename) return 1
def create_top_level_auth_records(self, hrn):
    """
    Create top level records (includes root and sub authorities (local/remote)

    Recursively ensures every ancestor authority of `hrn` exists in
    the auth hierarchy and the SFA table before creating `hrn` itself.
    """
    urn = hrn_to_urn(hrn, 'authority')
    # make sure parent exists; a hrn with no parent is its own root
    parent_hrn = get_authority(hrn)
    if not parent_hrn:
        parent_hrn = hrn
    # recurse upward until we hit the root
    if not parent_hrn == hrn:
        self.create_top_level_auth_records(parent_hrn)
    # create the authority if it doesnt already exist
    if not self.AuthHierarchy.auth_exists(urn):
        self.logger.info("Import: creating top level authorities")
        self.AuthHierarchy.create_auth(urn)
    # create the db record if it doesnt already exist
    # NOTE(review): get_auth_info is called with the hrn while
    # auth_exists was called with the urn — confirm both forms are
    # accepted by AuthHierarchy
    auth_info = self.AuthHierarchy.get_auth_info(hrn)
    table = SfaTable()
    auth_record = table.find({'type': 'authority', 'hrn': hrn})
    if not auth_record:
        # pointer=-1 marks records with no testbed counterpart
        auth_record = SfaRecord(hrn=hrn, gid=auth_info.get_gid_object(),
                                type="authority", pointer=-1)
        auth_record['authority'] = get_authority(auth_record['hrn'])
        self.logger.info("Import: inserting authority record for %s"%hrn)
        table.insert(auth_record)
def fill_record_info(self, records):
    """
    Fill in the testbed-specific and SFA-specific fields of each record.

    Accepts a single record or a list of records; always returns a list.
    Records of unrecognized type are left untouched.
    """
    if not isinstance(records, list):
        records = [records]
    # dispatch on record type; authority records match by prefix
    fillers = {
        'user': self.fill_user_record_info,
        'slice': self.fill_slice_record_info,
    }
    for record in records:
        rec_type = record['type']
        if rec_type in fillers:
            record = fillers[rec_type](record)
        elif rec_type.startswith('authority'):
            record = self.fill_auth_record_info(record)
        else:
            continue
        record['geni_urn'] = hrn_to_urn(record['hrn'], record['type'])
        record['geni_certificate'] = record['gid']
        # TODO: date_created / last_updated from the backend record
        # (datetime_to_string(utcparse(...)))
    return records
def import_slice(self, parent_hrn, slice):
    """
    Import one testbed slice under `parent_hrn`, creating a GID for it
    and inserting (or updating) its record in the SFA table.
    """
    # strip the site prefix from the testbed slice name and sanitize it
    slicename = _cleanup_string(slice['name'].split("_", 1)[-1])
    if not slicename:
        self.logger.error("Import: failed to parse slice name %s" % slice['name'])
        return
    hrn = parent_hrn + "." + slicename
    self.logger.info("Import: slice %s" % hrn)

    # mint a fresh keypair and gid for the slice
    keypair = Keypair(create=True)
    slice_gid = self.AuthHierarchy.create_gid(
        hrn_to_urn(hrn, 'slice'), create_uuid(), keypair)
    slice_record = SfaRecord(hrn=hrn, gid=slice_gid, type="slice",
                             pointer=slice['slice_id'])
    slice_record['authority'] = get_authority(slice_record['hrn'])

    # insert a new record, or refresh the existing one in place
    table = SfaTable()
    matches = table.find({'hrn': hrn, 'type': 'slice',
                          'pointer': slice['slice_id']})
    if matches:
        self.logger.info("Import: %s exists, updating " % hrn)
        slice_record['record_id'] = matches[0]['record_id']
        table.update(slice_record)
    else:
        table.insert(slice_record)
def create_interface_records(self): """ Create a record for each SFA interface """ # just create certs for all sfa interfaces even if they # aren't enabled auth_info = self.auth_hierarchy.get_auth_info( self.config.SFA_INTERFACE_HRN) pkey = auth_info.get_pkey_object() hrn = self.config.SFA_INTERFACE_HRN for type in [ 'authority+sa', 'authority+am', 'authority+sm', ]: urn = hrn_to_urn(hrn, type) gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey) # for now we have to preserve the authority+<> stuff if self.record_exists(type, hrn): continue interface_record = RegAuthority(type=type, hrn=hrn, gid=gid, authority=get_authority(hrn)) interface_record.just_created() global_dbsession.add(interface_record) global_dbsession.commit() self.logger.info("SfaImporter: imported authority (%s) %s " % (type, interface_record))
def redeem_ticket(self, opts, args): ticket_file = args[0] # get slice hrn from the ticket # use this to get the right slice credential ticket = SfaTicket(filename=ticket_file) ticket.decode() slice_hrn = ticket.gidObject.get_hrn() slice_urn = hrn_to_urn(slice_hrn, 'slice') #slice_hrn = ticket.attributes['slivers'][0]['hrn'] user_cred = self.get_user_cred() slice_cred = self.get_slice_cred(slice_hrn).save_to_string(save_parents=True) # get a list of node hostnames from the RSpec tree = etree.parse(StringIO(ticket.rspec)) root = tree.getroot() hostnames = root.xpath("./network/site/node/hostname/text()") # create an xmlrpc connection to the component manager at each of these # components and gall redeem_ticket connections = {} for hostname in hostnames: try: self.logger.info("Calling redeem_ticket at %(hostname)s " % locals()) server = self.get_server(hostname, CM_PORT, self.key_file, \ self.cert_file, self.options.debug) server.RedeemTicket(ticket.save_to_string(save_parents=True), slice_cred) self.logger.info("Success") except socket.gaierror: self.logger.error("redeem_ticket failed: Component Manager not accepting requests") except Exception, e: self.logger.log_exc(e.message)
def __init__(self, create=False, subject=None, string=None, filename=None,
             uuid=None, hrn=None, urn=None, lifeDays=1825, email=None):
    """
    Build a GID, delegating certificate construction to Certificate.

    :param create/subject/string/filename: passed through to
        Certificate.__init__.
    :param uuid: optional identifier, coerced to int.
    :param hrn: human readable name; when given, urn is derived from it.
    :param urn: URN form; when given, it overrides hrn-derived values
        and hrn is re-derived from it (urn wins if both are passed).
    :param lifeDays: certificate lifetime in days (default ~5 years).
    :param email: optional email for the SubjectAltName.
    """
    self.uuid = None
    self.hrn = None
    self.urn = None
    self.email = None  # for adding to the SubjectAltName
    Certificate.__init__(self, lifeDays, create, subject, string, filename)
    if subject:
        logger.debug("Creating GID for subject: %s" % subject)
    if uuid:
        self.uuid = int(uuid)
    if hrn:
        self.hrn = hrn
        # type is unknown at this point; encode() fills in the urn
        self.urn = hrn_to_urn(hrn, 'unknown')
    if urn:
        # urn takes precedence over hrn when both are supplied
        self.urn = urn
        self.hrn, type = urn_to_hrn(urn)
    if email:
        self.set_email(email)
def list_slices(self, creds, options):
    """
    Return the URNs of all slices on the testbed, serving the answer
    from the driver cache when one is available.
    """
    # cache hit: return immediately
    if self.cache:
        cached = self.cache.get('slices')
        if cached:
            logger.debug("NitosDriver.list_slices returns from cache")
            return cached

    # cache miss: query the testbed and convert names to urns
    slices = self.shell.getSlices({}, [])
    testbed_name = self.testbedInfo['name']
    slice_urns = [
        hrn_to_urn(slicename_to_hrn(self.hrn, testbed_name, entry['slice_name']),
                   'slice')
        for entry in slices
    ]

    # remember the result for next time
    if self.cache:
        logger.debug("NitosDriver.list_slices stores value in cache")
        self.cache.add('slices', slice_urns)

    return slice_urns
def parse_resources(text, slice_xrn):
    """
    Parse the textual output of the AM API client into a list of geni
    resource dicts: one for the plc slice (when present) plus one per
    reported VLAN.

    :param text: raw client output containing "Slice Status => ..." and
        "GRI => ... Status => ..." lines.
    :param slice_xrn: slice hrn used to build the sliver urn prefix.
    """
    resources = []
    urn = hrn_to_urn(slice_xrn, 'sliver')
    plc_slice = re.search("Slice Status => ([^\n]+)", text)
    # bugfix: re.search returns None when the marker is absent; the
    # original crashed with AttributeError on such output
    if plc_slice and plc_slice.group(1) != 'NONE':
        res = {}
        res['geni_urn'] = urn + '_plc_slice'
        res['geni_error'] = ''
        res['geni_status'] = 'unknown'
        if plc_slice.group(1) == 'CREATED':
            res['geni_status'] = 'ready'
        resources.append(res)
    # one resource entry per vlan (GRI, status) pair
    vlans = re.findall("GRI => ([^\n]+)\n\t Status => ([^\n]+)", text)
    for vlan in vlans:
        res = {}
        res['geni_error'] = ''
        res['geni_urn'] = urn + '_vlan_' + vlan[0]
        if vlan[1] == 'ACTIVE':
            res['geni_status'] = 'ready'
        elif vlan[1] == 'FAILED':
            res['geni_status'] = 'failed'
        else:
            res['geni_status'] = 'configuring'
        resources.append(res)
    return resources
def slice_status(api, slice_xrn, creds):
    """
    Query the AM API client for the network status of a slice and
    return a geni status dict with 'geni_urn', 'geni_status' and
    'geni_resources'.
    """
    urn = hrn_to_urn(slice_xrn, 'slice')
    result = {}
    top_level_status = 'unknown'
    slice_id = get_plc_slice_id(creds, urn)
    (ret, output) = call_am_apiclient("QuerySliceNetworkClient", [slice_id, ], 5)
    # parse output into rspec XML
    if output.find("Unkown Rspec:") > 0:
        # bugfix: this (and the FAILED branch below) assigned to the
        # misspelled 'top_level_staus', so failures were reported as
        # 'unknown'/'ready'
        top_level_status = 'failed'
        result['geni_resources'] = ''
    else:
        # (removed unused has_failure / all_active locals)
        if output.find("Status => FAILED") > 0:
            top_level_status = 'failed'
        elif (output.find("Status => ACCEPTED") > 0
              or output.find("Status => PENDING") > 0
              or output.find("Status => INSETUP") > 0
              or output.find("Status => INCREATE") > 0):
            top_level_status = 'configuring'
        else:
            top_level_status = 'ready'
        result['geni_resources'] = parse_resources(output, slice_xrn)
    result['geni_urn'] = urn
    result['geni_status'] = top_level_status
    return result
def get_slice_and_slivers(self, slice_xrn):
    """
    Returns a dict of slivers keyed on the sliver's node_id

    Returns a (slice, slivers) tuple; both are empty/None when the
    xrn is missing or the slice is unknown.
    """
    slivers = {}
    slice = None
    if not slice_xrn:
        return (slice, slivers)
    # NOTE(review): slice_urn appears unused below — confirm before removing
    slice_urn = hrn_to_urn(slice_xrn, 'slice')
    slice_hrn, _ = urn_to_hrn(slice_xrn)
    slice_name = hrn_to_unigetestbed_slicename(slice_hrn)
    slices = self.driver.shell.GetSlices({'slice_name': slice_name})
    if not slices:
        return (slice, slivers)
    slice = slices[0]
    # sort slivers by node id
    slice_nodes = []
    if 'node_ids' in slice.keys():
        slice_nodes = self.driver.shell.GetNodes(
            {'node_ids': slice['node_ids']})
    for node in slice_nodes:
        slivers[node['node_id']] = node
    return (slice, slivers)
def get_slice_and_slivers(self, slice_xrn, login=None):
    """
    Returns a dict of slivers keyed on the sliver's node_id

    NOTE(review): despite the docstring, this returns the tuple
    (slices, slivers) — the full slices list, not a single slice —
    confirm callers expect a list.
    """
    slivers = {}
    sfa_slice = None
    if not slice_xrn:
        return (sfa_slice, slivers)
    slice_urn = hrn_to_urn(slice_xrn, 'slice')
    slice_hrn, _ = urn_to_hrn(slice_xrn)
    slice_name = slice_hrn
    slices = self.driver.GetSlices(slice_filter= str(slice_name), \
        slice_filter_type = 'slice_hrn', login=login)
    logger.debug("Slabaggregate api \tget_slice_and_slivers \
        sfa_slice %s \r\n slices %s self.driver.hrn %s" \
        %(sfa_slice, slices, self.driver.hrn))
    if not slices:
        return (sfa_slice, slivers)
    #if isinstance(sfa_slice, list):
        #sfa_slice = slices[0]
    #else:
        #sfa_slice = slices

    # sort slivers by node id , if there is a job
    #and therfore, node allocated to this slice
    for sfa_slice in slices:
        try:
            node_ids_list = sfa_slice['node_ids']
        except KeyError:
            # slice has no job/nodes attached: nothing to add
            logger.log_exc("SLABAGGREGATE \t \
                get_slice_and_slivers KeyError ")
            continue
        # one Sliver per allocated node
        for node in node_ids_list:
            sliver_xrn = Xrn(slice_urn, type='sliver', id=node)
            sliver_xrn.set_authority(self.driver.hrn)
            #node_id = self.driver.root_auth + '.' + node_id
            sliver = Sliver({'sliver_id':sliver_xrn.urn,
                             'name': sfa_slice['hrn'],
                             'type': 'slab-node',
                             'tags': []})
            slivers[node] = sliver
        #Add default sliver attribute :
        #connection information for senslab
        # (only for slices under the local root authority)
        if get_authority (sfa_slice['hrn']) == self.driver.root_auth:
            tmp = sfa_slice['hrn'].split('.')
            ldap_username = tmp[1].split('_')[0]
            vmaddr = 'ssh ' + ldap_username + '@grenoble.senslab.info'
            slivers['default_sliver'] = {'vm': vmaddr , 'login': ldap_username}
    #TODO get_slice_and_slivers Find the login of the external user
    logger.debug("SLABAGGREGATE api get_slice_and_slivers slivers %s "\
        %(slivers))
    return (slices, slivers)
def list_slices(self, creds, options):
    """Return the URNs of all slices known to the testbed."""
    urns = []
    for entry in self.shell.GetSlices():
        hrn = slicename_to_hrn(self.hrn, entry["slice_name"])
        urns.append(hrn_to_urn(hrn, "slice"))
    return urns
def import_site(self, hrn, site):
    """
    Import one testbed site as an authority: ensure it exists in the
    auth hierarchy and insert or refresh its record in the SFA table.

    Returns the site's hrn.
    """
    # NOTE(review): shell and plc_auth appear unused below — confirm
    # before removing
    shell = self.shell
    plc_auth = self.plc_auth
    urn = hrn_to_urn(hrn, 'authority')
    self.logger.info("Import: site %s"%hrn)
    # create the authority
    if not self.AuthHierarchy.auth_exists(urn):
        self.AuthHierarchy.create_auth(urn)
    auth_info = self.AuthHierarchy.get_auth_info(urn)
    table = SfaTable()
    auth_record = SfaRecord(hrn=hrn, gid=auth_info.get_gid_object(),
                            type="authority", pointer=site['site_id'])
    auth_record['authority'] = get_authority(auth_record['hrn'])
    existing_records = table.find({'hrn': hrn, 'type': 'authority',
                                   'pointer': site['site_id']})
    if not existing_records:
        table.insert(auth_record)
    else:
        # keep the original record_id so update targets the right row
        self.logger.info("Import: %s exists, updating " % hrn)
        existing_record = existing_records[0]
        auth_record['record_id'] = existing_record['record_id']
        table.update(auth_record)
    return hrn
def parse_resources(self, text, slice_xrn):
    """
    Parse the textual output of the AM API client into a list of geni
    resource dicts: one for the plc slice (when present) plus one per
    reported VLAN.

    :param text: raw client output containing "Slice Status => ..." and
        "GRI => ... Status => ..." lines.
    :param slice_xrn: slice hrn used to build the sliver urn prefix.
    """
    resources = []
    urn = hrn_to_urn(slice_xrn, 'sliver')
    plc_slice = re.search("Slice Status => ([^\n]+)", text)
    # bugfix: re.search returns None when the marker is absent; the
    # original crashed with AttributeError on such output
    if plc_slice and plc_slice.group(1) != 'NONE':
        res = {}
        res['geni_urn'] = urn + '_plc_slice'
        res['geni_error'] = ''
        res['geni_status'] = 'unknown'
        if plc_slice.group(1) == 'CREATED':
            res['geni_status'] = 'ready'
        resources.append(res)
    # one resource entry per vlan (GRI, status) pair
    vlans = re.findall("GRI => ([^\n]+)\n\t Status => ([^\n]+)", text)
    for vlan in vlans:
        res = {}
        res['geni_error'] = ''
        res['geni_urn'] = urn + '_vlan_' + vlan[0]
        if vlan[1] == 'ACTIVE':
            res['geni_status'] = 'ready'
        elif vlan[1] == 'FAILED':
            res['geni_status'] = 'failed'
        else:
            res['geni_status'] = 'configuring'
        resources.append(res)
    return resources
def fill_record_info(self, records):
    """
    Fill in the testbed-specific and SFA-specific fields of each record.

    A single record is accepted as well as a list; the result is always
    a list. Records of unknown type pass through unchanged.
    """
    record_list = records if isinstance(records, list) else [records]
    for entry in record_list:
        entry_type = entry['type']
        if entry_type == 'user':
            entry = self.fill_user_record_info(entry)
        elif entry_type == 'slice':
            entry = self.fill_slice_record_info(entry)
        elif entry_type.startswith('authority'):
            entry = self.fill_auth_record_info(entry)
        else:
            # nothing to fill for this type
            continue
        entry['geni_urn'] = hrn_to_urn(entry['hrn'], entry['type'])
        entry['geni_certificate'] = entry['gid']
        # TODO: date_created / last_updated from the backend record
        # (datetime_to_string(utcparse(...)))
    return record_list
def slice_status(self, api, slice_xrn, creds):
    """
    Query the AM API client for the network status of a slice and
    return a geni status dict with 'geni_urn', 'geni_status' and
    'geni_resources'.
    """
    urn = hrn_to_urn(slice_xrn, 'slice')
    result = {}
    top_level_status = 'unknown'
    slice_id = self.get_plc_slice_id(creds, urn)
    (ret, output) = self.call_am_apiclient("QuerySliceNetworkClient", [
        slice_id,
    ], 5)
    # parse output into rspec XML
    if output.find("Unkown Rspec:") > 0:
        # bugfix: this (and the FAILED branch below) assigned to the
        # misspelled 'top_level_staus', so failures were reported as
        # 'unknown'/'ready'
        top_level_status = 'failed'
        result['geni_resources'] = ''
    else:
        # (removed unused has_failure / all_active locals)
        if output.find("Status => FAILED") > 0:
            top_level_status = 'failed'
        elif (output.find("Status => ACCEPTED") > 0
              or output.find("Status => PENDING") > 0
              or output.find("Status => INSETUP") > 0
              or output.find("Status => INCREATE") > 0):
            top_level_status = 'configuring'
        else:
            top_level_status = 'ready'
        result['geni_resources'] = self.parse_resources(output, slice_xrn)
    result['geni_urn'] = urn
    result['geni_status'] = top_level_status
    return result
def node_to_rspec_node(self, node, options=None):
    """
    Convert a testbed node dict into an RSpec NodeElement, filling in
    identifiers, hardware types, the node's resources and the site's
    location when known.
    """
    # bugfix: 'options' used a mutable default ({})
    if options is None:
        options = {}
    rspec_node = NodeElement()
    site = self.driver.testbedInfo
    rspec_node['component_id'] = hostname_to_urn(
        self.driver.hrn, site['name'], node['hostname'])
    rspec_node['component_name'] = node['hostname']
    rspec_node['ip'] = node['ip']
    rspec_node['protocol'] = node['protocol']
    rspec_node['port'] = node['port']
    rspec_node['component_manager_id'] = Xrn(
        self.driver.hrn, 'authority+cm').get_urn()
    rspec_node['authority_id'] = hrn_to_urn(
        unigetestbedXrn.site_hrn(self.driver.hrn, site['name']), 'authority+sa')
    # distinguish between Shared and Reservable nodes
    rspec_node['exclusive'] = 'false'
    rspec_node['hardware_types'] = [HardwareType({'name': 'endpoint'}),
                                    HardwareType({'name': 'sensor'})]
    # attach the node's resources (sensors/actuators)
    resources = []
    for resource in node['resources']:
        resources.append(Resource({'name': resource.get('name'),
                                   'path': resource.get('path'),
                                   'type': resource.get('type'),
                                   'unit': resource.get('unit'),
                                   'data_type': resource.get('datatype')}))
    rspec_node['resources'] = resources
    logger.info(rspec_node)
    # site location, when both coordinates are known
    if site['longitude'] and site['latitude']:
        location = Location({'longitude': site['longitude'],
                             'latitude': site['latitude'],
                             'country': 'unknown'})
        rspec_node['location'] = location
    logger.info(rspec_node)
    return rspec_node
def create_special_vini_record(self, interface_hrn):
    """
    Special case for vini interfaces: synthesize a fake 'Internet2'
    site authority under the interface and import it if it is not
    already known.
    """
    # special case for vini
    if ".vini" in interface_hrn and interface_hrn.endswith('vini'):
        # create a fake internet2 site first
        i2site = {
            'name': 'Internet2',
            'login_base': 'internet2',
            'site_id': -1
        }
        site_hrn = _get_site_hrn(interface_hrn, i2site)
        # import if hrn is not in list of existing hrns or if the hrn exists
        # but its not a site record
        if ('authority', site_hrn,) not in self.records_by_type_hrn:
            urn = hrn_to_urn(site_hrn, 'authority')
            if not self.auth_hierarchy.auth_exists(urn):
                self.auth_hierarchy.create_auth(urn)
            auth_info = self.auth_hierarchy.get_auth_info(urn)
            auth_record = RegAuthority(
                hrn=site_hrn,
                gid=auth_info.get_gid_object(),
                # bugfix: the original referenced an undefined 'site';
                # the fake site dict is i2site
                pointer=i2site['site_id'],
                authority=get_authority(site_hrn))
            auth_record.just_created()
            global_dbsession.add(auth_record)
            global_dbsession.commit()
            self.logger.info(
                "PlImporter: Imported authority (vini site) %s"
                % auth_record)
            # bugfix: the original passed an undefined 'site_record';
            # remember the record we just created
            self.remember_record(auth_record)
def provision(self, urns, options=None):
    """
    Provision previously-allocated slivers: resolve the slice, update
    its users, mark the sliver allocations 'geni_provisioned' and
    return a describe() manifest.

    :raises Forbidden: when no slice record can be located for the
        requested sliver.
    """
    if options is None:
        options = {}
    # update users
    slices = PlSlices(self)
    aggregate = PlAggregate(self)
    slivers = aggregate.get_slivers(urns)
    if not slivers:
        # no sliver found: resolve the slice from the first urn instead
        sliver_id_parts = Xrn(urns[0]).get_sliver_id_parts()
        slice_filter = {}
        try:
            slice_filter['slice_id'] = int(sliver_id_parts[0])
        except ValueError:
            slice_filter['name'] = sliver_id_parts[0]
        # bugfix: the result used to be bound to 'slices', clobbering the
        # PlSlices helper that get_sfa_peer/verify_persons need below
        slice_records = self.shell.GetSlices(slice_filter, ['hrn'])
        if not slice_records:
            # bugfix: the error message referenced an undefined name 'xrn'
            raise Forbidden(
                "Unable to locate slice record for sliver: %s" % urns[0])
        slice = slice_records[0]
        slice_urn = hrn_to_urn(slice['hrn'], type='slice')
        urns = [slice_urn]
    else:
        slice_id = slivers[0]['slice_id']
        slice_hrn = self.shell.GetSliceHrn(slice_id)
        slice = self.shell.GetSlices({'slice_id': slice_id})[0]
        slice['hrn'] = slice_hrn
    sfa_peer = slices.get_sfa_peer(slice['hrn'])
    users = options.get('geni_users', [])
    persons = slices.verify_persons(slice['hrn'], slice, users, sfa_peer,
                                    options=options)
    # update sliver allocation states and set them to geni_provisioned
    sliver_ids = [sliver['sliver_id'] for sliver in slivers]
    dbsession = self.api.dbsession()
    SliverAllocation.set_allocations(sliver_ids, 'geni_provisioned', dbsession)
    version_manager = VersionManager()
    rspec_version = version_manager.get_version(options['geni_rspec_version'])
    return self.describe(urns, rspec_version, options=options)
def get_slice_and_slivers(self, slice_xrn):
    """
    Returns a dict of slivers keyed on the sliver's node_id

    Returns a (slice, slivers) tuple; both are empty/None when the
    xrn is missing or the slice is unknown.
    """
    slivers = {}
    slice = None
    if not slice_xrn:
        return (slice, slivers)
    # NOTE(review): slice_urn appears unused below — confirm before removing
    slice_urn = hrn_to_urn(slice_xrn, 'slice')
    slice_hrn, _ = urn_to_hrn(slice_xrn)
    slice_name = hrn_to_nitos_slicename(slice_hrn)
    slices = self.driver.shell.getSlices({'slice_name': slice_name}, [])
    # filter results: pick the exact name match
    for slc in slices:
        if slc['slice_name'] == slice_name:
            slice = slc
            break
    if not slice:
        return (slice, slivers)
    reserved_nodes = self.driver.shell.getReservedNodes(
        {'slice_id': slice['slice_id']}, [])
    reserved_node_ids = []
    # filter on the slice
    for node in reserved_nodes:
        if node['slice_id'] == slice['slice_id']:
            reserved_node_ids.append(node['node_id'])
    # get all the nodes and keep only the reserved ones
    all_nodes = self.driver.shell.getNodes({}, [])
    for node in all_nodes:
        if node['node_id'] in reserved_node_ids:
            slivers[node['node_id']] = node
    return (slice, slivers)
def get_nodes(self, slice_xrn, slice=None, slivers=None, options=None):
    """
    Build the list of RSpec node elements for an advertisement (no
    slice_xrn: all testbed nodes) or a manifest (slice_xrn given:
    only the slice's slivers).

    :param slice_xrn: slice xrn, or None for an advertisement.
    :param slice: slice dict matching slice_xrn (may be None).
    :param slivers: dict of sliver dicts keyed on node_id.
    :param options: extra options (currently unused).
    :returns: list of NodeElement objects.
    """
    if slivers is None:
        slivers = {}
    if options is None:
        options = {}
    # if we are dealing with a slice that has no node just return
    # and empty list
    if slice_xrn:
        if not slice or not slivers:
            return []
        else:
            nodes = [slivers[sliver] for sliver in slivers]
    else:
        nodes = self.driver.shell.getNodes({}, [])

    # get the granularity in second for the reservation system
    grain = self.driver.testbedInfo['grain']
    #grain = 1800

    rspec_nodes = []
    for node in nodes:
        rspec_node = NodeElement()
        site_name = self.driver.testbedInfo['name']
        rspec_node['component_id'] = hostname_to_urn(
            self.driver.hrn, site_name, node['hostname'])
        rspec_node['component_name'] = node['hostname']
        rspec_node['component_manager_id'] = Xrn(
            self.driver.hrn, 'authority+cm').get_urn()
        rspec_node['authority_id'] = hrn_to_urn(
            NitosXrn.site_hrn(self.driver.hrn, site_name), 'authority+sa')
        # do not include boot state (<available> element) in the manifest rspec
        #if not slice:
        #    rspec_node['boot_state'] = node['boot_state']
        # nodes are reserved whole, never shared
        rspec_node['exclusive'] = 'true'
        # site location
        longitude = self.driver.testbedInfo['longitude']
        latitude = self.driver.testbedInfo['latitude']
        if longitude and latitude:
            location = Location({'longitude': longitude,
                                 'latitude': latitude,
                                 'country': 'unknown'})
            rspec_node['location'] = location
        # 3D position
        position_3d = Position3D({'x': node['position']['X'],
                                  'y': node['position']['Y'],
                                  'z': node['position']['Z']})
        #position_3d = Position3D({'x': 1, 'y': 2, 'z': 3})
        rspec_node['position_3d'] = position_3d
        # Granularity
        granularity = Granularity({'grain': grain})
        rspec_node['granularity'] = granularity
        # HardwareType
        rspec_node['hardware_type'] = node['node_type']
        #rspec_node['hardware_type'] = "orbit"
        # slivers: attach sliver info for nodes allocated to the slice
        if node['node_id'] in slivers:
            # add sliver info
            sliver = slivers[node['node_id']]
            rspec_node['sliver_id'] = sliver['node_id']
            rspec_node['client_id'] = node['hostname']
            rspec_node['slivers'] = [sliver]
        rspec_nodes.append(rspec_node)
    return rspec_nodes
def create(self, opts, args):
    """
    Create slivers for a slice (args[0] = slice hrn) from an RSpec file
    (args[1]) by calling CreateSliver on the selected aggregate.

    Resolves the slice's researchers through the registry so their keys
    can be passed along; converts the rspec to protogeni format when the
    server is not an sfa implementation. Prints or saves the resulting
    manifest depending on opts.file.
    """
    server = self.get_server_from_opts(opts)
    server_version = self.get_cached_server_version(server)
    slice_hrn = args[0]
    slice_urn = hrn_to_urn(slice_hrn, 'slice')
    user_cred = self.get_user_cred()
    slice_cred = self.get_slice_cred(slice_hrn).save_to_string(save_parents=True)
    # delegate the cred to the callers root authority
    delegated_cred = self.delegate_cred(slice_cred, get_authority(self.authority)+'.slicemanager')
    #delegated_cred = self.delegate_cred(slice_cred, get_authority(slice_hrn))
    #creds.append(delegated_cred)
    rspec_file = self.get_rspec_file(args[1])
    rspec = open(rspec_file).read()

    # need to pass along user keys to the aggregate.
    # users = [
    #   { urn: urn:publicid:IDN+emulab.net+user+alice
    #     keys: [<ssh key A>, <ssh key B>]
    #   }]
    users = []
    slice_records = self.registry.Resolve(slice_urn, [user_cred.save_to_string(save_parents=True)])
    if slice_records and 'researcher' in slice_records[0] and slice_records[0]['researcher']!=[]:
        slice_record = slice_records[0]
        user_hrns = slice_record['researcher']
        user_urns = [hrn_to_urn(hrn, 'user') for hrn in user_hrns]
        user_records = self.registry.Resolve(user_urns, [user_cred.save_to_string(save_parents=True)])
        if 'sfa' not in server_version:
            # non-sfa (protogeni) server: convert users and rspec
            users = pg_users_arg(user_records)
            rspec = RSpec(rspec)
            rspec.filter({'component_manager_id': server_version['urn']})
            rspec = RSpecConverter.to_pg_rspec(rspec.toxml(), content_type='request')
            creds = [slice_cred]
        else:
            users = sfa_users_arg(user_records, slice_record)
            creds = [slice_cred, delegated_cred]
    call_args = [slice_urn, creds, rspec, users]
    # older servers don't accept the call_id argument
    if self.server_supports_call_id_arg(server):
        call_args.append(unique_call_id())
    result = server.CreateSliver(*call_args)
    if opts.file is None:
        print result
    else:
        save_rspec_to_file (result, opts.file)
    return result
def _get_resources_urn(self, resources_hrn):
    """
    Builds list of resources' urn based on hrn.
    """
    return [hrn_to_urn(resource, 'node') for resource in resources_hrn]
def shutdown(self, opts, args):
    """
    Shut down the slice named in args[0] via the aggregate server,
    optionally delegating the slice credential first.
    """
    slice_hrn = args[0]
    slice_urn = hrn_to_urn(slice_hrn, 'slice')
    credential = self.get_slice_cred(slice_hrn).save_to_string(save_parents=True)
    creds = [credential]
    if opts.delegate:
        # add a credential delegated to our root authority
        creds.append(self.delegate_cred(credential,
                                        get_authority(self.authority)))
    server = self.get_server_from_opts(opts)
    return server.Shutdown(slice_urn, creds)
def import_sites_and_nodes(self, testbed_shell):
    """
    Gets all the sites and nodes from OAR, process the information,
    creates hrns and RegAuthority for sites, and feed them to the database.
    For each site, import the site's nodes to the DB by calling
    import_nodes.

    :param testbed_shell: IotlabDriver object, used to have access to
        testbed_shell methods and fetching info on sites and nodes.
    :type testbed_shell: IotlabDriver
    """
    sites_listdict = testbed_shell.GetSites()
    nodes_listdict = testbed_shell.GetNodes()
    # index nodes by id for quick lookup in import_nodes
    nodes_by_id = dict([(node['node_id'], node)
                        for node in nodes_listdict])
    for site in sites_listdict:
        site_hrn = site['name']
        site_record = self.find_record_by_type_hrn('authority', site_hrn)
        self.logger.info("IotlabImporter: import_sites_and_nodes \
            (site) %s \r\n " % site_record)
        if not site_record:
            try:
                urn = hrn_to_urn(site_hrn, 'authority')
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                # pointer='-1' marks records with no testbed counterpart
                site_record = \
                    RegAuthority(hrn=site_hrn,
                                 gid=auth_info.get_gid_object(),
                                 pointer='-1',
                                 authority=get_authority(site_hrn))
                site_record.just_created()
                global_dbsession.add(site_record)
                global_dbsession.commit()
                self.logger.info("IotlabImporter: imported authority \
                    (site) %s" % site_record)
                self.update_just_added_records_dict(site_record)
            except SQLAlchemyError:
                # if the site import fails then there is no point in
                # trying to import the
                # site's child records(node, slices, persons), so skip them.
                self.logger.log_exc("IotlabImporter: failed to import \
                    site. Skipping child records")
                continue
        else:
            # xxx update the record ...
            pass
        # mark the record as still present in the testbed
        site_record.stale = False
        self.import_nodes(site['node_ids'], nodes_by_id, testbed_shell)
    return
def import_sites_and_nodes(self, testbed_shell):
    """
    Gets all the sites and nodes from OAR, process the information,
    creates hrns and RegAuthority for sites, and feed them to the database.
    For each site, import the site's nodes to the DB by calling
    import_nodes.

    :param testbed_shell: IotlabDriver object, used to have access to
        testbed_shell methods and fetching info on sites and nodes.
    :type testbed_shell: IotlabDriver
    """
    # NOTE(review): this is a near-verbatim duplicate of another
    # import_sites_and_nodes definition in this file -- consider
    # consolidating.
    sites_listdict = testbed_shell.GetSites()
    nodes_listdict = testbed_shell.GetNodes()
    # index nodes by node_id so import_nodes can look them up cheaply
    nodes_by_id = dict([(node['node_id'], node) for node in nodes_listdict])
    for site in sites_listdict:
        site_hrn = site['name']
        site_record = self.find_record_by_type_hrn ('authority', site_hrn)
        self.logger.info("IotlabImporter: import_sites_and_nodes \ (site) %s \r\n " % site_record)
        if not site_record:
            try:
                urn = hrn_to_urn(site_hrn, 'authority')
                # make sure the authority (and its gid) exists before
                # creating the registry record for it
                if not self.auth_hierarchy.auth_exists(urn):
                    self.auth_hierarchy.create_auth(urn)
                auth_info = self.auth_hierarchy.get_auth_info(urn)
                site_record = \
                    RegAuthority(hrn=site_hrn,
                                 gid=auth_info.get_gid_object(),
                                 pointer='-1',
                                 authority=get_authority(site_hrn))
                site_record.just_created()
                global_dbsession.add(site_record)
                global_dbsession.commit()
                self.logger.info("IotlabImporter: imported authority \ (site) %s" % site_record)
                self.update_just_added_records_dict(site_record)
            except SQLAlchemyError:
                # if the site import fails then there is no point in
                # trying to import the
                # site's child records(node, slices, persons), so skip them.
                self.logger.log_exc("IotlabImporter: failed to import \ site. Skipping child records")
                continue
        else:
            # xxx update the record ...
            pass
        # mark the record live for this import run, then pull in the
        # site's nodes
        site_record.stale = False
        self.import_nodes(site['node_ids'], nodes_by_id, testbed_shell)
    return
def encode(self):
    """Encode this GID's identity into the certificate's subjectAltName.

    Uses the pre-set urn when available, otherwise derives one from the
    hrn with no type. When a uuid is set, it is appended as a second
    URI entry in RFC 4122 urn form.
    """
    if self.urn:
        urn = self.urn
    else:
        urn = hrn_to_urn(self.hrn, None)

    # build the extension value in a local that does not shadow the
    # builtin `str` (the original used `str` as the variable name)
    alt_name = "URI:" + urn
    if self.uuid:
        alt_name += ", " + "URI:" + uuid.UUID(int=self.uuid).urn
    self.set_data(alt_name, 'subjectAltName')
def list_slices(self, creds, options):
    """Return the slice urn of every Vct entity in the repository.

    creds and options are accepted for interface compatibility but not
    used. An empty list is returned when no Vct entities exist.
    """
    urns = []
    try:
        for vct in self.repo.list_entities(Vct):
            slice_urn = hrn_to_urn(vct.commonName, 'slice')
            logger.debug("***********************VCT commonName: %s | urn: %s" % (vct.commonName, hrn_to_urn(vct.commonName, 'slice')))
            urns.append(slice_urn)
    except NoEntityFound:
        # an empty repository is not an error for the caller
        logger.error("***********************No VCTs found")
    logger.debug("***********************VCT_urns: %s" % urns)
    return urns
def get_leases(self, slice=None, options=None):
    """Return current and future leases as rspec Lease elements.

    :param slice: optional slice record; when given, only leases whose
        name matches slice['name'] are returned.
    :param options: unused, kept for interface compatibility.
    :returns: list of Lease elements carrying component_id, slice_id,
        start_time and duration (expressed in grain units).
    """
    # avoid the shared mutable default (was options={})
    if options is None:
        options = {}
    now = int(time.time())
    # 'clip' asks the shell to drop leases that have already ended;
    # renamed from `filter`, which shadowed the builtin
    lease_filter = {'clip': now}
    if slice:
        lease_filter['name'] = slice['name']
    leases = self.driver.shell.GetLeases(lease_filter)
    grain = self.driver.shell.GetLeaseGranularity()

    site_ids = [lease['site_id'] for lease in leases]
    # get sites
    # xxx how to retrieve site['login_base'] -- fetched but not yet used
    sites_dict = self.get_sites({'site_id': site_ids})

    rspec_leases = []
    for lease in leases:
        rspec_lease = Lease()
        rspec_lease['component_id'] = hrn_to_urn(
            self.driver.shell.GetNodeHrn(lease['hostname']), 'node')
        slice_hrn = self.driver.shell.GetSliceHrn(lease['slice_id'])
        rspec_lease['slice_id'] = hrn_to_urn(slice_hrn, 'slice')
        rspec_lease['start_time'] = lease['t_from']
        rspec_lease['duration'] = (lease['t_until'] - lease['t_from']) / grain
        rspec_leases.append(rspec_lease)
    return rspec_leases
def __init__(self, create=False, subject=None, string=None, filename=None,
             uuid=None, hrn=None, urn=None, lifeDays=1825):
    # GID constructor: a GID is a certificate that carries an identity
    # (hrn, urn, uuid). Defaults to a 1825-day (~5 year) lifetime.
    # NOTE(review): Certificate.__init__ is called with lifeDays as the
    # first argument -- confirm this matches the base-class signature.
    Certificate.__init__(self, lifeDays, create, subject, string, filename)
    if subject:
        logger.debug("Creating GID for subject: %s" % subject)
    if uuid:
        # stored as an int; encode() later renders it in urn form
        self.uuid = int(uuid)
    if hrn:
        self.hrn = hrn
        # no type information available from a bare hrn, hence 'unknown'
        self.urn = hrn_to_urn(hrn, 'unknown')
    if urn:
        # a urn wins over a bare hrn: it carries the type as well
        self.urn = urn
        self.hrn, type = urn_to_hrn(urn)
def sfa_client(request, method, hrn=None, urn=None, object_type=None,
               rspec=None, recursive=False, options=None, platforms=None,
               output_format=None, admin=False):
    # Entry point for SFA calls issued on behalf of a web request.
    # NOTE(review): this definition appears truncated here -- only the
    # argument normalization is visible; the actual dispatch of `method`
    # is not.
    Config = ConfigParser.ConfigParser()
    monitor_file = os.path.abspath(
        os.path.dirname(__file__) + '/../myslice/monitor.ini')
    Config.read(monitor_file)
    if admin:
        # admin calls authenticate with the manifold admin account
        user_email, admin_password = config.manifold_admin_user_password()
    else:
        #logger.debug(request.session['user']['email'])
        user_email = request.session['user']['email']
    results = dict()
    # normalize every optional argument to a usable default value
    if hrn is None:
        hrn = ''
    if urn is None:
        urn = ''
    if object_type is None:
        object_type = ''
    if rspec is None:
        rspec = ''
    else:
        logger.debug("RSPEC = %s" % rspec)
    if recursive is None:
        recursive = False
    if options is None:
        options = dict()
    if platforms is None:
        platforms = list()
    if method not in ['GetVersion', 'ListResources']:
        # every other method targets one object: derive the hrn from the
        # urn, or the urn from hrn + object_type
        try:
            if not hrn:
                hrn = urn_to_hrn(urn)
            else:
                urn = hrn_to_urn(hrn, object_type)
        except Exception, e:
            logger.error(e)
            raise Exception, "Provide urn OR hrn + type as parameters of method %s" % method
def node_to_rspec_node(self, node, sites, interfaces, node_tags,
                       pl_initscripts=None, grain=None, options=None):
    """Convert a PLC node record into an rspec NodeElement.

    sites, interfaces and node_tags are dicts keyed on their respective
    ids and must already have been fetched by the caller.
    """
    if pl_initscripts is None: pl_initscripts=[]
    # NOTE(review): the [] default breaks the .values() call further
    # down, which expects a dict -- confirm callers always pass a dict.
    if options is None: options={}
    rspec_node = NodeElement()
    # xxx how to retrieve site['login_base']
    site=sites[node['site_id']]
    rspec_node['component_id'] = hostname_to_urn(self.driver.hrn, site['login_base'], node['hostname'])
    rspec_node['component_name'] = node['hostname']
    rspec_node['component_manager_id'] = Xrn(self.driver.hrn, 'authority+cm').get_urn()
    rspec_node['authority_id'] = hrn_to_urn(PlXrn.site_hrn(self.driver.hrn, site['login_base']), 'authority+sa')
    # do not include boot state (<available> element) in the manifest rspec
    rspec_node['boot_state'] = node['boot_state']
    if node['boot_state'] == 'boot':
        rspec_node['available'] = 'true'
    else:
        rspec_node['available'] = 'false'
    #distinguish between Shared and Reservable nodes
    if node['node_type'] == 'reservable':
        rspec_node['exclusive'] = 'true'
    else:
        rspec_node['exclusive'] = 'false'
    rspec_node['hardware_types'] = [HardwareType({'name': 'plab-pc'}),
                                    HardwareType({'name': 'pc'})]
    # only doing this because protogeni rspec needs
    # to advertise available initscripts
    rspec_node['pl_initscripts'] = pl_initscripts.values()
    # add site/interface info to nodes.
    # assumes that sites, interfaces and tags have already been prepared.
    if site['longitude'] and site['latitude']:
        location = Location({'longitude': site['longitude'], 'latitude': site['latitude'], 'country': 'unknown'})
        rspec_node['location'] = location
    # Granularity
    granularity = Granularity({'grain': grain})
    rspec_node['granularity'] = granularity
    rspec_node['interfaces'] = []
    if_count=0
    for if_id in node['interface_ids']:
        interface = Interface(interfaces[if_id])
        interface['ipv4'] = interface['ip']
        interface['component_id'] = PlXrn(auth=self.driver.hrn, interface='node%s:eth%s' % (node['node_id'], if_count)).get_urn()
        # interfaces in the manifest need a client id
        # NOTE(review): `slice` here resolves to the builtin type, which
        # is always truthy, so client_id is always set -- confirm intent.
        if slice:
            interface['client_id'] = "%s:%s" % (node['node_id'], if_id)
        rspec_node['interfaces'].append(interface)
        if_count+=1
    tags = [PLTag(node_tags[tag_id]) for tag_id in node['node_tag_ids'] if tag_id in node_tags]
    rspec_node['tags'] = tags
    return rspec_node
def delete(self, opts, args):
    """Delete the sliver for the slice named in args[0].

    Builds the credential list (optionally adding a delegated
    credential), appends a unique call id when the server supports it,
    and invokes DeleteSliver.
    """
    hrn = args[0]
    urn = hrn_to_urn(hrn, 'slice')
    cred = self.get_slice_cred(hrn).save_to_string(save_parents=True)
    credentials = [cred]
    if opts.delegate:
        credentials.append(
            self.delegate_cred(cred, get_authority(self.authority)))
    server = self.get_server_from_opts(opts)
    call_args = [urn, credentials]
    if self.server_supports_call_id_arg(server):
        call_args.append(unique_call_id())
    return server.DeleteSliver(*call_args)
def import_slice(self, slice_hrn, slice_record, user_record):
    """
    Create RegSlice record according to the slice hrn if the slice
    does not exist yet.Creates a relationship with the user record
    associated with the slice.
    Commit the record to the database.

    :param slice_hrn: Human readable name of the slice.
    :type slice_hrn: string
    :param slice_record: record of the slice found in the DB, if any.
    :type slice_record: RegSlice or None
    :param user_record: user record found in the DB if any.
    :type user_record: RegUser

    .. todo::Update the record if a slice record already exists.
    """
    if not slice_record:
        # fresh keypair for the new slice's gid
        pkey = Keypair(create=True)
        urn = hrn_to_urn(slice_hrn, 'slice')
        slice_gid = \
            self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        slice_record = RegSlice(hrn=slice_hrn, gid=slice_gid,
                                pointer='-1',
                                authority=get_authority(slice_hrn))
        try:
            slice_record.just_created()
            global_dbsession.add(slice_record)
            global_dbsession.commit()
            self.update_just_added_records_dict(slice_record)
        except SQLAlchemyError:
            self.logger.log_exc("IotlabImporter: failed to import slice")
    #No slice update upon import in iotlab
    else:
        # xxx update the record ...
        self.logger.warning("Iotlab Slice update not implemented")

    # record current users affiliated with the slice
    slice_record.reg_researchers = [user_record]
    try:
        global_dbsession.commit()
        # only clear the stale flag when the commit succeeded
        slice_record.stale = False
    except SQLAlchemyError:
        self.logger.log_exc("IotlabImporter: failed to update slice")
def import_slice(self, slice_hrn, slice_record, user_record):
    """
    Create RegSlice record according to the slice hrn if the slice
    does not exist yet.Creates a relationship with the user record
    associated with the slice.
    Commit the record to the database.

    :param slice_hrn: Human readable name of the slice.
    :type slice_hrn: string
    :param slice_record: record of the slice found in the DB, if any.
    :type slice_record: RegSlice or None
    :param user_record: user record found in the DB if any.
    :type user_record: RegUser

    .. todo::Update the record if a slice record already exists.
    """
    # NOTE(review): duplicate of another import_slice definition in this
    # file -- consider consolidating.
    if not slice_record:
        # fresh keypair for the new slice's gid
        pkey = Keypair(create=True)
        urn = hrn_to_urn(slice_hrn, 'slice')
        slice_gid = \
            self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
        slice_record = RegSlice(hrn=slice_hrn, gid=slice_gid,
                                pointer='-1',
                                authority=get_authority(slice_hrn))
        try:
            slice_record.just_created()
            global_dbsession.add(slice_record)
            global_dbsession.commit()
            self.update_just_added_records_dict(slice_record)
        except SQLAlchemyError:
            self.logger.log_exc("IotlabImporter: failed to import slice")
    #No slice update upon import in iotlab
    else:
        # xxx update the record ...
        self.logger.warning("Iotlab Slice update not implemented")

    # record current users affiliated with the slice
    slice_record.reg_researchers = [user_record]
    try:
        global_dbsession.commit()
        # only clear the stale flag when the commit succeeded
        slice_record.stale = False
    except SQLAlchemyError:
        self.logger.log_exc("IotlabImporter: failed to update slice")
def status(self, opts, args): slice_hrn = args[0] slice_urn = hrn_to_urn(slice_hrn, 'slice') slice_cred = self.get_slice_cred(slice_hrn).save_to_string(save_parents=True) creds = [slice_cred] if opts.delegate: delegated_cred = self.delegate_cred(slice_cred, get_authority(self.authority)) creds.append(delegated_cred) server = self.get_server_from_opts(opts) call_args = [slice_urn, creds] if self.server_supports_call_id_arg(server): call_args.append(unique_call_id()) result = server.SliverStatus(*call_args) print result if opts.file: save_variable_to_file(result, opts.file, opts.fileformat)
def node_to_rspec_node(self, node, options=None):
    """Convert a dummy-testbed node record into an rspec NodeElement.

    :param node: node dict carrying at least a 'hostname' key.
    :param options: unused, kept for interface compatibility.
    """
    # avoid the shared mutable default (was options={})
    if options is None:
        options = {}
    rspec_node = NodeElement()
    site = self.driver.testbedInfo
    rspec_node['component_id'] = hostname_to_urn(
        self.driver.hrn, site['name'], node['hostname'])
    rspec_node['component_name'] = node['hostname']
    rspec_node['component_manager_id'] = Xrn(
        self.driver.hrn, 'authority+cm').get_urn()
    rspec_node['authority_id'] = hrn_to_urn(
        DummyXrn.site_hrn(self.driver.hrn, site['name']), 'authority+sa')
    #distinguish between Shared and Reservable nodes
    rspec_node['exclusive'] = 'false'
    rspec_node['hardware_types'] = [HardwareType({'name': 'dummy-pc'}),
                                    HardwareType({'name': 'pc'})]
    # only attach a location when the site has coordinates
    if site['longitude'] and site['latitude']:
        rspec_node['location'] = Location({'longitude': site['longitude'],
                                           'latitude': site['latitude'],
                                           'country': 'unknown'})
    return rspec_node
def node_to_rspec_node(self, node, options=None):
    """Convert a unige-testbed node record into an rspec NodeElement,
    including its endpoint connection info and attached resources.

    :param node: node dict with hostname, ip, protocol, port and a
        'resources' list.
    :param options: unused, kept for interface compatibility.
    """
    # avoid the shared mutable default (was options={})
    if options is None:
        options = {}
    rspec_node = NodeElement()
    site = self.driver.testbedInfo
    rspec_node['component_id'] = hostname_to_urn(self.driver.hrn,
                                                 site['name'],
                                                 node['hostname'])
    rspec_node['component_name'] = node['hostname']
    # endpoint connection information
    rspec_node['ip'] = node['ip']
    rspec_node['protocol'] = node['protocol']
    rspec_node['port'] = node['port']
    rspec_node['component_manager_id'] = Xrn(self.driver.hrn,
                                             'authority+cm').get_urn()
    rspec_node['authority_id'] = hrn_to_urn(
        unigetestbedXrn.site_hrn(self.driver.hrn, site['name']),
        'authority+sa')
    #distinguish between Shared and Reservable nodes
    rspec_node['exclusive'] = 'false'
    rspec_node['hardware_types'] = [
        HardwareType({'name': 'endpoint'}),
        HardwareType({'name': 'sensor'})
    ]
    # one Resource element per sensor/actuator attached to the node
    rspec_node['resources'] = [
        Resource({
            'name': resource.get('name'),
            'path': resource.get('path'),
            'type': resource.get('type'),
            'unit': resource.get('unit'),
            'data_type': resource.get('datatype')
        }) for resource in node['resources']
    ]
    logger.info(rspec_node)
    if site['longitude'] and site['latitude']:
        rspec_node['location'] = Location({
            'longitude': site['longitude'],
            'latitude': site['latitude'],
            'country': 'unknown'
        })
    logger.info(rspec_node)
    return rspec_node
def get_ticket(self, opts, args):
    """Request a ticket for the slice args[0] using the rspec file
    args[1], and save it under <sfi_dir>/<slice leaf>.ticket.
    """
    slice_hrn, rspec_path = args[0], args[1]
    slice_urn = hrn_to_urn(slice_hrn, 'slice')
    # get_user_cred may fetch/cache the credential; keep the call even
    # though its return value is not used directly here
    user_cred = self.get_user_cred()
    slice_cred = self.get_slice_cred(slice_hrn).save_to_string(save_parents=True)
    creds = [slice_cred]
    if opts.delegate:
        delegated_cred = self.delegate_cred(slice_cred, get_authority(self.authority))
        creds.append(delegated_cred)
    rspec_file = self.get_rspec_file(rspec_path)
    # close the rspec file promptly (was a bare open().read() leak)
    with open(rspec_file) as f:
        rspec = f.read()
    server = self.get_server_from_opts(opts)
    ticket_string = server.GetTicket(slice_urn, creds, rspec, [])
    # renamed from `file`, which shadowed the builtin
    ticket_path = os.path.join(self.options.sfi_dir, get_leaf(slice_hrn) + ".ticket")
    self.logger.info("writing ticket to %s" % ticket_path)
    ticket = SfaTicket(string=ticket_string)
    ticket.save_to_file(filename=ticket_path, save_parents=True)
def get_credentials(self):
    """Collect delegated credentials for the current user: the user
    credential, an authority credential when obtainable, and one slice
    credential per slice the user researches.
    """
    # Getting the list of slices in which user_hrn is a researcher
    user_cred = self.my_credential_string
    records = self.registry.Resolve(hrn_to_urn(self.user_hrn, 'user'), user_cred)
    if not records:
        raise Exception, "Cannot retrieve slice information for %s" % self.user_hrn
    record = records[0]
    slices = record['reg-slices']
    creds = []
    #c = {
    #    'target': self.user_hrn,
    #    'type': 'user',
    #    'cred': self.delegate('user', self.user_hrn)
    #}
    c = self.delegate('user', self.user_hrn)
    creds.append(c)
    try:
        user_auth = get_authority(self.user_hrn)
        #c = {
        #    'target': user_auth,
        #    'type': 'authority',
        #    'cred': self.delegate('authority', user_auth)
        #}
        c = self.delegate('authority', user_auth)
        creds.append(c)
    except Exception:
        # best-effort: not every user can delegate an authority credential
        print "I: No authority credential."
    for s in slices:
        #c = {
        #    'target': s,
        #    'type': 'slice',
        #    'cred': self.delegate('slice', s)
        #}
        c = self.delegate('slice', s)
        creds.append(c)
    return creds
def create_auth(self, xrn, create_parents=False):
    """Create the on-disk material (directory, gid, private key) for an
    authority, optionally creating missing parent authorities first.

    NOTE(review): this definition may be truncated here -- only the
    directory creation is visible after the filename computation.
    """
    hrn, type = urn_to_hrn(str(xrn))
    logger.debug("Hierarchy: creating authority: %s"% hrn)

    # create the parent authority if necessary
    parent_hrn = get_authority(hrn)
    parent_urn = hrn_to_urn(parent_hrn, 'authority')
    if (parent_hrn) and (not self.auth_exists(parent_urn)) and (create_parents):
        # recurse all the way up to the root authority
        self.create_auth(parent_urn, create_parents)
    (directory, gid_filename, privkey_filename,) = \
        self.get_auth_filenames(hrn)

    # create the directory to hold the files
    try:
        os.makedirs(directory)
    # if the path already exists then pass
    except OSError, (errno, strerr):
        if errno == 17:
            pass
def create_sm_client_record(self):
    """
    Create a user record for the Slicemanager service.
    """
    sm_hrn = self.interface_hrn + '.slicemanager'
    sm_urn = hrn_to_urn(sm_hrn, 'user')
    # make sure the backing authority exists before touching records
    if not self.auth_hierarchy.auth_exists(sm_urn):
        self.logger.info("SfaImporter: creating Slice Manager user")
        self.auth_hierarchy.create_auth(sm_urn)
    # nothing to do when the registry record is already there
    if self.record_exists('user', sm_hrn):
        return
    auth_info = self.auth_hierarchy.get_auth_info(sm_hrn)
    record = RegUser(hrn=sm_hrn,
                     gid=auth_info.get_gid_object(),
                     authority=get_authority(sm_hrn))
    record.just_created()
    global_dbsession.add(record)
    global_dbsession.commit()
    self.logger.info("SfaImporter: importing user (slicemanager) %s " % record)
def provision(self, urns, options=None):
    """Provision the slivers named by urns: refresh the slice's users,
    mark sliver allocations geni_provisioned, and return a describe()
    manifest.

    :param urns: list of sliver or slice urns.
    :param options: geni options; must carry 'geni_rspec_version' and
        may carry 'geni_users'.
    """
    # avoid the shared mutable default (was options={})
    if options is None:
        options = {}
    # update users
    slices = PlSlices(self)
    aggregate = PlAggregate(self)
    slivers = aggregate.get_slivers(urns)
    if not slivers:
        # no slivers yet: resolve the slice itself from the first urn.
        # Renamed `filter` (shadowed the builtin) and kept the query
        # result out of `slices`, which must remain the PlSlices helper
        # used below.
        sliver_id_parts = Xrn(urns[0]).get_sliver_id_parts()
        slice_filter = {}
        try:
            slice_filter['slice_id'] = int(sliver_id_parts[0])
        except ValueError:
            slice_filter['name'] = sliver_id_parts[0]
        slice_records = self.shell.GetSlices(slice_filter, ['hrn'])
        if not slice_records:
            # was "% xrn": a NameError -- report the urn we looked up
            raise Forbidden(
                "Unable to locate slice record for sliver: %s" % urns[0])
        slice = slice_records[0]
        slice_urn = hrn_to_urn(slice['hrn'], type='slice')
        urns = [slice_urn]
    else:
        slice_id = slivers[0]['slice_id']
        slice_hrn = self.shell.GetSliceHrn(slice_id)
        slice = self.shell.GetSlices({'slice_id': slice_id})[0]
        slice['hrn'] = slice_hrn

    sfa_peer = slices.get_sfa_peer(slice['hrn'])
    users = options.get('geni_users', [])
    persons = slices.verify_persons(slice['hrn'], slice, users,
                                    sfa_peer, options=options)

    # update sliver allocation states and set them to geni_provisioned
    sliver_ids = [sliver['sliver_id'] for sliver in slivers]
    dbsession = self.api.dbsession()
    SliverAllocation.set_allocations(sliver_ids, 'geni_provisioned',
                                     dbsession)
    version_manager = VersionManager()
    rspec_version = version_manager.get_version(
        options['geni_rspec_version'])
    return self.describe(urns, rspec_version, options=options)
def create_gid(self, xrn, uuid, pkey, CA=False, email=None):
    """Create and sign a GID for xrn, issued by its parent authority
    (or self-signed at the hierarchy root).

    :param xrn: hrn or urn of the object the gid describes.
    :param uuid: uuid embedded in the gid.
    :param pkey: keypair whose public part goes into the gid.
    :param CA: force an intermediate-CA certificate when True.
    :param email: optional email address embedded in the gid.
    :returns: the signed GID object.
    """
    hrn, type = urn_to_hrn(xrn)
    if not type:
        type = 'authority'
    parent_hrn = get_authority(hrn)
    # Using hrn_to_urn() here to make sure the urn is in the right format
    # If xrn was a hrn instead of a urn, then the gid's urn will be
    # of type None
    urn = hrn_to_urn(hrn, type)
    gid = GID(subject=hrn, uuid=uuid, hrn=hrn, urn=urn, email=email)
    # is this a CA cert
    if hrn == self.config.SFA_INTERFACE_HRN or not parent_hrn:
        # root or sub authority
        gid.set_intermediate_ca(True)
    elif type and 'authority' in type:
        # authority type
        gid.set_intermediate_ca(True)
    elif CA:
        gid.set_intermediate_ca(True)
    else:
        gid.set_intermediate_ca(False)

    # set issuer
    if not parent_hrn or hrn == self.config.SFA_INTERFACE_HRN:
        # if there is no parent hrn, then it must be self-signed. this
        # is where we terminate the recursion
        gid.set_issuer(pkey, hrn)
    else:
        # we need the parent's private key in order to sign this GID
        parent_auth_info = self.get_auth_info(parent_hrn)
        gid.set_issuer(parent_auth_info.get_pkey_object(), parent_auth_info.hrn)
        gid.set_parent(parent_auth_info.get_gid_object())

    gid.set_pubkey(pkey)
    gid.encode()
    gid.sign()
    return gid
def get_slice_and_slivers(self, slice_xrn):
    """
    Return (slice, slivers) for slice_xrn, where slivers is a dict of
    reserved node records keyed on each node's node_id.

    Returns (None, {}) when no xrn is given or no matching slice exists.
    """
    slivers = {}
    # renamed from `slice`, which shadowed the builtin
    sfa_slice = None
    if not slice_xrn:
        return (sfa_slice, slivers)
    slice_hrn, _ = urn_to_hrn(slice_xrn)
    slice_name = hrn_to_nitos_slicename(slice_hrn)
    slices = self.driver.shell.getSlices({'slice_name': slice_name}, [])
    #filter results
    for slc in slices:
        if slc['slice_name'] == slice_name:
            sfa_slice = slc
            break
    if not sfa_slice:
        return (sfa_slice, slivers)

    reserved_nodes = self.driver.shell.getReservedNodes(
        {'slice_id': sfa_slice['slice_id']}, [])
    # filter on the slice
    reserved_node_ids = [node['node_id'] for node in reserved_nodes
                         if node['slice_id'] == sfa_slice['slice_id']]
    #get all the nodes, keeping only those reserved for this slice
    all_nodes = self.driver.shell.getNodes({}, [])
    for node in all_nodes:
        if node['node_id'] in reserved_node_ids:
            slivers[node['node_id']] = node
    return (sfa_slice, slivers)
def import_persons_and_slices(self, testbed_shell):
    """
    Gets user data from LDAP, process the information.
    Creates hrn for the user's slice, the user's gid, creates
    the RegUser record associated with user. Creates the RegKey record
    associated nwith the user's key.
    Saves those records into the SFA DB.
    import the user's slice onto the database as well by calling
    import_slice.

    :param testbed_shell: IotlabDriver object, used to have access to
        testbed_shell attributes.
    :type testbed_shell: IotlabDriver

    .. warning:: does not support multiple keys per user
    """
    ldap_person_listdict = testbed_shell.GetPersons()
    self.logger.info("IOTLABIMPORT \t ldap_person_listdict %s \r\n" % (ldap_person_listdict))
    # import persons
    for person in ldap_person_listdict:
        self.logger.info("IotlabImporter: person :" % (person))
        if 'ssh-rsa' not in person['pkey']:
            #people with invalid ssh key (ssh-dss, empty, bullshit keys...)
            #won't be imported
            continue
        person_hrn = person['hrn']
        slice_hrn = self.slicename_to_hrn(person['hrn'])
        # xxx suspicious again -- hrns longer than 64 chars are truncated
        if len(person_hrn) > 64:
            person_hrn = person_hrn[:64]
        person_urn = hrn_to_urn(person_hrn, 'user')
        self.logger.info("IotlabImporter: users_rec_by_email %s " % (self.users_rec_by_email))
        #Check if user using person['email'] from LDAP is already registered
        #in SFA. One email = one person. In this case, do not create another
        #record for this person
        #person_hrn returned by GetPerson based on iotlab root auth +
        #uid ldap
        user_record = self.find_record_by_type_hrn('user', person_hrn)
        if not user_record and person['email'] in self.users_rec_by_email:
            # reuse the existing record found by email
            user_record = self.users_rec_by_email[person['email']]
            person_hrn = user_record.hrn
            person_urn = hrn_to_urn(person_hrn, 'user')

        slice_record = self.find_record_by_type_hrn('slice', slice_hrn)

        iotlab_key = person['pkey']
        # new person
        if not user_record:
            (pubkey, pkey) = self.init_person_key(person, iotlab_key)
            if pubkey is not None and pkey is not None:
                person_gid = \
                    self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                if person['email']:
                    self.logger.debug("IOTLAB IMPORTER \ PERSON EMAIL OK email %s " % (person['email']))
                    person_gid.set_email(person['email'])
                    user_record = \
                        RegUser(hrn=person_hrn, gid=person_gid,
                                pointer='-1',
                                authority=get_authority(person_hrn),
                                email=person['email'])
                else:
                    user_record = \
                        RegUser(hrn=person_hrn, gid=person_gid,
                                pointer='-1',
                                authority=get_authority(person_hrn))

                if pubkey:
                    user_record.reg_keys = [RegKey(pubkey)]
                else:
                    self.logger.warning("No key found for user %s" % (user_record))

                try:
                    user_record.just_created()
                    global_dbsession.add(user_record)
                    global_dbsession.commit()
                    self.logger.info("IotlabImporter: imported person \ %s" % (user_record))
                    self.update_just_added_records_dict(user_record)
                except SQLAlchemyError:
                    self.logger.log_exc("IotlabImporter: \ failed to import person  %s" % (person))
        else:
            # update the record ?
            # if user's primary key has changed then we need to update
            # the users gid by forcing an update here
            sfa_keys = user_record.reg_keys

            new_key = False
            # NOTE(review): identity comparison of a key string against
            # a list of RegKey records is always True, so every existing
            # user is treated as having a new key -- confirm intent.
            if iotlab_key is not sfa_keys:
                new_key = True
            if new_key:
                self.logger.info("IotlabImporter: \t \t USER UPDATE \ person: %s" % (person['hrn']))
                (pubkey, pkey) = self.init_person_key(person, iotlab_key)
                person_gid = \
                    self.auth_hierarchy.create_gid(person_urn, create_uuid(), pkey)
                if not pubkey:
                    user_record.reg_keys = []
                else:
                    user_record.reg_keys = [RegKey(pubkey)]
                self.logger.info("IotlabImporter: updated person: %s" % (user_record))

            if person['email']:
                user_record.email = person['email']

        try:
            global_dbsession.commit()
            # only clear the stale flag when the commit succeeded
            user_record.stale = False
        except SQLAlchemyError:
            self.logger.log_exc("IotlabImporter: \ failed to update person  %s" % (person))

        self.import_slice(slice_hrn, slice_record, user_record)
def get_slivers(self, urns, options={}):
    """Get slivers of the given slice urns. Slivers contains slice, node and
    user information.

    For Iotlab, returns the leases with sliver ids and their allocation
    status.

    :param urns: list of slice urns.
    :type urns: list of strings
    :param options: unused
    :type options: unused

    .. seealso:: http://groups.geni.net/geni/wiki/GAPI_AM_API_V3/CommonConcepts#urns
    """
    # NOTE(review): options={} is a shared mutable default -- harmless
    # while unused, but worth fixing.
    slice_ids = set()
    node_ids = []
    for urn in urns:
        xrn = IotlabXrn(xrn=urn)
        if xrn.type == 'sliver':
            # sliver id is of the form slice_id-node_id
            # id: slice_id-node_id
            try:
                sliver_id_parts = xrn.get_sliver_id_parts()
                slice_id = int(sliver_id_parts[0])
                node_id = int(sliver_id_parts[1])
                slice_ids.add(slice_id)
                node_ids.append(node_id)
            except ValueError:
                pass
        else:
            # NOTE(review): slice_names is re-created for each non-sliver
            # urn, and is unbound below when every urn is a sliver --
            # confirm callers always pass at least one slice urn.
            slice_names = set()
            slice_names.add(xrn.hrn)

    logger.debug("CortexlabAggregate \t get_slivers urns %s slice_ids %s \ node_ids %s\r\n" % (urns, slice_ids, node_ids))
    logger.debug("CortexlabAggregate \t get_slivers xrn %s slice_names %s \ \r\n" % (xrn, slice_names))
    filter_sliver = {}
    if slice_names:
        filter_sliver['slice_hrn'] = list(slice_names)
        slice_hrn = filter_sliver['slice_hrn'][0]

        slice_filter_type = 'slice_hrn'

    # if slice_ids:
    # filter['slice_id'] = list(slice_ids)
    # # get slices
    if slice_hrn:
        slices = self.driver.GetSlices(slice_hrn, slice_filter_type)
        leases = self.driver.GetLeases({'slice_hrn': slice_hrn})
        logger.debug("CortexlabAggregate \t get_slivers \ slices %s leases %s\r\n" % (slices, leases))
    if not slices:
        return []

    single_slice = slices[0]
    # get sliver users
    user = single_slice['reg_researchers'][0].__dict__
    logger.debug("CortexlabAggregate \t get_slivers user %s \ \r\n" % (user))

    # construct user key info
    person = self.driver.testbed_shell.ldap.LdapFindUser(record=user)
    logger.debug("CortexlabAggregate \t get_slivers person %s \ \r\n" % (person))
    # name = person['last_name']
    user['login'] = person['uid']
    user['user_urn'] = hrn_to_urn(user['hrn'], 'user')
    user['keys'] = person['pkey']

    try:
        node_ids = single_slice['node_ids']
        node_list = self.driver.testbed_shell.GetNodes(
            {'hostname': single_slice['node_ids']})
        node_by_hostname = dict([(node['hostname'], node)
                                 for node in node_list])
    except KeyError:
        # NOTE(review): when the slice has no 'node_ids',
        # node_by_hostname stays unbound and the loop below would raise
        # NameError on the first reserved node -- confirm leases are
        # always empty in that case.
        logger.warning("\t get_slivers No slivers in slice")
        # slice['node_ids'] = node_ids
        # nodes_dict = self.get_slice_nodes(slice, options)

    slivers = []
    for current_lease in leases:
        for hostname in current_lease['reserved_nodes']:
            node = {}
            node['slice_id'] = current_lease['slice_id']
            node['slice_hrn'] = current_lease['slice_hrn']
            slice_name = current_lease['slice_hrn'].split(".")[1]
            node['slice_name'] = slice_name
            index = current_lease['reserved_nodes'].index(hostname)
            node_id = current_lease['resource_ids'][index]
            # node['slice_name'] = user['login']
            # node.update(single_slice)
            more_info = node_by_hostname[hostname]
            node.update(more_info)
            # oar_job_id is the slice_id (lease_id)
            sliver_hrn = '%s.%s-%s' % (self.driver.hrn,
                                       current_lease['lease_id'], node_id)
            node['node_id'] = node_id
            node['expires'] = current_lease['t_until']
            node['sliver_id'] = Xrn(sliver_hrn, type='sliver').urn
            node['urn'] = node['sliver_id']
            node['services_user'] = [user]

            slivers.append(node)
    return slivers
def get_slice_and_slivers(self, slice_xrn, login=None):
    """
    Get the slices and the associated leases if any, from the cortexlab
    testbed. One slice can have mutliple leases.
    For each slice, get the nodes in the associated lease
    and create a sliver with the necessary info and insert it into the
    sliver dictionary, keyed on the node hostnames.
    Returns a dict of slivers based on the sliver's node_id.
    Called by get_rspec.


    :param slice_xrn: xrn of the slice
    :param login: user's login on cortexlab ldap

    :type slice_xrn: string
    :type login: string
    :returns: a list of slices dict and a list of Sliver object
    :rtype: (list, list)

    .. note: There is no real slivers in cortexlab, only leases. The goal
        is to be consistent with the SFA standard.
    """
    slivers = {}
    sfa_slice = None
    if slice_xrn is None:
        return (sfa_slice, slivers)
    slice_urn = hrn_to_urn(slice_xrn, 'slice')
    slice_hrn, _ = urn_to_hrn(slice_xrn)

    # GetSlices always returns a list, even if there is only one element
    slices = self.driver.GetSlices(slice_filter=str(slice_hrn),
                                   slice_filter_type='slice_hrn',
                                   login=login)

    logger.debug("CortexlabAggregate api \tget_slice_and_slivers \ slice_hrn %s \r\n slices %s self.driver.hrn %s" % (slice_hrn, slices, self.driver.hrn))
    if slices == []:
        return (sfa_slice, slivers)

    # sort slivers by node id , if there is a job
    #and therefore, node allocated to this slice
    # for sfa_slice in slices:
    sfa_slice = slices[0]
    try:
        node_ids_list = sfa_slice['node_ids']
    except KeyError:
        logger.log_exc("CORTEXLABAGGREGATE \t \ get_slice_and_slivers No nodes in the slice \ - KeyError ")
        node_ids_list = []
        # continue

    # one Sliver per node currently allocated to the slice
    for node in node_ids_list:
        sliver_xrn = Xrn(slice_urn, type='sliver', id=node)
        sliver_xrn.set_authority(self.driver.hrn)
        sliver = Sliver({'sliver_id': sliver_xrn.urn,
                         'name': sfa_slice['hrn'],
                         'type': 'cortexlab-node',
                         'tags': []})
        slivers[node] = sliver

    #Add default sliver attribute :
    #connection information for cortexlab, assuming it is the same ssh
    # connection process
    # look in ldap:
    ldap_username = self.find_ldap_username_from_slice(sfa_slice)
    if ldap_username is not None:
        # ssh connection details are not filled in yet, only the login
        ssh_access = None
        slivers['default_sliver'] = {'ssh': ssh_access,
                                     'login': ldap_username}

    logger.debug("CORTEXLABAGGREGATE api get_slice_and_slivers slivers %s " % (slivers))
    # NOTE(review): the early returns above yield a single slice (None)
    # while this returns the full slices list -- confirm callers handle
    # both shapes.
    return (slices, slivers)
def node_to_rspec_node(self, node):
    """
    Creates a rspec node structure with the appropriate information based
    on the node information that can be found in the node dictionary.

    :param node: node data. this dict contains information about the node
        and must have the following keys : mobile, radio, archi, hostname,
        boot_state, site, x, y ,z (position).
    :type node: dictionary.

    :returns: node dictionary containing the following keys : mobile,
        archi, radio, component_id, component_name, component_manager_id,
        authority_id, boot_state, exclusive, hardware_types, location,
        position, granularity, tags.
    :rtype: dict

    NOTE(review): this definition may be truncated here -- `grain` is
    computed but unused in the visible portion, and no `return` is
    visible.
    """
    grain = self.driver.testbed_shell.GetLeaseGranularity()
    rspec_node = NodeElement()
    # xxx how to retrieve site['login_base']
    #site_id=node['site_id']
    #site=sites_dict[site_id]
    rspec_node['mobile'] = node['mobile']
    rspec_node['archi'] = node['archi']
    rspec_node['radio'] = node['radio']
    cortexlab_xrn = xrn_object(self.driver.testbed_shell.root_auth,
                               node['hostname'])
    rspec_node['component_id'] = cortexlab_xrn.urn
    rspec_node['component_name'] = node['hostname']
    rspec_node['component_manager_id'] = \
        hrn_to_urn(self.driver.testbed_shell.root_auth, 'authority+sa')

    # Iotlab's nodes are federated : there is only one authority
    # for all Iotlab sites, registered in SFA.
    # Removing the part including the site
    # in authority_id. SA 27/07/12
    rspec_node['authority_id'] = rspec_node['component_manager_id']

    # do not include boot state (<available> element)
    #in the manifest rspec
    rspec_node['boot_state'] = node['boot_state']
    # if node['hostname'] in reserved_nodes:
    # rspec_node['boot_state'] = "Reserved"
    rspec_node['exclusive'] = 'true'
    rspec_node['hardware_types'] = [HardwareType({'name': 'iotlab-node'})]

    location = IotlabLocation({'country':'France', 'site': node['site']})
    rspec_node['location'] = location

    # copy the node's x/y/z coordinates into the position element
    position = IotlabPosition()
    for field in position:
        try:
            position[field] = node[field]
        except KeyError, error:
            logger.log_exc("Cortexlabaggregate\t node_to_rspec_node \ position %s " % (error))
def get_leases_and_channels(self, slice=None, slice_xrn=None, options={}):
    """
    Return the node reservations (leases) and the reserved channels known
    to the testbed, as a tuple ``(rspec_leases, rspec_channels)``.

    When ``slice`` is given, only the leases and reserved channels whose
    ``slice_id`` matches that slice are kept.

    :param slice: slice record dict (must carry ``slice_id``) or None.
    :param slice_xrn: slice urn; when given without ``slice`` the method
        short-circuits and returns ``([], [])``.
    :param options: unused here.
        NOTE(review): mutable default argument ``{}`` -- harmless while
        unused, but the usual Python pitfall; consider ``options=None``.
    :returns: tuple of (list of Lease dicts, list of channel dicts).
    """
    slices = self.driver.shell.getSlices({}, [])
    nodes = self.driver.shell.getNodes({}, [])
    leases = self.driver.shell.getReservedNodes({}, [])
    channels = self.driver.shell.getChannels({}, [])
    reserved_channels = self.driver.shell.getReservedChannels()
    # Reservation granularity in seconds, from the testbed configuration.
    grain = self.driver.testbedInfo['grain']
    if slice_xrn and not slice:
        # A urn was given but no matching slice record: nothing to report.
        return ([], [])
    if slice:
        # Iterate over copies while removing from the live lists, so the
        # iteration is not disturbed by the removals.
        all_leases = []
        all_leases.extend(leases)
        all_reserved_channels = []
        all_reserved_channels.extend(reserved_channels)
        for lease in all_leases:
            if lease['slice_id'] != slice['slice_id']:
                leases.remove(lease)
        for channel in all_reserved_channels:
            if channel['slice_id'] != slice['slice_id']:
                reserved_channels.remove(channel)
    rspec_channels = []
    for channel in reserved_channels:
        rspec_channel = {}
        # retrieve channel number
        # NOTE(review): if no entry in ``channels`` matches, this loop
        # falls through and ``channel_number`` keeps its value from the
        # previous iteration (or raises NameError on the first one) --
        # confirm the shell guarantees a match.
        for chl in channels:
            if chl['channel_id'] == channel['channel_id']:
                channel_number = chl['channel']
                break
        rspec_channel['channel_num'] = channel_number
        rspec_channel['start_time'] = channel['start_time']
        # Duration in grains; Python 2 integer division truncates.
        rspec_channel['duration'] = (int(channel['end_time']) - int(
            channel['start_time'])) / int(grain)
        rspec_channel['component_id'] = channel_to_urn(
            self.driver.hrn, self.driver.testbedInfo['name'], channel_number)
        # retreive slicename
        # NOTE(review): same stale-variable hazard as channel_number above.
        for slc in slices:
            if slc['slice_id'] == channel['slice_id']:
                slicename = slc['slice_name']
                break
        if slice_xrn:
            slice_urn = slice_xrn
            # NOTE(review): slice_hrn is assigned but only slice_urn is
            # used below on this branch -- confirm urn_to_hrn's return
            # shape if slice_hrn is ever consumed.
            slice_hrn = urn_to_hrn(slice_urn)
        else:
            slice_hrn = slicename_to_hrn(self.driver.hrn,
                                         self.driver.testbedInfo['name'],
                                         slicename)
            slice_urn = hrn_to_urn(slice_hrn, 'slice')
        rspec_channel['slice_id'] = slice_urn
        rspec_channels.append(rspec_channel)
    rspec_leases = []
    for lease in leases:
        rspec_lease = Lease()
        rspec_lease['lease_id'] = lease['reservation_id']
        # retreive node name
        # NOTE(review): stale-variable hazard if no node matches.
        for node in nodes:
            if node['node_id'] == lease['node_id']:
                nodename = node['hostname']
                break
        rspec_lease['component_id'] = hostname_to_urn(
            self.driver.hrn, self.driver.testbedInfo['name'], nodename)
        # retreive slicename
        for slc in slices:
            if slc['slice_id'] == lease['slice_id']:
                slicename = slc['slice_name']
                break
        if slice_xrn:
            slice_urn = slice_xrn
            slice_hrn = urn_to_hrn(slice_urn)
        else:
            slice_hrn = slicename_to_hrn(self.driver.hrn,
                                         self.driver.testbedInfo['name'],
                                         slicename)
            slice_urn = hrn_to_urn(slice_hrn, 'slice')
        rspec_lease['slice_id'] = slice_urn
        rspec_lease['start_time'] = lease['start_time']
        # Duration in grains; Python 2 integer division truncates.
        rspec_lease['duration'] = (int(lease['end_time']) -
                                   int(lease['start_time'])) / int(grain)
        rspec_leases.append(rspec_lease)
    return (rspec_leases, rspec_channels)
def get_nodes(self, slice_xrn, slice=None, slivers=None, options=None):
    """
    Return the list of rspec node elements for this testbed.

    When a slice urn is given, only the nodes carried by that slice's
    slivers are considered; a slice with no record or no slivers yields
    an empty list. Without a urn, every node known to the testbed shell
    is returned.

    :param slice_xrn: slice urn (truthy when a specific slice is asked for).
    :param slice: slice record dict, or None.
    :param slivers: dict of slivers keyed by node id; defaults to {}.
    :param options: unused here; kept for interface compatibility.
    :returns: list of NodeElement dicts (component ids, location,
        3D position, granularity, hardware type and sliver info).

    Note: the defaults were changed from shared mutable ``{}`` objects to
    ``None`` (normalized below) to avoid the mutable-default-argument
    pitfall; observable behavior is unchanged.
    """
    if slivers is None:
        slivers = {}
    if options is None:
        options = {}
    # if we are dealing with a slice that has no node just return
    # an empty list
    if slice_xrn:
        if not slice or not slivers:
            return []
        else:
            nodes = [slivers[sliver] for sliver in slivers]
    else:
        nodes = self.driver.shell.getNodes({}, [])

    # get the granularity in second for the reservation system
    grain = self.driver.testbedInfo['grain']
    #grain = 1800
    # site name is the same for every node: hoist it out of the loop
    site_name = self.driver.testbedInfo['name']

    rspec_nodes = []
    for node in nodes:
        rspec_node = NodeElement()
        rspec_node['component_id'] = hostname_to_urn(
            self.driver.hrn, site_name, node['hostname'])
        rspec_node['component_name'] = node['hostname']
        rspec_node['component_manager_id'] = \
            Xrn(self.driver.hrn, 'authority+cm').get_urn()
        rspec_node['authority_id'] = hrn_to_urn(
            NitosXrn.site_hrn(self.driver.hrn, site_name), 'authority+sa')
        # do not include boot state (<available> element) in the manifest rspec
        #if not slice:
        #    rspec_node['boot_state'] = node['boot_state']
        rspec_node['exclusive'] = 'true'
        # site location (skipped when either coordinate is missing/0)
        longitude = self.driver.testbedInfo['longitude']
        latitude = self.driver.testbedInfo['latitude']
        if longitude and latitude:
            location = Location({'longitude': longitude,
                                 'latitude': latitude,
                                 'country': 'unknown'})
            rspec_node['location'] = location
        # 3D position
        position_3d = Position3D({'x': node['position']['X'],
                                  'y': node['position']['Y'],
                                  'z': node['position']['Z']})
        #position_3d = Position3D({'x': 1, 'y': 2, 'z': 3})
        rspec_node['position_3d'] = position_3d
        # Granularity
        granularity = Granularity({'grain': grain})
        rspec_node['granularity'] = granularity
        # HardwareType
        rspec_node['hardware_type'] = node['node_type']
        #rspec_node['hardware_type'] = "orbit"
        # slivers
        if node['node_id'] in slivers:
            # add sliver info
            sliver = slivers[node['node_id']]
            rspec_node['sliver_id'] = sliver['node_id']
            rspec_node['client_id'] = node['hostname']
            rspec_node['slivers'] = [sliver]
        rspec_nodes.append(rspec_node)
    return rspec_nodes
def run (self, options): config = Config () interface_hrn = config.SFA_INTERFACE_HRN root_auth = config.SFA_REGISTRY_ROOT_AUTH shell = DummyShell (config) ######## retrieve all existing SFA objects all_records = global_dbsession.query(RegRecord).all() # create hash by (type,hrn) # we essentially use this to know if a given record is already known to SFA self.records_by_type_hrn = \ dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] ) # create hash by (type,pointer) self.records_by_type_pointer = \ dict ( [ ( (record.type, record.pointer) , record ) for record in all_records if record.pointer != -1] ) # initialize record.stale to True by default, then mark stale=False on the ones that are in use for record in all_records: record.stale=True # DEBUG #all_records = global_dbsession.query(RegRecord).all() #for record in all_records: print record ######## retrieve Dummy TB data # Get all plc sites # retrieve only required stuf sites = [shell.GetTestbedInfo()] print "sites: " + sites # create a hash of sites by login_base # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] ) # Get all dummy TB users users = shell.GetUsers() # create a hash of users by user_id users_by_id = dict ( [ ( user['user_id'], user) for user in users ] ) # Get all dummy TB public keys keys = [] for user in users: if 'keys' in user: keys.extend(user['keys']) # create a dict user_id -> [ keys ] keys_by_person_id = {} for user in users: if 'keys' in user: keys_by_person_id[user['user_id']] = user['keys'] # Get all dummy TB nodes nodes = shell.GetNodes() # create hash by node_id nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] ) # Get all dummy TB slices slices = shell.GetSlices() # create hash by slice_id slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] ) # start importing print " STARTING FOR SITES" for site in sites: site_hrn = _get_site_hrn(interface_hrn, site) # import if hrn is not in list of 
existing hrns or if the hrn exists # but its not a site record site_record=self.locate_by_type_hrn ('authority', site_hrn) print site_hrn print site_record if not site_record: try: print "TRY TO CREATE SITE RECORD" urn = hrn_to_urn(site_hrn, 'authority') if not self.auth_hierarchy.auth_exists(urn): print "create auth "+urn self.auth_hierarchy.create_auth(urn) auth_info = self.auth_hierarchy.get_auth_info(urn) site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(), pointer= -1, authority=get_authority(site_hrn)) site_record.just_created() print "urn: "+urn print "auth_info: " + auth_info print site_record global_dbsession.add(site_record) global_dbsession.commit() self.logger.info("DummyImporter: imported authority (site) : %s" % site_record) self.remember_record (site_record) except: # if the site import fails then there is no point in trying to import the # site's child records (node, slices, persons), so skip them. self.logger.log_exc("DummyImporter: failed to import site. Skipping child records") continue else: # xxx update the record ... pass site_record.stale=False # import node records for node in nodes: site_auth = get_authority(site_hrn) site_name = site['name'] node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname']) # xxx this sounds suspicious if len(node_hrn) > 64: node_hrn = node_hrn[:64] node_record = self.locate_by_type_hrn ( 'node', node_hrn ) if not node_record: try: pkey = Keypair(create=True) urn = hrn_to_urn(node_hrn, 'node') node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey) node_record = RegNode (hrn=node_hrn, gid=node_gid, pointer =node['node_id'], authority=get_authority(node_hrn)) node_record.just_created() global_dbsession.add(node_record) global_dbsession.commit() self.logger.info("DummyImporter: imported node: %s" % node_record) self.remember_record (node_record) except: self.logger.log_exc("DummyImporter: failed to import node") else: # xxx update the record ... 
pass node_record.stale=False all_records = global_dbsession.query(RegRecord).all() for record in all_records: print record site_pis=[] # import users for user in users: user_hrn = email_to_hrn(site_hrn, user['email']) # xxx suspicious again if len(user_hrn) > 64: user_hrn = user_hrn[:64] user_urn = hrn_to_urn(user_hrn, 'user') user_record = self.locate_by_type_hrn ( 'user', user_hrn) # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object) def init_user_key (user): pubkey = None pkey = None if user['keys']: # randomly pick first key in set for key in user['keys']: pubkey = key try: pkey = convert_public_key(pubkey) break except: continue if not pkey: self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn) pkey = Keypair(create=True) else: # the user has no keys. Creating a random keypair for the user's gid self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn) pkey = Keypair(create=True) return (pubkey, pkey) # new user try: if not user_record: (pubkey,pkey) = init_user_key (user) user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey) user_gid.set_email(user['email']) user_record = RegUser (hrn=user_hrn, gid=user_gid, pointer=user['user_id'], authority=get_authority(user_hrn), email=user['email']) if pubkey: user_record.reg_keys=[RegKey (pubkey)] else: self.logger.warning("No key found for user %s"%user_record) user_record.just_created() global_dbsession.add (user_record) global_dbsession.commit() self.logger.info("DummyImporter: imported person: %s" % user_record) self.remember_record ( user_record ) else: # update the record ? # if user's primary key has changed then we need to update the # users gid by forcing an update here sfa_keys = user_record.reg_keys def key_in_list (key,sfa_keys): for reg_key in sfa_keys: if reg_key.key==key: return True return False # is there a new key in Dummy TB ? 
new_keys=False for key in user['keys']: if not key_in_list (key,sfa_keys): new_keys = True if new_keys: (pubkey,pkey) = init_user_key (user) user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey) if not pubkey: user_record.reg_keys=[] else: user_record.reg_keys=[ RegKey (pubkey)] self.logger.info("DummyImporter: updated person: %s" % user_record) user_record.email = user['email'] global_dbsession.commit() user_record.stale=False except: self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email'])) # import slices for slice in slices: slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name']) slice_record = self.locate_by_type_hrn ('slice', slice_hrn) if not slice_record: try: pkey = Keypair(create=True) urn = hrn_to_urn(slice_hrn, 'slice') slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey) slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid, pointer=slice['slice_id'], authority=get_authority(slice_hrn)) slice_record.just_created() global_dbsession.add(slice_record) global_dbsession.commit() self.logger.info("DummyImporter: imported slice: %s" % slice_record) self.remember_record ( slice_record ) except: self.logger.log_exc("DummyImporter: failed to import slice") else: # xxx update the record ... 
self.logger.warning ("Slice update not yet implemented") pass # record current users affiliated with the slice slice_record.reg_researchers = \ [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ] global_dbsession.commit() slice_record.stale=False ### remove stale records # special records must be preserved system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager'] for record in all_records: if record.hrn in system_hrns: record.stale=False if record.peer_authority: record.stale=False for record in all_records: try: stale=record.stale except: stale=True self.logger.warning("stale not found with %s"%record) if stale: self.logger.info("DummyImporter: deleting stale record: %s" % record) global_dbsession.delete(record) global_dbsession.commit()