def _object_update(self, res_type, obj):
    """Update an existing resource on the API server.

    Issues a PUT with the modified object body (when plain fields
    changed), a POST to /prop-collection-update for pending
    prop-list/prop-map element operations, and POSTs to /ref-update
    for reference adds/deletes. Returns the PUT response content, or
    None when no plain field was modified.
    """
    obj_cls = get_object_class(res_type)

    # Read in uuid from api-server if not specified in obj
    if not obj.uuid:
        obj.uuid = self.fq_name_to_id(res_type, obj.get_fq_name())

    # Generate PUT on object only if some attr was modified
    content = None
    if obj.get_pending_updates():
        # Ignore fields with None value in json representation
        obj_json_param = json.dumps(obj, default=self._obj_serializer)
        if obj_json_param:
            json_body = '{"%s":%s}' % (res_type, obj_json_param)
            uri = obj_cls.resource_uri_base[res_type] + '/' + obj.uuid
            content = self._request_server(rest.OP_PUT, uri, data=json_body)

    # Generate POST on /prop-collection-update if needed/pending.
    # BUG FIX: each operation must stay paired with its own field name.
    # The previous code flattened all operations into one list and then
    # reused the loop variable 'prop_name', tagging every update with
    # whichever field happened to be iterated last.
    prop_coll_body = {'uuid': obj.uuid, 'updates': []}
    pending = list(obj._pending_field_list_updates.items())
    pending += list(obj._pending_field_map_updates.items())
    for prop_name, operations in pending:
        for oper, elem_val, elem_pos in operations:
            if isinstance(elem_val, GeneratedsSuper):
                serialized_elem_value = elem_val.exportDict('')
            else:
                serialized_elem_value = elem_val
            prop_coll_body['updates'].append(
                {'field': prop_name, 'operation': oper,
                 'value': serialized_elem_value, 'position': elem_pos})

    if prop_coll_body['updates']:
        prop_coll_json = json.dumps(prop_coll_body)
        self._request_server(rest.OP_POST,
                             self._action_uri['prop-collection-update'],
                             data=prop_coll_json)

    # Generate POST on /ref-update if needed/pending
    for ref_name in obj._pending_ref_updates:
        # diff the (uuid, to, attr) tuples of original vs current refs
        ref_orig = set(
            [(x.get('uuid'), tuple(x.get('to', [])), x.get('attr'))
             for x in getattr(obj, '_original_' + ref_name, [])])
        ref_new = set(
            [(x.get('uuid'), tuple(x.get('to', [])), x.get('attr'))
             for x in getattr(obj, ref_name, [])])
        for ref in ref_orig - ref_new:
            self.ref_update(res_type, obj.uuid, ref_name, ref[0],
                            list(ref[1]), 'DELETE')
        for ref in ref_new - ref_orig:
            self.ref_update(res_type, obj.uuid, ref_name, ref[0],
                            list(ref[1]), 'ADD', ref[2])

    obj.clear_pending_updates()
    return content
def _update_ref(self, bch, obj_type, obj_uuid, ref_type, old_ref_uuid, new_ref_infos): if ref_type not in new_ref_infos: # update body didn't touch this type, nop return if old_ref_uuid not in new_ref_infos[ref_type]: # remove old ref bch.remove(obj_uuid, columns=[ 'ref:%s:%s' % (ref_type, old_ref_uuid)]) if obj_type == ref_type: bch.remove(old_ref_uuid, columns=[ 'ref:%s:%s' % (obj_type, obj_uuid)]) else: bch.remove(old_ref_uuid, columns=[ 'backref:%s:%s' % (obj_type, obj_uuid)]) else: # retain old ref with new ref attr new_ref_data = new_ref_infos[ref_type][old_ref_uuid] bch.insert( obj_uuid, {'ref:%s:%s' % (ref_type, old_ref_uuid): json.dumps(new_ref_data)}) if obj_type == ref_type: bch.insert( old_ref_uuid, {'ref:%s:%s' % (obj_type, obj_uuid): json.dumps(new_ref_data)}) else: bch.insert( old_ref_uuid, {'backref:%s:%s' % (obj_type, obj_uuid): json.dumps(new_ref_data)}) # uuid has been accounted for, remove so only new ones remain del new_ref_infos[ref_type][old_ref_uuid]
def insert_client(self, service_type, service_id, client_id, blob, ttl):
    """Record a client subscription under both the /services and
    /clients trees of the coordination store."""
    payload = json.dumps({'ttl': ttl, 'blob': blob})

    service_path = '/services/%s/%s/%s' % (service_type, service_id,
                                           client_id)
    self.create_node(service_path, value=payload)

    client_path = '/clients/%s/%s/%s' % (service_type, client_id,
                                         service_id)
    self.create_node(client_path, value=payload, makepath=True)
def _create_child(self, bch, parent_type, parent_uuid, child_type, child_uuid): child_col = {'children:%s:%s' % (child_type, child_uuid): json.dumps(None)} bch.insert(parent_uuid, child_col) parent_col = {'parent:%s:%s' % (parent_type, parent_uuid): json.dumps(None)} bch.insert(child_uuid, parent_col)
def build_idperms_ifmap_obj(prop_field, values):
    """Serialize an id-perms property dict into its ifmap XML fragment
    (uuid mslong/lslong plus the enable flag)."""
    uuid_vals = values[u'uuid']
    fragments = [
        u'<uuid><uuid-mslong>',
        unicode(json.dumps(uuid_vals[u'uuid_mslong'])),
        u'</uuid-mslong><uuid-lslong>',
        unicode(json.dumps(uuid_vals[u'uuid_lslong'])),
        u'</uuid-lslong></uuid><enable>',
        unicode(json.dumps(values[u'enable'])),
        u'</enable>',
    ]
    return u''.join(fragments)
def _build_idperms_ifmap_obj(prop_field, values):
    """Serialize an id-perms property dict into its ifmap XML fragment
    (uuid mslong/lslong plus the enable flag)."""
    uuid_vals = values[u"uuid"]
    fragments = [
        u"<uuid><uuid-mslong>",
        unicode(json.dumps(uuid_vals[u"uuid_mslong"])),
        u"</uuid-mslong><uuid-lslong>",
        unicode(json.dumps(uuid_vals[u"uuid_lslong"])),
        u"</uuid-lslong></uuid><enable>",
        unicode(json.dumps(values[u"enable"])),
        u"</enable>",
    ]
    return u"".join(fragments)
def _object_create(self, res_type, obj_ids, obj_dict):
    """Persist a new config object into the Cassandra uuid/fq-name tables.

    Writes the object's base columns, parent/child link, properties and
    references in one batch mutation, then records the fq-name -> uuid
    mapping in the fq-name table. Returns (True, '') on success.
    """
    obj_type = res_type.replace('-', '_')
    obj_class = self._get_resource_class(obj_type)

    # Gather column values for obj and updates to backrefs
    # in a batch and write it at the end
    bch = self._obj_uuid_cf.batch()
    obj_cols = {}
    obj_cols['fq_name'] = json.dumps(obj_dict['fq_name'])
    obj_cols['type'] = json.dumps(obj_type)
    if 'parent_type' in obj_dict:
        # non config-root child
        parent_type = obj_dict['parent_type']
        parent_method_type = parent_type.replace('-', '_')
        # parent fq-name is the child's fq-name minus its last element
        parent_fq_name = obj_dict['fq_name'][:-1]
        obj_cols['parent_type'] = json.dumps(parent_type)
        parent_uuid = self.fq_name_to_uuid(parent_method_type,
                                           parent_fq_name)
        self._create_child(bch, parent_method_type, parent_uuid, obj_type,
                           obj_ids['uuid'])

    # Properties
    for prop_field in obj_class.prop_fields:
        field = obj_dict.get(prop_field)
        if field is None:
            continue
        if prop_field == 'id_perms':
            # stamp creation/last-modified times on the id-perms property
            field['created'] = datetime.datetime.utcnow().isoformat()
            field['last_modified'] = field['created']
        self._create_prop(bch, obj_ids['uuid'], prop_field, field)

    # References
    # e.g. ref_field = 'network_ipam_refs'
    #      ref_type = 'network-ipam'
    #      ref_link_type = 'VnSubnetsType'
    #      is_weakref = False
    for ref_field in obj_class.ref_fields:
        ref_type, ref_link_type, _ = obj_class.ref_field_types[ref_field]
        refs = obj_dict.get(ref_field, [])
        for ref in refs:
            ref_uuid = self.fq_name_to_uuid(ref_type, ref['to'])
            ref_attr = ref.get('attr')
            ref_data = {'attr': ref_attr, 'is_weakref': False}
            self._create_ref(bch, obj_type, obj_ids['uuid'],
                             ref_type.replace('-', '_'), ref_uuid,
                             ref_data)

    bch.insert(obj_ids['uuid'], obj_cols)
    bch.send()

    # Update fqname table
    fq_name_str = ':'.join(obj_dict['fq_name'])
    fq_name_cols = {utils.encode_string(fq_name_str) + ':' +
                    obj_ids['uuid']: json.dumps(None)}
    self._obj_fq_name_cf.insert(obj_type, fq_name_cols)

    return (True, '')
def _object_create(self, res_type, obj):
    """Create a new resource on the API server.

    POSTs the serialized object (refs folded into the body), records
    the server-assigned uuid (and parent uuid) on obj, then flushes any
    pending prop-list/prop-map element operations through
    /prop-collection-update. Returns the new uuid.
    """
    obj_type = res_type.replace('-', '_')
    obj_cls = get_object_class(res_type)

    # on create, refs are sent as part of the object body
    obj._pending_field_updates |= obj._pending_ref_updates
    obj._pending_ref_updates = set([])
    # Ignore fields with None value in json representation
    # encode props + refs in object body
    obj_json_param = json.dumps(obj, default=self._obj_serializer)

    json_body = '{"%s":%s}' % (res_type, obj_json_param)
    content = self._request_server(rest.OP_POST, obj_cls.create_uri,
                                   data=json_body)

    obj_dict = json.loads(content)[res_type]
    obj.uuid = obj_dict['uuid']
    if 'parent_uuid' in obj_dict:
        obj.parent_uuid = obj_dict['parent_uuid']

    obj.set_server_conn(self)

    # encode any prop-<list|map> operations and
    # POST on /prop-collection-update.
    # BUG FIX: keep each operation paired with its own field name; the
    # previous code flattened all operations and then reused the loop
    # variable 'prop_name', tagging every update with the last field.
    prop_coll_body = {'uuid': obj.uuid, 'updates': []}
    pending = list(obj._pending_field_list_updates.items())
    pending += list(obj._pending_field_map_updates.items())
    for prop_name, operations in pending:
        for oper, elem_val, elem_pos in operations:
            if isinstance(elem_val, GeneratedsSuper):
                serialized_elem_value = elem_val.exportDict('')
            else:
                serialized_elem_value = elem_val
            prop_coll_body['updates'].append(
                {'field': prop_name, 'operation': oper,
                 'value': serialized_elem_value, 'position': elem_pos})

    # all pending fields picked up
    obj.clear_pending_updates()

    if prop_coll_body['updates']:
        prop_coll_json = json.dumps(prop_coll_body)
        self._request_server(rest.OP_POST,
                             self._action_uri['prop-collection-update'],
                             data=prop_coll_json)

    return obj.uuid
def _create_ref(self, bch, obj_type, obj_uuid, ref_type, ref_uuid, ref_data): bch.insert( obj_uuid, {'ref:%s:%s' % (ref_type, ref_uuid): json.dumps(ref_data)}) if obj_type == ref_type: bch.insert( ref_uuid, {'ref:%s:%s' % (obj_type, obj_uuid): json.dumps(ref_data)}) else: bch.insert( ref_uuid, {'backref:%s:%s' % (obj_type, obj_uuid): json.dumps(ref_data)})
def __init__(self, dc, service_type, count, f=None, *args, **kw):
    """Subscription to *service_type* on the discovery server.

    dc: owning discovery client (supplies server address and identity).
    count: number of service instances requested.
    f: optional callback -- when set, the subscription refreshes itself
    in a background greenlet (ttl_loop); otherwise a single blocking
    query is issued immediately.
    """
    self.dc = dc
    self.f = f
    self.kw = kw
    self.args = args
    self.count = count
    self.service_type = service_type
    self._headers = {
        'Content-type': 'application/json',
    }
    self.info = []
    # md5 signature of the current publisher list; _query compares
    # against it to detect change
    infostr = json.dumps(self.info)
    self.sig = hashlib.md5(infostr).hexdigest()
    self.done = False
    # per-subscription diagnostic counters
    self.stats = {
        'service_type' : service_type,
        'request' : 0,
        'response' : 0,
        'conn_error' : 0,
        'timeout' : 0,
        'exc_unknown' : 0,
        'exc_info' : '',
        'instances' : count,
        'ttl' : 0,
        'blob' : '',
    }
    # subscribe request body sent to the discovery server
    data = {
        'service': service_type,
        'instances': count,
        'client-type': dc._client_type,
        'remote-addr': dc._myip,
        'client': dc._myid
    }
    self.post_body = json.dumps(data)
    self.url = "http://%s:%s/subscribe" % (dc._server_ip, dc._server_port)
    if f:
        # asynch - callback when new info is received
        ConnectionState.update(conn_type = ConnectionType.DISCOVERY,
            name = self.service_type,
            status = ConnectionStatus.INIT,
            message = 'Subscribe',
            server_addrs = ['%s:%s' % (dc._server_ip, dc._server_port)])
        self.task = gevent.spawn(self.ttl_loop)
    else:
        # synchronous: one blocking query, then mark done
        self._query()
        self.done = True
def mark_delete_subscription(self, service_type, client_id, service_id):
    """Flag both directions of a subscription (the client column and the
    subscriber column) as expired without deleting the rows."""
    for col_name in (('client', client_id, service_id),
                     ('subscriber', service_id, client_id)):
        row = self._disco_cf.get(service_type, columns=[col_name])
        entry = [json.loads(val) for col, val in row.items()][0]
        entry['expired'] = True
        self._disco_cf.insert(service_type,
                              {col_name: json.dumps(entry)})
def update_service(self, sid, entry):
    """PUT an updated service entry to the discovery server and return
    the HTTP status code."""
    url = "http://%s:%s/service/%s" % (
        self._server_ip, self._server_port, sid)
    resp = requests.put(url, data=json.dumps(entry),
                        headers=self._headers)
    return resp.status_code
def api_publish(self, end_point=None):
    """Bottle handler for a service publish request (JSON or XML body).

    Creates or refreshes the publisher entry in the discovery DB and
    returns the cookie identifying this publication.
    """
    self._debug['msg_pubs'] += 1

    ctype = bottle.request.headers['content-type']
    json_req = {}
    if ctype == 'application/json':
        data = bottle.request.json
        for service_type, info in data.items():
            json_req['name'] = service_type
            json_req['info'] = info
    elif ctype == 'application/xml':
        data = xmltodict.parse(bottle.request.body.read())
        for service_type, info in data.items():
            json_req['name'] = service_type
            json_req['info'] = dict(info)
    else:
        # BUG FIX: previously aborted with the undefined name 'e'
        # (raising NameError instead of returning a 400)
        bottle.abort(400, 'content-type must be application/json or '
                          'application/xml')

    # publisher id: explicit end point or hash of remote addr + payload
    sig = end_point or publisher_id(
        bottle.request.environ['REMOTE_ADDR'], json.dumps(json_req))

    # Rx {'name': u'ifmap-server', 'info': {u'ip_addr': u'10.84.7.1',
    #     u'port': u'8443'}}
    info = json_req['info']
    service_type = json_req['name']

    entry = self._db_conn.lookup_service(service_type, service_id=sig)
    if not entry:
        # first time we see this publisher
        entry = {
            'service_type': service_type,
            'service_id': sig,
            'in_use': 0,
            'ts_use': 0,
            'ts_created': int(time.time()),
            'prov_state': 'new',
            'remote': bottle.request.environ.get('REMOTE_ADDR'),
            'sequence': str(int(time.time())) + socket.gethostname(),
        }
    elif 'sequence' not in entry or self.service_expired(entry):
        # handle upgrade or republish after expiry
        entry['sequence'] = str(int(time.time())) + socket.gethostname()

    entry['info'] = info
    entry['admin_state'] = 'up'
    entry['heartbeat'] = int(time.time())

    # insert entry if new or timed out
    self._db_conn.update_service(service_type, sig, entry)

    response = {'cookie': sig + ':' + service_type}
    if ctype != 'application/json':
        response = xmltodict.unparse({'response': response})

    self.syslog('publish service "%s", sid=%s, info=%s'
                % (service_type, sig, info))

    # seed default per-service config options on first sighting
    if not service_type.lower() in self.service_config:
        self.service_config[
            service_type.lower()] = self._args.default_service_opts

    return response
def _object_update(self, res_type, obj):
    """PUT the modified object to the API server, then reconcile pending
    reference adds/deletes via /ref-update. Returns the PUT response
    content."""
    obj_type = res_type.replace('-', '_')
    obj_cls = get_object_class(res_type)

    # resolve the uuid from the fq-name when the caller didn't set it
    if not obj.uuid:
        obj.uuid = self.fq_name_to_id(res_type, obj.get_fq_name())

    # fields with a None value are dropped from the JSON representation
    body = '{"%s":%s}' % (res_type,
                          json.dumps(obj, default=self._obj_serializer))
    id = obj.uuid
    uri = '%s/%s' % (obj_cls.resource_uri_base[res_type], id)
    content = self._request_server(rest.OP_PUT, uri, data=body)

    def _as_tuples(refs):
        # hashable (uuid, to, attr) form so sets can be diffed
        return set([(r.get('uuid'), tuple(r.get('to', [])), r.get('attr'))
                    for r in refs])

    for ref_name in obj._pending_ref_updates:
        ref_orig = _as_tuples(getattr(obj, '_original_' + ref_name, []))
        ref_new = _as_tuples(getattr(obj, ref_name, []))
        for ref_uuid, ref_to, _ in ref_orig - ref_new:
            self.ref_update(res_type, obj.uuid, ref_name, ref_uuid,
                            list(ref_to), 'DELETE')
        for ref_uuid, ref_to, ref_attr in ref_new - ref_orig:
            self.ref_update(res_type, obj.uuid, ref_name, ref_uuid,
                            list(ref_to), 'ADD', ref_attr)

    obj.clear_pending_updates()
    return content
def kv_store(self, key, value):
    """Store a key/value pair via the useragent-keyvalue API."""
    # TODO move oper value to common
    payload = {'operation': 'STORE', 'key': key, 'value': value}
    uri = self._action_uri['useragent-keyvalue']
    self._request_server(rest.OP_POST, uri, data=json.dumps(payload))
def plugin_http_post_firewall_policy(self):
    """Bottle callback for firewall-policy POST requests; dispatches on
    the operation carried in the request context."""
    context, firewall_policy = self._get_requests_data()
    cfgdb = self._get_user_cfgdb(context)
    oper = context['operation']

    if oper == 'CREATE':
        return cfgdb.firewall_policy_create(context,
                                            firewall_policy['resource'])
    if oper == 'READ':
        fields = firewall_policy['fields']
        return cfgdb.firewall_policy_read(context, firewall_policy['id'],
                                          fields)
    if oper == 'READALL':
        filters = firewall_policy['filters']
        fields = firewall_policy['fields']
        return json.dumps(
            cfgdb.firewall_policy_list(context, filters, fields))
    if oper == 'UPDATE':
        return cfgdb.firewall_policy_update(context,
                                            firewall_policy['id'],
                                            firewall_policy['resource'])
    if oper == 'DELETE':
        return cfgdb.firewall_policy_delete(context,
                                            firewall_policy['id'])
    if oper == 'INSERT_RULE':
        return cfgdb.firewall_policy_insert_rule(
            context, firewall_policy['id'], firewall_policy['resource'])
    if oper == 'REMOVE_RULE':
        return cfgdb.firewall_policy_remove_rule(
            context, firewall_policy['id'], firewall_policy['resource'])
def id_to_fq_name_type(self, id):
    """Return (fq_name, type) for the object whose uuid is *id*."""
    uri = self._action_uri['id-to-name']
    content = self._request_server(rest.OP_POST, uri,
                                   data=json.dumps({'uuid': id}))
    rsp = json.loads(content)
    return (rsp['fq_name'], rsp['type'])
def set_aaa_mode(self, mode):
    """Set the API server AAA mode; raises HttpError(400) for an
    invalid mode value."""
    if mode not in cfgm_common.AAA_MODE_VALID_VALUES:
        raise HttpError(400, "Invalid AAA mode")
    body = json.dumps({"aaa-mode": mode})
    rsp = self._request_server(rest.OP_PUT, self._action_uri["aaa-mode"],
                               body)
    return json.loads(rsp)
def id_to_fq_name_type(self, id):
    """Return (fq_name, type) for the object whose uuid is *id*."""
    request = json.dumps({"uuid": id})
    reply = self._request_server(rest.OP_POST,
                                 self._action_uri["id-to-name"],
                                 data=request)
    decoded = json.loads(reply)
    return (decoded["fq_name"], decoded["type"])
def kv_retrieve(self, key=None):
    """Fetch the value stored under *key*; with key=None the entire
    collection is retrieved -- use with caution!"""
    # TODO move oper value to common
    payload = {"operation": "RETRIEVE", "key": key}
    uri = self._action_uri["useragent-keyvalue"]
    rsp = self._request_server(rest.OP_POST, uri,
                               data=json.dumps(payload))
    return json.loads(rsp)["value"]
def insert_client(self, service_type, service_id, client_id, blob, ttl):
    """Insert the subscriber and client columns for a subscription; both
    carry the same JSON payload and a column TTL padded by the expiry
    delta."""
    payload = json.dumps({'ttl': ttl, 'blob': blob,
                          'mtime': int(time.time())})
    col_ttl = ttl + disc_consts.TTL_EXPIRY_DELTA
    for col_name in (('subscriber', service_id, client_id),
                     ('client', client_id, service_id)):
        self._disco_cf.insert(service_type, {col_name: payload},
                              ttl=col_ttl)
def _query(self): conn_state_updated = False # hoping all errors are transient and a little wait will solve the problem while True: try: self.stats['request'] += 1 r = requests.post( self.url, data=self.post_body, headers=self._headers, timeout=5) if r.status_code == 200: break self.inc_stats('sc_%d' % r.status_code) emsg = "Status Code %d" % r.status_code except requests.exceptions.ConnectionError: self.stats['conn_error'] += 1 emsg = 'Connection Error' except (requests.exceptions.Timeout, socket.timeout): self.stats['timeout'] += 1 emsg = 'Request Timeout' self.syslog('connection error or failed to subscribe') if not conn_state_updated: conn_state_updated = True ConnectionState.update( conn_type = ConnectionType.DISCOVERY, name = self.service_type, status = ConnectionStatus.DOWN, message = 'Subscribe - %s' % emsg, server_addrs = \ ['%s:%s' % (self.dc._server_ip, \ self.dc._server_port)]) gevent.sleep(2) # end while self.syslog('query resp => %s ' % r.text) response = r.json() # avoid signature on ttl which can change between iterations info = response[self.service_type] infostr = json.dumps(info) sig = hashlib.md5(infostr).hexdigest() self.stats['response'] += 1 self.stats['ttl'] = response['ttl'] self.stats['blob'] = infostr self.ttl = response['ttl'] self.change = False if sig != self.sig: #print 'signature mismatch! old=%s, new=%s' % (self.sig, sig) self.info = info self.sig = sig self.change = True ConnectionState.update(conn_type = ConnectionType.DISCOVERY, name = self.service_type, status = ConnectionStatus.UP, message = 'Subscribe Response', server_addrs = ['%s:%s' % (self.dc._server_ip, \ self.dc._server_port)])
def plugin_get_policys(self, context, policy):
    """List network policies matching the request filters, JSON-encoded."""
    filters = policy['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.policy_list(context, filters))
def plugin_get_sec_group_rules(self, context, sg_rule):
    """List security group rules matching the request filters,
    JSON-encoded."""
    filters = sg_rule['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.security_group_rule_list(context, filters))
def plugin_get_routers(self, context, router):
    """List logical routers matching the request filters, JSON-encoded."""
    filters = router['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.router_list(context, filters))
def plugin_get_floatingips(self, context, floatingip):
    """List floating IPs matching the request filters, JSON-encoded."""
    filters = floatingip['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.floatingip_list(context, filters))
def plugin_get_subnets(self, context, subnet):
    """List subnets matching the request filters; each entry is run
    through _make_subnet_dict before JSON encoding."""
    filters = subnet['filters']
    db = self._get_user_cfgdb(context)
    raw = db.subnets_list(context, filters)
    return json.dumps([self._make_subnet_dict(entry) for entry in raw])
def plugin_get_networks(self, context, network):
    """List networks matching the request filters, JSON-encoded."""
    filters = network['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.network_list(context, filters))
def plugin_get_svc_instances(self, context, svc_instance):
    """List service instances matching the request filters,
    JSON-encoded."""
    filters = svc_instance['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.svc_instance_list(context, filters))
def plugin_get_route_tables(self, context, route_table):
    """List route tables matching the request filters, JSON-encoded."""
    filters = route_table['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.route_table_list(context, filters))
def plugin_get_ipams(self, context, ipam):
    """List IPAMs matching the request filters, JSON-encoded."""
    filters = ipam['filters']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.ipam_list(context, filters))
def ifmap_to_id(self, ifmap_id):
    """Resolve an ifmap identifier to the object's uuid.

    Returns None when the server does not know the ifmap id (404);
    re-raises any other HttpError.
    """
    json_body = json.dumps({'ifmap_id': ifmap_id})
    uri = self._action_uri['ifmap-to-id']
    try:
        content = self._request_server(rest.OP_POST, uri, data=json_body)
    except HttpError as he:
        if he.status_code == 404:
            return None
        # BUG FIX: non-404 errors previously fell through and hit an
        # unbound 'content' (NameError); propagate the original error
        # instead, matching fq_name_to_id/ref_relax_for_delete.
        raise
    return json.loads(content)['uuid']
def _update_prop(self, bch, obj_uuid, prop_name, new_props): if new_props[prop_name] is None: bch.remove(obj_uuid, columns=['prop:' + prop_name]) else: bch.insert( obj_uuid, {'prop:' + prop_name: json.dumps(new_props[prop_name])}) # prop has been accounted for, remove so only new ones remain del new_props[prop_name]
def locate(cls, fq_name=None, uuid=None, create_it=True, **kwargs):
    """Read the resource identified by fq_name/uuid, creating it when
    absent (and create_it is True).

    Returns the (ok, result) pair produced by dbe_read, or
    (False, (http_code, msg)) on error.
    NOTE(review): written to be invoked with the class as first
    argument -- confirm a @classmethod decorator exists at the
    definition site.
    """
    if fq_name is not None and uuid is None:
        try:
            uuid = cls.db_conn.fq_name_to_uuid(cls.object_type, fq_name)
        except NoIdError as e:
            if create_it:
                # unknown name: fall through to the creation path below
                pass
            else:
                return False, (404, str(e))
    if uuid:
        try:
            ok, result = cls.db_conn.dbe_read(
                cls.object_type, uuid, obj_fields=kwargs.get('fields'))
        except NoIdError as e:
            if create_it:
                # NOTE(review): with create_it=True this leaves ok/result
                # unbound before the check below -- confirm intended flow
                pass
            else:
                return False, (404, str(e))
        if not ok:
            return False, result
        else:
            return ok, result

    # Does not exist, create it. Need at least an fq_name
    if fq_name is None or fq_name == []:
        msg = ("Cannot create %s without at least a FQ name" %
               cls.object_type.replace('_', ' ').title())
        return False, (400, msg)

    # build the parent handle when a parent type/uuid was supplied
    parent_obj = None
    if kwargs.get('parent_type') is not None:
        parent_class = cls.server.get_resource_class(kwargs['parent_type'])
        parent_obj = parent_class(fq_name=fq_name[:-1])
        parent_obj.uuid = kwargs.get('parent_uuid')
    obj = cls(parent_obj=parent_obj, **kwargs)
    obj.fq_name = fq_name
    obj.uuid = kwargs.get('uuid')
    # round-trip through JSON to obtain a plain dict representation
    obj_dict = json.loads(json.dumps(obj, default=_obj_serializer_all))
    for ref_name in cls.ref_fields & set(kwargs.keys()):
        obj_dict[ref_name] = copy.deepcopy(kwargs[ref_name])
    try:
        cls.server.internal_request_create(cls.resource_type, obj_dict)
    except HttpError as e:
        if e.status_code != 409:
            return False, (e.status_code, e.content)
        else:
            # Ignore the refsExistError.
            cls.db_conn.config_log('Object '
                                   '%s uuid %s already been created.' % (
                                       ' '.join(fq_name), uuid),
                                   level=SandeshLevel.SYS_DEBUG)
    # re-resolve the uuid and return the freshly created/existing object
    try:
        uuid = cls.db_conn.fq_name_to_uuid(cls.object_type, fq_name)
    except NoIdError as e:
        return False, (404, str(e))
    return cls.db_conn.dbe_read(cls.object_type, obj_id=uuid)
def _publish_int(self, service, data, oper_state='up', msg=''):
    """Send one publish request for *service* to the discovery server.

    On success, stores the returned cookie in self.pubdata and returns
    it; on connection error, timeout or non-200 status the cookie stays
    None. Connection state is reported in all cases via the finally
    block.
    """
    self.syslog('Publish service "%s", data "%s"' % (service, data))
    payload = {
        service: data,
        'service-type': service,
        'remote-addr': self._remote_addr,
        'oper-state': oper_state,
        'oper-state-reason': msg
    }
    emsg = None      # error description; None means success so far
    cookie = None
    try:
        self.inc_pub_stats(service, 'request')
        r = requests.post(
            self.puburl, data=json.dumps(payload),
            headers=self._headers, timeout=5)
        if r.status_code != 200:
            self.inc_pub_stats(service, 'sc_%d' % r.status_code)
            emsg = 'Status Code ' + str(r.status_code)
    except requests.exceptions.ConnectionError:
        self.inc_pub_stats(service, 'conn_error')
        emsg = 'Connection Error'
    except requests.exceptions.Timeout:
        self.inc_pub_stats(service, 'timeout')
        emsg = 'Request Timeout'
    finally:
        # always reflect the outcome in the connection-state machinery
        ConnectionState.update(conn_type = ConnectionType.DISCOVERY,
            name = service,
            status = ConnectionStatus.DOWN if emsg else ConnectionStatus.UP,
            server_addrs = ['%s:%s' % (self._server_ip,
                                       self._server_port)],
            message = 'Publish Error - %s' % emsg if emsg
                      else 'Publish Success')
    if not emsg:
        # success path: record stats and remember the publish token
        self.inc_pub_stats(service, 'response')
        self.inc_pub_stats(service, 'blob', value=json.dumps(data))
        response = r.json()
        cookie = response['cookie']
        self.pubdata[cookie] = (service, data)
        self.syslog('Saving token %s' % (cookie))
    return cookie
def fq_name_to_id(self, obj_type, fq_name):
    """Resolve (obj_type, fq_name) to a uuid via /name-to-id.

    Returns None when the name is unknown (404); re-raises any other
    HttpError."""
    body = json.dumps({'type': obj_type, 'fq_name': fq_name})
    uri = self._action_uri['name-to-id']
    try:
        content = self._request_server(rest.OP_POST, uri, data=body)
    except HttpError as he:
        if he.status_code == 404:
            return None
        raise he
    return json.loads(content)['uuid']
def plugin_get_trunks(self, context, trunk):
    """List trunks matching the request filters/fields, JSON-encoded."""
    filters = trunk['filters']
    fields = trunk['fields']
    db = self._get_user_cfgdb(context)
    return json.dumps(db.trunk_list(context, filters, fields))
def insert_client(self, service_type, service_id, client_id, blob, ttl):
    """Insert the subscriber and client columns for a subscription; both
    carry the same JSON payload and a TTL padded by the expiry delta."""
    entry = {'ttl': ttl, 'blob': blob, 'mtime': int(time.time())}
    col_val = json.dumps(entry)
    expiry = ttl + disc_consts.TTL_EXPIRY_DELTA
    self._disco_cf.insert(service_type,
                          {('subscriber', service_id, client_id): col_val},
                          ttl=expiry)
    self._disco_cf.insert(service_type,
                          {('client', client_id, service_id): col_val},
                          ttl=expiry)
def api_query(self):
    """Bottle handler for a subscribe/query request (JSON or XML body).

    Returns up to 'instances' active publishers of the requested
    service; all active publishers when instances is 0.
    """
    self._debug['msg_query'] += 1

    if self._db_conn.is_restarting():
        self._debug['restarting'] += 1
        bottle.abort(503, 'Service Unavailable')

    ctype = bottle.request.headers['content-type']
    if ctype == 'application/json':
        json_req = bottle.request.json
    elif ctype == 'application/xml':
        data = xmltodict.parse(bottle.request.body.read())
        json_req = {}
        for service_type, info in data.items():
            json_req['service'] = service_type
            json_req.update(dict(info))
    else:
        # BUG FIX: previously aborted with the undefined name 'e'
        # (raising NameError instead of returning a 400)
        bottle.abort(400, 'content-type must be application/json or '
                          'application/xml')

    service_type = json_req['service']
    count = int(json_req['instances'])

    r = []
    # lookup publishers of the service
    pubs = self._db_conn.query_service(service_type)
    if not pubs:
        return {service_type: r}

    # eliminate inactive services
    pubs_active = [item for item in pubs if not self.service_expired(item)]
    self.syslog(' query: Found %s publishers, %d active, need %d'
                % (len(pubs), len(pubs_active), count))

    # find least loaded instances
    pubs = pubs_active

    # prepare response - send all if count 0
    for index in range(min(count, len(pubs)) if count else len(pubs)):
        entry = pubs[index]
        result = entry['info']
        r.append(result)
        self.syslog(' assign service=%s, info=%s'
                    % (entry['service_id'], json.dumps(result)))
        # don't update pubsub data if we are sending entire list
        if count == 0:
            continue

    response = {service_type: r}
    if ctype == 'application/xml':
        response = xmltodict.unparse({'response': response})
    return response
def virtual_network_ip_alloc(self, vnobj, count=1, subnet=None,
                             family=None):
    """Ask the API server to allocate *count* IP addresses in vnobj's
    network (optionally from a given subnet/family); returns the list
    of allocated addresses."""
    request = {'count': count, 'subnet': subnet, 'family': family}
    uri = self._action_uri['virtual-network-ip-alloc'] % vnobj.uuid
    content = self._request_server(rest.OP_POST, uri,
                                   data=json.dumps(request))
    return json.loads(content)['ip_addr']
def ref_relax_for_delete(self, obj_uuid, ref_uuid):
    """Relax the reference held by obj_uuid so it will not block a
    future delete of ref_uuid.

    Returns None when the server reports 404; re-raises any other
    HttpError."""
    body = json.dumps({'uuid': obj_uuid, 'ref-uuid': ref_uuid})
    uri = self._action_uri['ref-relax-for-delete']
    try:
        content = self._request_server(rest.OP_POST, uri, data=body)
    except HttpError as he:
        if he.status_code == 404:
            return None
        raise he
    return json.loads(content)['uuid']
def __init__(self, dc, service_type, count, f=None, *args, **kw):
    """Subscription to *service_type* on the discovery server.

    dc: owning discovery client (supplies server address and identity).
    count: number of service instances requested.
    f: optional callback -- when set, the subscription refreshes itself
    in a background greenlet (ttl_loop); otherwise a single blocking
    query is issued immediately.
    """
    self.dc = dc
    self.f = f
    self.kw = kw
    self.args = args
    self.count = count
    self.service_type = service_type
    self._headers = {
        'Content-type': 'application/json',
    }
    self.info = []
    # md5 signature of the current publisher list; _query compares
    # against it to detect change
    infostr = json.dumps(self.info)
    self.sig = hashlib.md5(infostr).hexdigest()
    self.done = False
    # subscribe request body sent to the discovery server
    data = {
        'service': service_type,
        'instances': count,
        'client-type': dc._client_type,
        'remote-addr': dc._myip,
        'client': dc._myid
    }
    self.post_body = json.dumps(data)
    self.url = "http://%s:%s/subscribe" % (dc._server_ip, dc._server_port)
    if f:
        # asynch - callback when new info is received
        ConnectionState.update(conn_type = ConnectionType.DISCOVERY,
            name = self.service_type,
            status = ConnectionStatus.INIT,
            message = 'Subscribe',
            server_addrs = ['%s:%s' % (dc._server_ip, dc._server_port)])
        self.task = gevent.spawn(self.ttl_loop)
    else:
        # synchronous: one blocking query, then mark done
        self._query()
        self.done = True
def plugin_get_route_tables(self, context, route_table):
    """List route tables matching the request filters, JSON-encoded;
    unexpected errors are dumped via cgitb before re-raising."""
    filters = route_table['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.route_table_list(context, filters))
    except Exception:
        cgitb.Hook(format="text").handle(sys.exc_info())
        raise
def plugin_get_sec_groups(self, context, sg):
    """List security groups matching the request filters, JSON-encoded;
    unexpected errors are dumped via cgitb before re-raising."""
    filters = sg['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.security_group_list(context, filters))
    except Exception:
        cgitb.Hook(format="text").handle(sys.exc_info())
        raise
def plugin_get_subnets(self, context, subnet):
    """List subnets matching the request filters; each entry is run
    through _make_subnet_dict before JSON encoding. Unexpected errors
    are dumped via cgitb_hook before re-raising."""
    filters = subnet['filters']
    try:
        db = self._get_user_cfgdb(context)
        raw = db.subnets_list(context, filters)
        return json.dumps([self._make_subnet_dict(entry) for entry in raw])
    except Exception:
        cgitb_hook(format="text")
        raise
def plugin_get_floatingips(self, context, floatingip):
    """List floating IPs matching the request filters, JSON-encoded;
    unexpected errors are dumped via cgitb_hook before re-raising."""
    filters = floatingip['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.floatingip_list(context, filters))
    except Exception:
        cgitb_hook(format="text")
        raise
def plugin_get_ipams(self, context, ipam):
    """List IPAMs matching the request filters, JSON-encoded; unexpected
    errors are dumped via cgitb_hook before re-raising."""
    filters = ipam['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.ipam_list(context, filters))
    except Exception:
        cgitb_hook(format="text")
        raise
def plugin_get_ports(self, context, port):
    """List ports matching the request filters, JSON-encoded; unexpected
    errors are dumped via cgitb_hook before re-raising."""
    filters = port['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.port_list(context, filters))
    except Exception:
        cgitb_hook(format="text")
        raise
def plugin_get_sec_group_rules(self, context, sg_rule):
    """List security group rules matching the request filters,
    JSON-encoded; unexpected errors are dumped via cgitb_hook before
    re-raising."""
    filters = sg_rule['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.security_group_rule_list(context, filters))
    except Exception:
        cgitb_hook(format="text")
        raise
def plugin_get_svc_instances(self, context, svc_instance):
    """List service instances matching the request filters,
    JSON-encoded; unexpected errors are dumped via cgitb_hook before
    re-raising."""
    filters = svc_instance['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.svc_instance_list(context, filters))
    except Exception:
        cgitb_hook(format="text")
        raise
def plugin_get_policys(self, context, policy):
    """List network policies matching the request filters, JSON-encoded;
    unexpected errors are dumped via cgitb before re-raising."""
    filters = policy['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.policy_list(context, filters))
    except Exception:
        cgitb.Hook(format="text").handle(sys.exc_info())
        raise
def plugin_get_networks(self, context, network):
    """List networks matching the request filters, JSON-encoded;
    unexpected errors are dumped via cgitb_hook before re-raising."""
    filters = network['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.network_list(context, filters))
    except Exception:
        cgitb_hook(format="text")
        raise
def plugin_get_routers(self, context, router):
    """List logical routers matching the request filters, JSON-encoded;
    unexpected errors are dumped via cgitb_hook before re-raising."""
    filters = router['filters']
    try:
        db = self._get_user_cfgdb(context)
        return json.dumps(db.router_list(context, filters))
    except Exception:
        cgitb_hook(format="text")
        raise
def db_update_service_entry_oper_state(self):
    """Migration pass: ensure every service-entry column carries
    oper_state/oper_state_msg fields (defaulting to 'up' / '')."""
    probe = ('service',)
    rows = self._disco_cf.get_range(column_start=probe,
                                    column_finish=probe)
    for service_type, services in rows:
        for col_name, col_value in services.items():
            _, _, tag = col_name
            if tag != 'service-entry':
                continue
            entry = json.loads(col_value)
            if 'oper_state' in entry:
                continue
            entry['oper_state'] = 'up'
            entry['oper_state_msg'] = ''
            self._disco_cf.insert(service_type,
                                  {col_name: json.dumps(entry)})
            self._debug['db_upd_oper_state'] += 1