def lookup_service(self, service_type, service_id=None):
    """Return the publisher entry for (service_type, service_id), or the
    list of all entries for service_type when service_id is None.

    Each entry is annotated with 'in_use' -- the count of subscriber
    columns recorded against that service.  Returns None when the
    service-type row does not exist.
    """
    def _in_use_count(svc_id):
        # Subscriber columns share the composite prefix ('subscriber', id).
        rng = ('subscriber', svc_id,)
        return self._disco_cf.get_count(service_type,
                                        column_start=rng,
                                        column_finish=rng)

    try:
        if service_id:
            cols = self._disco_cf.get(
                service_type,
                columns=[('service', service_id, 'service-entry')])
            entries = [json.loads(v) for _, v in cols.items()]
            entry = entries[0]
            entry['in_use'] = _in_use_count(service_id)
            return entry
        prefix = ('service',)
        cols = self._disco_cf.get(service_type,
                                  column_start=prefix,
                                  column_finish=prefix)
        entries = [json.loads(v) for _, v in cols.items()]
        for entry in entries:
            entry['in_use'] = _in_use_count(entry['service_id'])
        return entries
    except pycassa.NotFoundException:
        return None
def lookup_client(self, service_type, client_id, subs=False):
    """Return (client-info, subscriptions) for a client of service_type.

    client-info is the decoded pure-client entry (the column tagged
    disc_consts.CLIENT_TAG), or None if the row has no such column; the
    subscription list holds (service_id, blob) pairs ordered by column
    timestamp (i.e. order of assignment).
    """
    r = []
    # Bug fix: 'data' was previously unbound when the row existed but
    # contained no CLIENT_TAG column, raising UnboundLocalError.
    data = None
    col_name = ('client', client_id,)
    try:
        subs = self._disco_cf.get(service_type,
                                  column_start=col_name,
                                  column_finish=col_name,
                                  include_timestamp=True)
        # sort columns by timestamp (subs is array of (col_name, (value, timestamp)))
        subs = sorted(subs.items(), key=lambda entry: entry[1][1])
        # col_name = (client, client_id, service_id)
        # col_val = (real-value, timestamp)
        for col_name, col_val in subs:
            foo, client_id, service_id = col_name
            if service_id == disc_consts.CLIENT_TAG:
                data = json.loads(col_val[0])
                continue
            entry = json.loads(col_val[0])
            r.append((col_name[2], entry['blob']))
        return (data, r)
    except pycassa.NotFoundException:
        return (None, [])
def loadbalancer_list(self):
    """Return [(uuid, config_info_dict, driver_info_dict_or_None)] for
    every row in the loadbalancer column family."""
    entries = []
    for row_id, row in self._db_list(self._lb_cf) or []:
        config = json.loads(row["config_info"])
        driver = json.loads(row["driver_info"]) if "driver_info" in row else None
        entries.append((row_id, config, driver))
    return entries
def pool_list(self):
    """Return [(uuid, config_info_dict, driver_info_dict_or_None)] for
    every row in the loadbalancer-pool column family."""
    entries = []
    for row_id, row in self._db_list(self._lb_cf) or []:
        config = json.loads(row['config_info'])
        driver = json.loads(row['driver_info']) if 'driver_info' in row else None
        entries.append((row_id, config, driver))
    return entries
def uuid_to_obj_type(self, id):
    """Return the object type for a uuid, consulting the in-memory
    fq-name cache first and the obj_uuid column family on a miss.

    Raises NoIdError when the uuid is unknown to Cassandra.
    """
    try:
        return self._cache_uuid_to_fq_name[id][1]
    except KeyError:
        # Cache miss: read from Cassandra and repopulate the cache.
        try:
            obj = self._obj_uuid_cf.get(id, columns=['fq_name', 'type'])
        except pycassa.NotFoundException:
            raise NoIdError(id)
        fq_name = json.loads(obj['fq_name'])
        obj_type = json.loads(obj['type'])
        self.cache_uuid_to_fq_name_add(id, fq_name, obj_type)
        return obj_type
def mark_delete_subscription(self, service_type, client_id, service_id):
    """Flag both copies of a (client, service) subscription as expired.

    The subscription is stored twice -- once keyed by client and once by
    subscriber -- and both columns are rewritten with 'expired' True.
    """
    for col_name in (('client', client_id, service_id),
                     ('subscriber', service_id, client_id)):
        cols = self._disco_cf.get(service_type, columns=[col_name])
        entry = [json.loads(v) for _, v in cols.items()][0]
        entry['expired'] = True
        self._disco_cf.insert(service_type, {col_name: json.dumps(entry)})
def restore_config(self, create, resource, json_body):
    """Replay one saved config object against the API server.

    POSTs to the resource class's create URI when 'create' is true,
    otherwise PUTs to the existing object's URI (derived from the uuid
    embedded in json_body).  Returns the decoded server response, or
    None for an unknown resource type.
    """
    cls = utils.obj_type_to_vnc_class(resource, __name__)
    if not cls:
        return None
    if create:
        content = self._request_server(rest.OP_POST, cls.create_uri,
                                       data=json_body)
    else:
        obj_uuid = json.loads(json_body)[resource]['uuid']
        uri = cls.resource_uri_base[resource] + '/' + obj_uuid
        content = self._request_server(rest.OP_PUT, uri, data=json_body)
    return json.loads(content)
def _object_read(self, res_type, fq_name=None, fq_name_str=None, id=None, ifmap_id=None, fields=None):
    """Fetch one object from the API server as a typed vnc object.

    The object may be addressed by fq_name, fq_name_str, id or ifmap_id.
    When 'fields' is given only those fields are requested; otherwise
    back-refs and children are excluded for a lighter read.
    """
    obj_cls = get_object_class(res_type)
    (args_ok, result) = self._read_args_to_id(
        res_type, fq_name, fq_name_str, id, ifmap_id)
    if not args_ok:
        return result
    uri = obj_cls.resource_uri_base[res_type] + '/' + result
    if fields:
        query_params = {'fields': ','.join(fields)}
    else:
        # Default read excludes the heavy relational fields.
        query_params = {'exclude_back_refs': True, 'exclude_children': True}
    content = self._request_server(rest.OP_GET, uri, query_params)
    obj = obj_cls.from_dict(**json.loads(content)[res_type])
    obj.clear_pending_updates()
    obj.set_server_conn(self)
    return obj
def restore_config(self, create, resource, json_body):
    """Replay one saved config object against the API server.

    POSTs to the class create URI when 'create' is true, otherwise PUTs
    to the existing object's URI (derived from the uuid embedded in
    json_body).  Returns the decoded response, or None for an unknown
    resource type.
    """
    class_name = "%s" % (utils.CamelCase(resource))
    cls = str_to_class(class_name)
    if not cls:
        return None
    if create:
        content = self._request_server(rest.OP_POST, cls.create_uri,
                                       data=json_body)
    else:
        obj_uuid = json.loads(json_body)[resource]['uuid']
        uri = cls.resource_uri_base[resource] + '/' + obj_uuid
        content = self._request_server(rest.OP_PUT, uri, data=json_body)
    return json.loads(content)
def id_to_fq_name_type(self, id):
    """Resolve a uuid to its (fq_name, resource type) pair via the
    server's id-to-name action."""
    uri = self._action_uri["id-to-name"]
    content = self._request_server(rest.OP_POST, uri,
                                   data=json.dumps({"uuid": id}))
    rsp = json.loads(content)
    return (rsp["fq_name"], rsp["type"])
def id_to_fq_name_type(self, id):
    """Resolve a uuid to its (fq_name, resource type) pair via the
    server's id-to-name action."""
    uri = self._action_uri['id-to-name']
    content = self._request_server(rest.OP_POST, uri,
                                   data=json.dumps({'uuid': id}))
    rsp = json.loads(content)
    return (rsp['fq_name'], rsp['type'])
def set_aaa_mode(self, mode):
    """Set the server's AAA mode.

    Raises HttpError(400) when 'mode' is not one of
    cfgm_common.AAA_MODE_VALID_VALUES; returns the decoded response.
    """
    if mode not in cfgm_common.AAA_MODE_VALID_VALUES:
        raise HttpError(400, "Invalid AAA mode")
    content = self._request_server(rest.OP_PUT,
                                   self._action_uri["aaa-mode"],
                                   json.dumps({"aaa-mode": mode}))
    return json.loads(content)
def _authenticate(self, response=None, headers=None):
    """Obtain a fresh keystone token and return 'headers' with
    X-AUTH-TOKEN set.

    Returns headers untouched when authentication is not configured.
    Raises RuntimeError when keystone is unreachable or rejects the
    credentials.
    """
    if self._authn_type is None:
        return headers
    url = "%s://%s:%s%s" % (self._authn_protocol, self._authn_server,
                            self._authn_port, self._authn_url)
    new_headers = headers or {}
    # Choose TLS verification behaviour: disabled, CA bundle, or default.
    post_kwargs = {'data': self._authn_body,
                   'headers': self._DEFAULT_AUTHN_HEADERS}
    if self._ksinsecure:
        post_kwargs['verify'] = False
    elif self._use_ks_certs:
        post_kwargs['verify'] = self._kscertbundle
    try:
        response = requests.post(url, **post_kwargs)
    except Exception as e:
        raise RuntimeError('Unable to connect to keystone for authentication. '
                           'Exception %s' % (e))
    if response.status_code not in (200, 201):
        raise RuntimeError('Authentication Failure')
    # plan is to re-issue original request with new token
    if 'v2' in self._authn_url:
        authn_content = json.loads(response.text)
        self._auth_token = authn_content['access']['token']['id']
    else:
        self._auth_token = response.headers['x-subject-token']
    new_headers['X-AUTH-TOKEN'] = self._auth_token
    return new_headers
def _request(self, op, url, data=None, retry_on_error=True,
             retry_after_authn=False, retry_count=30):
    """Issue one REST operation against the API server with retry and
    re-authentication handling.

    op is one of rest.OP_GET/POST/DELETE/PUT; 'data' is query params for
    GET and the request body otherwise.  Returns the response content
    (JSON-decoded for GET).  Maps error statuses to typed exceptions and
    retries 502/503 up to retry_count times; a 401 triggers one
    re-authentication followed by a single recursive retry.
    """
    retried = 0
    if self._curl_logging:
        self._log_curl(op=op, url=url, data=data)
    while True:
        try:
            if op == rest.OP_GET:
                (status, content) = self._http_get(url, headers=self._headers,
                                                   query_params=data)
                if status == 200:
                    content = json.loads(content)
            elif op == rest.OP_POST:
                (status, content) = self._http_post(url, body=data,
                                                    headers=self._headers)
            elif op == rest.OP_DELETE:
                (status, content) = self._http_delete(url, body=data,
                                                      headers=self._headers)
            elif op == rest.OP_PUT:
                (status, content) = self._http_put(url, body=data,
                                                   headers=self._headers)
            else:
                raise ValueError
        except ConnectionError:
            if not retry_on_error:
                raise ConnectionError
            # Re-establish the session and retry the same operation.
            time.sleep(1)
            self._create_api_server_session()
            continue
        if status == 200:
            return content
        # Exception Response, see if it can be resolved
        if (status == 401) and (not self._auth_token_input) and (not retry_after_authn):
            self._headers = self._authenticate(content, self._headers)
            # Recursive call after authentication (max 1 level)
            content = self._request(op, url, data=data, retry_after_authn=True)
            return content
        elif status == 404:
            raise NoIdError("Error: oper %s url %s body %s response %s"
                            % (op, url, data, content))
        elif status == 403:
            raise PermissionDenied(content)
        elif status == 412:
            raise OverQuota(content)
        elif status == 409:
            raise RefsExistError(content)
        elif status == 504:
            # Request sent to API server, but no response came within 50s
            raise TimeOutError("Gateway Timeout 504")
        elif status in [502, 503]:
            # 502: API server died after accepting request, so retry
            # 503: no API server available even before sending the request
            retried += 1
            if retried >= retry_count:
                raise ServiceUnavailableError("Service Unavailable Timeout %d" % status)
            time.sleep(1)
            continue
        elif status == 400:
            raise BadRequest(status, content)
        else:
            # Unknown Error
            raise HttpError(status, content)
def _parse_homepage(self, json_body):
    """Populate per-class create/resource URIs and the action-URI table
    from the API server's homepage document."""
    py_obj = json.loads(json_body)
    srv_root_url = py_obj['href']
    self._srv_root_url = srv_root_url
    for link in py_obj['links']:
        info = link['link']
        # Strip the server base so the stored value is a relative *_uri.
        uri = info['href'].replace(srv_root_url, '')
        if info['rel'] == 'collection':
            cls = str_to_class("%s" % (utils.CamelCase(info['name'])))
            if cls:
                cls.create_uri = uri
        elif info['rel'] == 'resource-base':
            cls = str_to_class("%s" % (utils.CamelCase(info['name'])))
            if cls:
                cls.resource_uri_base[info['name']] = uri
        elif info['rel'] == 'action':
            self._action_uri[info['name']] = uri
def walk(self, fn):
    """Invoke fn(obj_type, uuid_list) for every object type in the DB
    and collect the non-empty results.

    A first pass seeds the uuid -> fq_name cache for every object.
    Per-type callback failures are logged and skipped so one bad type
    does not abort the walk.
    """
    walk_results = []
    type_to_sqa_objs_info = self.get_all_sqa_objs_info()
    # Pass 1: warm the uuid -> fq_name cache.
    for obj_type, objs_info in type_to_sqa_objs_info.items():
        for sqa_obj_info in objs_info:
            self.cache_uuid_to_fq_name_add(
                sqa_obj_info[0],              # uuid
                json.loads(sqa_obj_info[1]),  # fqname
                obj_type)
    # Pass 2: hand each type's uuid list to the callback.
    for obj_type, objs_info in type_to_sqa_objs_info.items():
        uuid_list = [o[0] for o in objs_info]
        try:
            self.config_log('Resync: obj_type %s len %s'
                            % (obj_type, len(uuid_list)),
                            level=SandeshLevel.SYS_INFO)
            result = fn(obj_type, uuid_list)
            if result:
                walk_results.append(result)
        except Exception as e:
            self.config_log('Error in db walk invoke %s' % (str(e)),
                            level=SandeshLevel.SYS_ERR)
    return walk_results
def set_aaa_mode(self, mode):
    """Set the server's AAA mode.

    Raises HttpError(400) when 'mode' is not one of
    cfgm_common.AAA_MODE_VALID_VALUES; returns the decoded response.
    """
    if mode not in cfgm_common.AAA_MODE_VALID_VALUES:
        raise HttpError(400, 'Invalid AAA mode')
    content = self._request_server(rest.OP_PUT,
                                   self._action_uri['aaa-mode'],
                                   json.dumps({'aaa-mode': mode}))
    return json.loads(content)
def _parse_homepage(self, json_body):
    """Populate per-class create/resource URIs and the action-URI table
    from the API server's homepage document."""
    py_obj = json.loads(json_body)
    srv_root_url = py_obj['href']
    self._srv_root_url = srv_root_url
    for link in py_obj['links']:
        info = link['link']
        # Strip the server base so the stored value is a relative *_uri.
        uri = info['href'].replace(srv_root_url, '')
        if info['rel'] == 'collection':
            cls = utils.obj_type_to_vnc_class(info['name'], __name__)
            if cls:
                cls.create_uri = uri
        elif info['rel'] == 'resource-base':
            cls = utils.obj_type_to_vnc_class(info['name'], __name__)
            if cls:
                cls.resource_uri_base[info['name']] = uri
        elif info['rel'] == 'action':
            self._action_uri[info['name']] = uri
def walk(self, fn):
    """Invoke fn(obj_type, uuid_list) for every object type in the DB
    and collect the non-empty results.

    A first pass seeds the uuid -> fq_name cache for every object.
    Per-type callback failures are logged and skipped so one bad type
    does not abort the walk.
    """
    walk_results = []
    type_to_sqa_objs_info = self.get_all_sqa_objs_info()
    # Pass 1: warm the uuid -> fq_name cache.
    for obj_type, objs_info in type_to_sqa_objs_info.items():
        for sqa_obj_info in objs_info:
            self.cache_uuid_to_fq_name_add(
                sqa_obj_info[0],              # uuid
                json.loads(sqa_obj_info[1]),  # fqname
                obj_type)
    # Pass 2: hand each type's uuid list to the callback.
    for obj_type, objs_info in type_to_sqa_objs_info.items():
        uuid_list = [o[0] for o in objs_info]
        try:
            self.config_log('Resync: obj_type %s len %s'
                            % (obj_type, len(uuid_list)),
                            level=SandeshLevel.SYS_INFO)
            result = fn(obj_type, uuid_list)
            if result:
                walk_results.append(result)
        except Exception as e:
            self.config_log('Error in db walk invoke %s' % (str(e)),
                            level=SandeshLevel.SYS_ERR)
    return walk_results
def get_filters(data, skips=None):
    """Extract query-parameter filters from 'data'.

    'data' contains filters in the format: check==a,check==b,name==Bob
    Returns a dict of de-duplicated value lists, e.g.
    {'check': ['a', 'b'], 'name': ['Bob']}.  Values that parse as JSON
    are decoded; keys listed in 'skips' are omitted.
    """
    skips = skips or []
    res = {}
    if not data:
        return res
    for term in data.split(','):
        # Split on the first '==' only so values containing '==' survive
        # (previously this raised ValueError).  'term' also avoids
        # shadowing the builtin 'filter'.
        key, value = term.split('==', 1)
        # Skip early: no point JSON-decoding a value we will drop.
        if key in skips:
            continue
        try:
            value = json.loads(value)
        except ValueError:
            pass
        values = list(set(res.get(key, [])) | set([value]))
        if values:
            res[key] = values
    return res
def update_last_modified(self, bch, obj_uuid, id_perms=None):
    """Stamp id_perms['last_modified'] with the current UTC time and
    persist it via _update_prop on batch 'bch'.

    When id_perms is not supplied it is first read back from the
    object's 'prop:id_perms' column.
    """
    if id_perms is None:
        stored = self._obj_uuid_cf.get(obj_uuid, ['prop:id_perms'])
        id_perms = json.loads(stored['prop:id_perms'])
    id_perms['last_modified'] = datetime.datetime.utcnow().isoformat()
    self._update_prop(bch, obj_uuid, 'id_perms', {'id_perms': id_perms})
def ref_update(self, obj_type, obj_uuid, ref_type, ref_uuid, ref_fq_name, operation, attr=None):
    """Add or delete a reference between two objects via the server's
    ref-update action.

    Returns the updated object's uuid, or None when the server reports
    404; re-raises any other HttpError.
    """
    # Accept both 'foo_bar_refs' and 'foo-bar' spellings of the ref type.
    if ref_type.endswith('_refs'):
        ref_type = ref_type[:-5].replace('_', '-')
    payload = {'type': obj_type,
               'uuid': obj_uuid,
               'ref-type': ref_type,
               'ref-uuid': ref_uuid,
               'ref-fq-name': ref_fq_name,
               'operation': operation,
               'attr': attr}
    json_body = json.dumps(payload, default=self._obj_serializer_diff)
    uri = self._action_uri['ref-update']
    try:
        content = self._request_server(rest.OP_POST, uri, data=json_body)
    except HttpError as he:
        if he.status_code == 404:
            return None
        raise he
    return json.loads(content)['uuid']
def _authenticate(self, response=None, headers=None):
    """Obtain a fresh keystone token and return 'headers' with
    X-AUTH-TOKEN set.

    Returns headers untouched when authentication is not configured.
    Raises RuntimeError when keystone is unreachable or rejects the
    credentials.
    """
    if self._authn_type is None:
        return headers
    url = "%s://%s:%s%s" % (self._authn_protocol, self._authn_server,
                            self._authn_port, self._authn_url)
    new_headers = headers or {}
    # Choose TLS verification behaviour: disabled, CA bundle, or default.
    post_kwargs = {'data': self._authn_body,
                   'headers': self._DEFAULT_AUTHN_HEADERS}
    if self._ksinsecure:
        post_kwargs['verify'] = False
    elif self._use_ks_certs:
        post_kwargs['verify'] = self._kscertbundle
    try:
        response = requests.post(url, **post_kwargs)
    except Exception as e:
        raise RuntimeError('Unable to connect to keystone for authentication. Verify keystone server details')
    if response.status_code not in (200, 201):
        raise RuntimeError('Authentication Failure')
    # plan is to re-issue original request with new token
    if 'v2' in self._authn_url:
        authn_content = json.loads(response.text)
        self._auth_token = authn_content['access']['token']['id']
    else:
        self._auth_token = response.headers['x-subject-token']
    new_headers['X-AUTH-TOKEN'] = self._auth_token
    return new_headers
def lookup_service(self, service_type=None, service_id=None, include_count=False):
    """Look up publisher entries, optionally across all service types.

    Returns a single entry when service_id is given, a list when only
    service_type is given, or entries for every service type when
    neither is given (via a range scan).  Each entry gets an
    'admin_state' (default "up") and, when include_count is set, an
    'in_use' subscriber count.  Returns None (or [] when listing) when
    nothing is found.
    """
    try:
        col_name = ('service', service_id,) if service_id else ('service', )
        if service_type:
            services = self._disco_cf.get(service_type,
                                          column_start=col_name,
                                          column_finish=col_name,
                                          column_count=disc_consts.MAX_COL)
            data = [(service_type, services)]
        else:
            # No service type: range-scan every row in the column family.
            data = self._disco_cf.get_range(column_start=col_name,
                                            column_finish=col_name)
        # admin state is maintained separately from the rest of the
        # publisher info, thus a separate pass merges them together
        data_dict = {}
        admin_state = {}
        for serv_type, services in data:
            for col_name, col_val in services.items():
                # Composite column name: (tag-group, service id, tag).
                (_, serv_id, tag) = col_name
                if tag == disc_consts.SERVICE_TAG:
                    entry = json.loads(col_val)
                    entry['in_use'] = 0
                    data_dict[(serv_type, serv_id)] = entry
                elif tag == disc_consts.ADMIN_STATE_TAG:
                    admin_state[(serv_type, serv_id)] = col_val
            if not include_count:
                continue
            # get in-use count for each publisher.
            col_name = ('subscriber', )
            try:
                # read all subs for a service type to minimize read requests
                subscribers = self._disco_cf.get(serv_type,
                                                 column_start=col_name,
                                                 column_finish=col_name,
                                                 column_count=disc_consts.MAX_COL)
                for col, val in subscribers.items():
                    _, serv_id, client_id = col
                    if (serv_type, serv_id) in data_dict:
                        data_dict[(serv_type, serv_id)]['in_use'] += 1
            except pycassa.NotFoundException:
                pass
        if len(data_dict) == 0:
            return None if service_id else []
        for key, entry in data_dict.items():
            entry['admin_state'] = admin_state.get(key, "up")
        # Stable ordering by (service type, service id) key.
        data = [data_dict[key] for key in sorted(data_dict.iterkeys())]
        return data[0] if service_id else data
    except pycassa.NotFoundException:
        return None
def kv_retrieve(self, key=None):
    """Fetch the useragent key-value entry for 'key'.

    If key is None the entire collection is retrieved -- use with
    caution!  (TODO: move the operation name to a common constant.)
    """
    request = {'operation': 'RETRIEVE', 'key': key}
    content = self._request_server(rest.OP_POST,
                                   self._action_uri['useragent-keyvalue'],
                                   data=json.dumps(request))
    return json.loads(content)['value']
def kv_retrieve(self, key=None):
    """Fetch the useragent key-value entry for 'key'.

    If key is None the entire collection is retrieved -- use with
    caution!  (TODO: move the operation name to a common constant.)
    """
    request = {"operation": "RETRIEVE", "key": key}
    content = self._request_server(rest.OP_POST,
                                   self._action_uri["useragent-keyvalue"],
                                   data=json.dumps(request))
    return json.loads(content)["value"]
def obj_perms(self, token, obj_uuid=None):
    """Return the permissions granted by 'token' (optionally scoped to
    obj_uuid), or None when the server denies permission."""
    query = 'token=%s' % token
    if obj_uuid:
        query = '%s&uuid=%s' % (query, obj_uuid)
    try:
        rv = self._request_server(rest.OP_GET, "/obj-perms", data=query)
    except PermissionDenied:
        return None
    return json.loads(rv)
def _find_url(self, json_body, resource_name):
    """Return the href of the homepage link named resource_name, or
    None if no such link exists.

    The parsed homepage is pretty-printed to stdout for debugging.
    """
    rname = unicode(resource_name)
    py_obj = json.loads(json_body)
    pprint.pprint(py_obj)
    for link in py_obj["links"]:
        entry = link["link"]
        if entry["name"] == rname:
            return entry["href"]
    return None
def _prop_collection_get(self, obj_uuid, obj_field, position):
    """Read one prop-list/prop-map field of an object (optionally a
    single position) via the prop-collection-get action."""
    query_params = {'uuid': obj_uuid, 'fields': obj_field}
    if position:
        query_params['position'] = position
    content = self._request_server(rest.OP_GET,
                                   self._action_uri['prop-collection-get'],
                                   data=query_params)
    return json.loads(content)[obj_field]
def lookup_client(self, service_type, client_id):
    """Return the decoded client node for (service_type, client_id), or
    None when the node is empty or holds invalid JSON."""
    path = '/clients/%s/%s' % (service_type, client_id)
    try:
        datastr, _ = self.read_node(path)
        return json.loads(datastr) if datastr else None
    except ValueError:
        self.syslog('raise ValueError st=%s, cid=%s' % (service_type, client_id))
        return None
def service_entries(self):
    """Yield the decoded publisher entry of every node under
    /services/<service_type>/<service_id>."""
    for service_type in self.get_children('/services'):
        for service_id in self.get_children('/services/%s' % (service_type)):
            data, _ = self.read_node(
                '/services/%s/%s' % (service_type, service_id))
            yield json.loads(data)
def _find_url(self, json_body, resource_name):
    """Return the href of the homepage link named resource_name, or
    None if no such link exists.

    The parsed homepage is pretty-printed to stdout for debugging.
    """
    rname = unicode(resource_name)
    py_obj = json.loads(json_body)
    pprint.pprint(py_obj)
    for link in py_obj['links']:
        entry = link['link']
        if entry['name'] == rname:
            return entry['href']
    return None
def _prop_collection_get(self, obj_uuid, obj_field, position):
    """Read one prop-list/prop-map field of an object (optionally a
    single position) via the prop-collection-get action."""
    query_params = {"uuid": obj_uuid, "fields": obj_field}
    if position:
        query_params["position"] = position
    content = self._request_server(rest.OP_GET,
                                   self._action_uri["prop-collection-get"],
                                   data=query_params)
    return json.loads(content)[obj_field]
def service_entries(self):
    """Yield the decoded publisher entry of every node under
    /services/<service_type>/<service_id>."""
    for service_type in self.get_children('/services'):
        for service_id in self.get_children('/services/%s' % (service_type)):
            data, _ = self.read_node(
                '/services/%s/%s' % (service_type, service_id))
            yield json.loads(data)
def get_all_clients(self, service_type=None, service_id=None):
    """List subscriptions as (service_type, client_id, service_id,
    mtime, ttl) tuples.

    Scope depends on the arguments: subscribers of one service when both
    service_type and service_id are given, clients of one service type
    when only service_type is given, otherwise every client row via a
    range scan.  Returns None when the requested row does not exist.
    """
    r = []
    # Column layout differs by scope: subscriber columns are
    # ('subscriber', service_id, client_id) while client columns are
    # ('client', client_id, service_id).
    entry_format_subscriber = False
    if service_type and service_id:
        # ('subscriber', service_id, client_id)
        col_name = ('subscriber', service_id, )
        try:
            clients = self._disco_cf.get(service_type,
                                         column_start=col_name,
                                         column_finish=col_name,
                                         column_count=disc_consts.MAX_COL)
        except pycassa.NotFoundException:
            return None
        data = [(service_type, dict(clients))]
        entry_format_subscriber = True
    elif service_type:
        col_name = ('client', )
        try:
            clients = self._disco_cf.get(service_type,
                                         column_start=col_name,
                                         column_finish=col_name,
                                         column_count=disc_consts.MAX_COL)
        except pycassa.NotFoundException:
            return None
        data = [(service_type, dict(clients))]
    else:
        col_name = ('client', )
        try:
            data = self._disco_cf.get_range(column_start=col_name,
                                            column_finish=col_name,
                                            column_count=disc_consts.MAX_COL)
        except pycassa.NotFoundException:
            return None
    for service_type, clients in data:
        rr = []
        for col_name in clients:
            if entry_format_subscriber:
                (_, service_id, client_id) = col_name
            else:
                (_, client_id, service_id) = col_name
            # skip pure client entry
            if service_id == disc_consts.CLIENT_TAG:
                continue
            entry_str = clients[col_name]
            entry = json.loads(entry_str)
            rr.append((service_type, client_id, service_id,
                       entry['mtime'], entry['ttl']))
        # sort by modification time
        # rr = sorted(rr, key=lambda entry: entry[3])
        r.extend(rr)
    return r
def _prop_collection_get(self, obj_uuid, obj_field, position):
    """Read one prop-list/prop-map field of an object (optionally a
    single position) via the prop-collection-get action."""
    query_params = {'uuid': obj_uuid, 'fields': obj_field}
    if position:
        query_params['position'] = position
    content = self._request_server(rest.OP_GET,
                                   self._action_uri['prop-collection-get'],
                                   data=query_params)
    return json.loads(content)[obj_field]
def lookup_subscription(self, service_type, client_id=None, service_id=None, include_meta=False):
    """Look up subscription state under /clients/<service_type>.

    Three modes depending on the arguments supplied:
    - client_id and service_id: that subscription's blob (or a
      (blob, stat, ttl) tuple when include_meta is true), None if the
      node is missing.
    - client_id only: [(service_id, blob)] sorted by node modification
      time (order of assignment), None when the client node is missing.
    - neither: the list of client ids for the service type.
    Returns None when the service-type node itself does not exist.
    """
    if not self.exists_node('/clients/%s' % (service_type)):
        return None
    if client_id and service_id:
        try:
            datastr, stat = self.read_node(
                '/clients/%s/%s/%s' % (service_type, client_id, service_id))
            data = json.loads(datastr)
            blob = data['blob']
            if include_meta:
                return (blob, stat, data['ttl'])
            return blob
        except kazoo.exceptions.NoNodeException:
            return None
    if client_id:
        # our version of Kazoo doesn't support include_data :-(
        try:
            entries = []
            for service_id in self.get_children(
                    '/clients/%s/%s' % (service_type, client_id)):
                datastr, stat = self.read_node(
                    '/clients/%s/%s/%s' % (service_type, client_id, service_id))
                if datastr:
                    data = json.loads(datastr)
                    entries.append((service_id, data['blob'], stat))
            # sort services in the order of assignment to this client
            # (based on modification time)
            entries.sort(key=lambda entry: entry[2].last_modified)
            return [(sid, blob) for sid, blob, _ in entries]
        except kazoo.exceptions.NoNodeException:
            return None
    return self.get_children('/clients/%s' % (service_type))
def lookup_client(self, service_type, client_id):
    """Return the decoded client node for (service_type, client_id), or
    None when the node is empty or holds invalid JSON."""
    path = '/clients/%s/%s' % (service_type, client_id)
    try:
        datastr, _ = self.read_node(path)
        return json.loads(datastr) if datastr else None
    except ValueError:
        self.syslog('raise ValueError st=%s, cid=%s' % (service_type, client_id))
        return None
def ifmap_to_id(self, ifmap_id):
    """Resolve an ifmap identifier to an object uuid via the server's
    ifmap-to-id action.

    Returns None when the id is unknown (404).  Bug fix: any other
    HttpError is now re-raised -- previously it was swallowed and the
    code fell through to an unbound 'content' (UnboundLocalError); this
    also matches fq_name_to_id / ref_relax_for_delete.
    """
    json_body = json.dumps({'ifmap_id': ifmap_id})
    uri = self._action_uri['ifmap-to-id']
    try:
        content = self._request_server(rest.OP_POST, uri, data=json_body)
    except HttpError as he:
        if he.status_code == 404:
            return None
        raise he
    return json.loads(content)['uuid']
def _object_create(self, res_type, obj):
    """Create 'obj' (a vnc resource of type res_type) on the API server
    and return its new uuid.

    Props and refs are POSTed in the object body; any pending
    prop-list/prop-map element operations are then flushed with a single
    POST to /prop-collection-update.
    """
    obj_cls = get_object_class(res_type)
    # Ref updates ride along with field updates in the create body.
    obj._pending_field_updates |= obj._pending_ref_updates
    obj._pending_ref_updates = set([])
    # Ignore fields with None value in json representation
    # encode props + refs in object body
    obj_json_param = json.dumps(obj, default=self._obj_serializer)
    json_body = '{"%s":%s}' % (res_type, obj_json_param)
    content = self._request_server(rest.OP_POST, obj_cls.create_uri,
                                   data=json_body)
    obj_dict = json.loads(content)[res_type]
    obj.uuid = obj_dict['uuid']
    if 'parent_uuid' in obj_dict:
        obj.parent_uuid = obj_dict['parent_uuid']
    obj.set_server_conn(self)
    # Encode any pending prop-<list|map> element operations.
    # Bug fix: keep each operation paired with its own field name --
    # previously every update was tagged with whatever prop_name was
    # left over from the collection loops, mislabeling updates whenever
    # more than one property had pending operations.
    operations = []
    for prop_name, ops in obj._pending_field_list_updates.items():
        operations.extend((prop_name,) + tuple(op) for op in ops)
    for prop_name, ops in obj._pending_field_map_updates.items():
        operations.extend((prop_name,) + tuple(op) for op in ops)
    prop_coll_body = {'uuid': obj.uuid, 'updates': []}
    for prop_name, oper, elem_val, elem_pos in operations:
        if isinstance(elem_val, GeneratedsSuper):
            serialized_elem_value = elem_val.exportDict('')
        else:
            serialized_elem_value = elem_val
        prop_coll_body['updates'].append({
            'field': prop_name,
            'operation': oper,
            'value': serialized_elem_value,
            'position': elem_pos
        })
    # All pending fields picked up; clear before the follow-up POST.
    obj.clear_pending_updates()
    if prop_coll_body['updates']:
        self._request_server(rest.OP_POST,
                             self._action_uri['prop-collection-update'],
                             data=json.dumps(prop_coll_body))
    return obj.uuid
def locate(cls, fq_name=None, uuid=None, create_it=True, **kwargs):
    """Read the resource identified by fq_name/uuid, creating it first
    (via an internal request) when absent and create_it is true.

    Returns (ok, result) in the usual server convention: result is the
    object dict on success or an (http-status, message) tuple on error.
    """
    # Resolve fq_name to uuid when only the name was supplied.
    if fq_name is not None and uuid is None:
        try:
            uuid = cls.db_conn.fq_name_to_uuid(cls.object_type, fq_name)
        except NoIdError as e:
            if create_it:
                # Fall through to the creation path below.
                pass
            else:
                return False, (404, str(e))
    if uuid:
        try:
            ok, result = cls.db_conn.dbe_read(
                cls.object_type, uuid, obj_fields=kwargs.get('fields'))
        except NoIdError as e:
            if create_it:
                # NOTE(review): falling through here leaves 'ok'/'result'
                # unbound for the checks just below, which would raise
                # UnboundLocalError -- confirm against the original
                # (pre-flattening) control flow.
                pass
            else:
                return False, (404, str(e))
        if not ok:
            return False, result
        else:
            return ok, result
    # Does not exist, create it. Need at least an fq_name
    if fq_name is None or fq_name == []:
        msg = ("Cannot create %s without at least a FQ name"
               % cls.object_type.replace('_', ' ').title())
        return False, (400, msg)
    # Build a parent stub when a parent type was supplied.
    parent_obj = None
    if kwargs.get('parent_type') is not None:
        parent_class = cls.server.get_resource_class(kwargs['parent_type'])
        parent_obj = parent_class(fq_name=fq_name[:-1])
        parent_obj.uuid = kwargs.get('parent_uuid')
    obj = cls(parent_obj=parent_obj, **kwargs)
    obj.fq_name = fq_name
    obj.uuid = kwargs.get('uuid')
    # Round-trip through JSON to get a plain serializable dict.
    obj_dict = json.loads(json.dumps(obj, default=_obj_serializer_all))
    for ref_name in cls.ref_fields & set(kwargs.keys()):
        obj_dict[ref_name] = copy.deepcopy(kwargs[ref_name])
    try:
        cls.server.internal_request_create(cls.resource_type, obj_dict)
    except HttpError as e:
        if e.status_code != 409:
            return False, (e.status_code, e.content)
        else:
            # Ignore the refsExistError.
            cls.db_conn.config_log('Object '
                                   '%s uuid %s already been created.'
                                   % (' '.join(fq_name), uuid),
                                   level=SandeshLevel.SYS_DEBUG)
            try:
                uuid = cls.db_conn.fq_name_to_uuid(cls.object_type, fq_name)
            except NoIdError as e:
                return False, (404, str(e))
    return cls.db_conn.dbe_read(cls.object_type, obj_id=uuid)
def prop_collection_read(self, obj_type, obj_uuid, obj_fields, position):
    """Read prop-list/prop-map collection fields of one object.

    Returns (True, result) where result maps each requested field to a
    list of (decoded-value, position) pairs and always includes
    'id_perms'.  Raises NoIdError when the object row is missing.
    """
    obj_class = self._get_resource_class(obj_type)
    result = {}
    # always read-in id-perms for upper-layers to do rbac/visibility
    try:
        col_name = 'prop:id_perms'
        obj_cols = self._obj_uuid_cf.get(obj_uuid,
                                         columns=[col_name],
                                         column_count=self._MAX_COL)
        result['id_perms'] = json.loads(obj_cols[col_name])
    except pycassa.NotFoundException:
        raise NoIdError(obj_uuid)
    # read in prop-list or prop-map fields
    for field in obj_fields:
        # Column prefix encodes the collection kind: propl / propm.
        if field in obj_class.prop_list_fields:
            prop_pfx = 'propl'
        elif field in obj_class.prop_map_fields:
            prop_pfx = 'propm'
        if position:
            # Exact-position read: start == finish.
            col_start = '%s:%s:%s' %(prop_pfx, field, position)
            col_end = '%s:%s:%s' %(prop_pfx, field, position)
        else:
            # Range read of every position: ';' sorts immediately after
            # ':' in ASCII, so '<pfx>:<field>:' .. '<pfx>:<field>;'
            # spans all '<pfx>:<field>:<position>' columns.
            col_start = '%s:%s:' %(prop_pfx, field)
            col_end = '%s:%s;' %(prop_pfx, field)
        obj_cols = self._obj_uuid_cf.get(obj_uuid,
                                         column_start=col_start,
                                         column_finish=col_end,
                                         column_count=self._MAX_COL,
                                         include_timestamp=True)
        result[field] = []
        for col_name in obj_cols.keys():
            # tuple of col_value, position. result is already sorted
            # lexically by position
            result[field].append(
                (json.loads(obj_cols[col_name][0]),
                 col_name.split(':')[-1])
            )
    return (True, result)
def fq_name_to_id(self, obj_type, fq_name):
    """Resolve (obj_type, fq_name) to a uuid via the server's name-to-id
    action.  Returns None when no such object exists; re-raises any
    other HttpError."""
    body = json.dumps({'type': obj_type, 'fq_name': fq_name})
    try:
        content = self._request_server(rest.OP_POST,
                                       self._action_uri['name-to-id'],
                                       data=body)
    except HttpError as he:
        if he.status_code == 404:
            return None
        raise he
    return json.loads(content)['uuid']
def lookup_client(self, service_type, client_id, subs=False):
    """Return (client-info, subscriptions) for a client of service_type.

    client-info is the decoded pure-client entry (the column tagged
    disc_consts.CLIENT_TAG), or None if the row has no such column; the
    subscription list holds (service_id, blob) pairs ordered by column
    timestamp (i.e. order of assignment).
    """
    r = []
    # Bug fix: 'data' was previously unbound when the row existed but
    # contained no CLIENT_TAG column, raising UnboundLocalError.
    data = None
    col_name = ('client', client_id, )
    try:
        subs = self._disco_cf.get(service_type,
                                  column_start=col_name,
                                  column_finish=col_name,
                                  include_timestamp=True)
        # sort columns by timestamp (subs is array of (col_name, (value, timestamp)))
        subs = sorted(subs.items(), key=lambda entry: entry[1][1])
        # col_name = (client, client_id, service_id)
        # col_val = (real-value, timestamp)
        for col_name, col_val in subs:
            foo, client_id, service_id = col_name
            if service_id == disc_consts.CLIENT_TAG:
                data = json.loads(col_val[0])
                continue
            entry = json.loads(col_val[0])
            r.append((col_name[2], entry['blob']))
        return (data, r)
    except pycassa.NotFoundException:
        return (None, [])
def _object_create(self, res_type, obj):
    """Create 'obj' (a vnc resource of type res_type) on the API server
    and return its new uuid.

    Props and refs are POSTed in the object body; any pending
    prop-list/prop-map element operations are then flushed with a single
    POST to /prop-collection-update.
    """
    obj_cls = get_object_class(res_type)
    # Ref updates ride along with field updates in the create body.
    obj._pending_field_updates |= obj._pending_ref_updates
    obj._pending_ref_updates = set([])
    # Ignore fields with None value in json representation
    # encode props + refs in object body
    obj_json_param = json.dumps(obj, default=self._obj_serializer)
    json_body = '{"%s":%s}' %(res_type, obj_json_param)
    content = self._request_server(rest.OP_POST, obj_cls.create_uri,
                                   data=json_body)
    obj_dict = json.loads(content)[res_type]
    obj.uuid = obj_dict['uuid']
    if 'parent_uuid' in obj_dict:
        obj.parent_uuid = obj_dict['parent_uuid']
    obj.set_server_conn(self)
    # Encode any pending prop-<list|map> element operations.
    # Bug fix: keep each operation paired with its own field name --
    # previously every update was tagged with whatever prop_name was
    # left over from the collection loops, mislabeling updates whenever
    # more than one property had pending operations.
    operations = []
    for prop_name, ops in obj._pending_field_list_updates.items():
        operations.extend((prop_name,) + tuple(op) for op in ops)
    for prop_name, ops in obj._pending_field_map_updates.items():
        operations.extend((prop_name,) + tuple(op) for op in ops)
    prop_coll_body = {'uuid': obj.uuid, 'updates': []}
    for prop_name, oper, elem_val, elem_pos in operations:
        if isinstance(elem_val, GeneratedsSuper):
            serialized_elem_value = elem_val.exportDict('')
        else:
            serialized_elem_value = elem_val
        prop_coll_body['updates'].append(
            {'field': prop_name,
             'operation': oper,
             'value': serialized_elem_value,
             'position': elem_pos})
    # All pending fields picked up; clear before the follow-up POST.
    obj.clear_pending_updates()
    if prop_coll_body['updates']:
        self._request_server(rest.OP_POST,
                             self._action_uri['prop-collection-update'],
                             data=json.dumps(prop_coll_body))
    return obj.uuid
def lookup_subscription(self, service_type, client_id=None, service_id=None, include_meta=False):
    """Look up subscription state under /clients/<service_type>.

    Three modes depending on the arguments supplied:
    - client_id and service_id: that subscription's blob (or a
      (blob, stat, ttl) tuple when include_meta is true), None if the
      node is missing.
    - client_id only: [(service_id, blob)] sorted by node modification
      time (order of assignment), None when the client node is missing.
    - neither: the list of client ids for the service type.
    Returns None when the service-type node itself does not exist.
    """
    if not self.exists_node('/clients/%s' % (service_type)):
        return None
    if client_id and service_id:
        try:
            datastr, stat = self.read_node(
                '/clients/%s/%s/%s' % (service_type, client_id, service_id))
            data = json.loads(datastr)
            blob = data['blob']
            if include_meta:
                return (blob, stat, data['ttl'])
            return blob
        except kazoo.exceptions.NoNodeException:
            return None
    if client_id:
        # our version of Kazoo doesn't support include_data :-(
        try:
            entries = []
            for service_id in self.get_children(
                    '/clients/%s/%s' % (service_type, client_id)):
                datastr, stat = self.read_node(
                    '/clients/%s/%s/%s' % (service_type, client_id, service_id))
                if datastr:
                    data = json.loads(datastr)
                    entries.append((service_id, data['blob'], stat))
            # sort services in the order of assignment to this client
            # (based on modification time)
            entries.sort(key=lambda entry: entry[2].last_modified)
            return [(sid, blob) for sid, blob, _ in entries]
        except kazoo.exceptions.NoNodeException:
            return None
    return self.get_children('/clients/%s' % (service_type))
def validate_perms_rbac(self, request, obj_uuid, mode=PERMS_R): err_msg = (403, 'Permission Denied') # retrieve object and permissions try: config = self._server_mgr._db_conn.uuid_to_obj_dict(obj_uuid) perms2 = json.loads(config.get('prop:perms2')) obj_name = config.get("fq_name") obj_type = config.get("type") except NoIdError: return (True, '') user, roles = self.get_user_roles(request) is_admin = self.cloud_admin_role in [x.lower() for x in roles] if is_admin: return (True, 'RWX') env = request.headers.environ tenant = env.get('HTTP_X_PROJECT_ID', None) tenant_name = env.get('HTTP_X_PROJECT_NAME', '*') if tenant is None: return (False, err_msg) owner = perms2['owner'] perms = perms2['owner_access'] << 6 perms |= perms2['global_access'] # build perms mask = 07 if tenant == owner: mask |= 0700 share = perms2['share'] tenants = [item['tenant'] for item in share] for item in share: if tenant == item['tenant']: perms = perms | item['tenant_access'] << 3 mask |= 0070 break mode_mask = mode | mode << 3 | mode << 6 ok = (mask & perms & mode_mask) granted = ok & 07 | (ok >> 3) & 07 | (ok >> 6) & 07 msg = '%s (%s:%s) %s %s admin=%s, mode=%03o mask=%03o perms=%03o, \ (usr=%s(%s)/own=%s/sh=%s)' \ % ('+++' if ok else '---', self.mode_str[mode], obj_uuid, obj_type, obj_name, 'yes' if is_admin else 'no', mode_mask, mask, perms, tenant, tenant_name, owner, tenants) self._server_mgr.config_log(msg, level=SandeshLevel.SYS_DEBUG) return (True, self.mode_str[granted]) if ok else (False, err_msg)
def ref_relax_for_delete(self, obj_uuid, ref_uuid):
    """Tell the server not to count obj_uuid's reference when deciding
    whether ref_uuid can be deleted in the future.

    Returns the affected uuid, or None when the server reports 404;
    re-raises any other HttpError.
    """
    body = json.dumps({'uuid': obj_uuid, 'ref-uuid': ref_uuid})
    try:
        content = self._request_server(rest.OP_POST,
                                       self._action_uri['ref-relax-for-delete'],
                                       data=body)
    except HttpError as he:
        if he.status_code == 404:
            return None
        raise he
    return json.loads(content)['uuid']