def test_get_api_client(self):
    client = config.get_api_client()
    self.assertEqual(client.host, 'localhost')
    self.assertEqual(client.username, 'admin')
    self.assertEqual(client.password, 'unittest')
    self.assertEqual(client.protocol, "https")
    self.assertEqual(client.ssl_verify, True)

@classmethod
def api_broker(klass):
    client = config.get_api_client()
    return webui_broker.IssueAdhocBroker(host=client.host,
                                         login=client.username,
                                         password=client.password,
                                         proto=client.protocol,
                                         ssl_verify=client.ssl_verify)

@classmethod
def get_broker(klass):
    """
    klass.api_broker can be either a callable or a string.
    If it's a string, we use it as the broker name for infoblox_netmri.
    If it's callable, we use its return value as the API broker.
    """
    if callable(klass.api_broker):
        return klass.api_broker()
    client = config.get_api_client()
    return client.get_broker(klass.api_broker)

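# Illustrative sketch only (the subclass names are hypothetical, and "Script" is
# assumed to be a valid infoblox_netmri broker name) of the two forms get_broker()
# accepts:
#
#     class ScriptLike(ApiObject):
#         api_broker = "Script"        # string: resolved via client.get_broker("Script")
#
#     class IssueLike(ApiObject):
#         @classmethod
#         def api_broker(klass):       # callable: get_broker() returns whatever this returns
#             client = config.get_api_client()
#             return webui_broker.IssueAdhocBroker(host=client.host,
#                                                  login=client.username,
#                                                  password=client.password,
#                                                  proto=client.protocol,
#                                                  ssl_verify=client.ssl_verify)
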
def __init__(self, id=None, blob=None, error=None, **api_metadata):
    self.client = config.get_api_client()
    self._broker = None
    self.id = id
    if blob is not None:
        self._blob = blob
        self.path = blob.path
    else:
        self._blob = None
        self.path = None
    self.error = error
    self.updated_at = api_metadata.get("updated_at", None)
    self.set_metadata(api_metadata)

class ApiObject:
    client = config.get_api_client()
    api_broker = None
    # Lists all attributes that should be received from api
    api_attributes = ()
    # Lists all attributes that are unique on netmri (such as name)
    secondary_keys = ()

    def __init__(self, id=None, blob=None, error=None, **api_metadata):
        self.broker = self.get_broker()
        self.id = id
        if blob is not None:
            self._blob = blob
            self.path = blob.path
        else:
            self._blob = None
            self.path = None
        self.error = error
        self.updated_at = api_metadata.get("updated_at", None)
        self.set_metadata(api_metadata)

    def get_metadata(self):
        res = {}
        if self.id is not None:
            res['id'] = self.id
        if self.updated_at is not None:
            res['updated_at'] = self.updated_at
        for attr in self.api_attributes:
            res[attr] = getattr(self, attr)
        return res

    def set_metadata(self, metadata):
        logger.debug(
            f"setting metadata for instance of {self.__class__.__name__} with {metadata}"
        )
        if 'id' in metadata:
            self.id = metadata['id']
        if 'updated_at' in metadata:
            self.updated_at = metadata['updated_at']
        if 'error' in metadata:
            self.error = metadata['error']
        for attr in self.api_attributes:
            # Don't replace existing attributes if we only got partial metadata
            # (can happen if we parse metadata block)
            # If you want to unset an attribute, set it to None explicitly
            if attr not in metadata and getattr(self, attr, None) is not None:
                continue
            value = metadata.get(attr, None)
            setattr(self, attr, value)

    @classmethod
    def get_broker(klass):
        return klass.client.get_broker(klass.api_broker)

    @classmethod
    def scripts_dir(klass):
        path = config.get_config().class_paths.get(klass.__name__, None)
        if path is None:
            raise ValueError(
                f"Cannot determine repo path for {klass.__name__}")
        return path

    @classmethod
    def _get_subclass_by_path(klass, path):
        subclass_name = None
        for cls, cls_path in config.get_config().class_paths.items():
            if path.startswith(cls_path):
                subclass_name = cls
                break
        the_module = importlib.import_module(klass.__module__)
        if subclass_name is None or subclass_name not in dir(the_module):
            raise ValueError(f"Cannot find subclass for path {path}")
        return getattr(the_module, subclass_name)

    # Create object from XXXRemote
    @classmethod
    def from_api(klass, remote):
        logger.debug(f"creating {klass.__name__} from {remote.__class__}")
        item_dict = {}
        item_dict["id"] = remote.id
        item_dict["updated_at"] = remote.updated_at
        for attr in klass.api_attributes:
            item_dict[attr] = getattr(remote, attr, None)
        logger.debug(f"creating {klass.api_broker} object from {item_dict}")
        return klass(**item_dict)

    @classmethod
    def from_blob(klass, blob):
        if klass.__name__ == "ApiObject":
            klass = klass._get_subclass_by_path(blob.path)
        logger.debug(f"creating {klass.__name__} from {blob.path}")
        item_dict = {}
        note = blob.find_note_on_ancestors()
        if note.content is not None:
            item_dict = dict(**(note.content))  # poor man's deepcopy
        item_dict['blob'] = blob
        item_dict['path'] = blob.path
        logger.debug(f"setting attributes from {item_dict}")
        res = klass(**item_dict)
        res.load_content_from_repo()
        # This will update metadata values from note with ones from content itself.
        # Note that we don't update git note here.
        # It will be done on api push, if necessary
        res.set_metadata_from_content()
        return res

    def load_content_from_api(self):
        raise NotImplementedError(
            f"Class {self.__class__} must implement load_content_from_api")

    def load_content_from_repo(self):
        logger.debug(
            f"loading content for {self.api_broker} from {self._blob.path}")
        self._content = self._blob.get_content()

    def delete_on_server(self):
        broker = self.get_broker()
        logger.info(
            f"DEL {self.api_broker} {self.name} (id {self.id}) [{self.path}]")
        logger.debug(f"calling {self.api_broker}.destroy with id {self.id}")
        check_dryrun(broker.destroy)(id=self.id)

    def push_to_api(self):
        # TODO: We need to check that the object is in a clean state
        # (i.e. content and metadata properties are the same as in repo)
        if self._content is None:
            if self.path is None:
                raise ValueError("There is no such file in the repository")
            else:
                raise ValueError(f"Content for {self.path} is not loaded")
        if self.id is None:
            logger.info(
                f"{self.path} -> {self.api_broker} \"{self.name}\" NEW")
        else:
            logger.info(
                f"{self.path} -> {self.api_broker} \"{self.name}\" (id {self.id})"
            )
        try:
            api_result = self._do_push_to_api()
            if api_result is None and get_dryrun():
                # No point in updating the object if dry run is enabled
                return None
            item_dict = {}
            item_dict["id"] = api_result.id
            item_dict["updated_at"] = api_result.updated_at
            for attr in self.api_attributes:
                item_dict[attr] = getattr(api_result, attr, None)
            logger.debug(
                f"Updating object attributes with API result {item_dict}")
            self.set_metadata(item_dict)
            self.error = None
        except Exception as e:
            self.error = self._parse_error(e)
            logger.error(
                f"An error has occurred while syncing {self.path}: {self.error}"
            )
        self.save_note()

    # _do_push_to_api must be defined in a subclass and must return an XXXRemote object.
    # In some cases, this method must call self.get_broker().show(id=received_id)
    # to obtain necessary metadata
    @check_dryrun
    def _do_push_to_api(self):
        raise NotImplementedError(
            f"Class {self.__class__} must implement _do_push_to_api")

    # This must be overridden in a subclass
    def set_metadata_from_content(self):
        raise NotImplementedError(
            f"Class {self.__class__} must implement set_metadata_from_content")

    def get_full_path(self):
        # TODO: find path by broker and id if no path is provided
        pass

    # TODO: must create git blobs instead of files so it'll work on bare repo
    # See https://git-scm.com/book/en/v2/Git-Internals-Git-Objects
    @check_dryrun
    def save_to_disk(self):
        conf = config.get_config()
        os.makedirs(os.path.join(conf.scripts_root, self.scripts_dir()),
                    exist_ok=True)
        fn = os.path.join(conf.scripts_root, self.generate_path())
        logger.info(
            f"{self.api_broker} \"{self.name}\" (id {self.id}) -> {self.path}")
        with open(fn, 'w') as f:
            f.write(self._content)
        return fn

    @check_dryrun
    def save_note(self):
        self._blob.note = self.get_note()

    def get_note(self):
        return {
            "id": self.id,
            "path": self.path,
            "updated_at": self.updated_at,
            "blob": self._blob.id,
            "class": self.__class__.__name__,
            "error": self.error
        }

    # Some objects, like scripts, have subcategories.
    # These categories are represented as subdirs
    def get_subpath(self):
        return ''

    def get_extension(self):
        return ''

    def generate_path(self):
        if self.path is None:
            # Name must be unique, so it is safe
            filename = getattr(self, self.secondary_keys[0], str(self.id))
            filename = re.sub(r"[^A-Za-z0-9_\-.]", "_", filename)
            extension = self.get_extension()
            filename = '.'.join([filename, extension])
            self.path = os.path.join(self.scripts_dir(), self.get_subpath(),
                                     filename)
        return self.path

    def find_by_secondary_keys(self):
        args = {}
        for key in self.secondary_keys:
            args[f"op_{key}"] = "="
            args[f"val_c_{key}"] = getattr(self, key)
        logger.debug(f"Executing {self.api_broker}.find with {args}")
        return self.broker.find(**args)

    @staticmethod
    def _parse_error(e):
        msg = str(e)
        if isinstance(e, exceptions.RequestException):
            msg = e.response.content
        try:
            # NetMRI returns errors in JSON
            msg_dict = json.loads(msg)
            message = msg_dict['message']
            field_msgs = []
            if 'fields' in msg_dict:
                for field, val in msg_dict['fields'].items():
                    field_msg = field + ': ' + " ".join(val)
                    field_msgs.append(field_msg)
            if field_msgs:
                message = message + ': ' + ', '.join(field_msgs)
            return message
        except json.JSONDecodeError:
            pass
        except KeyError:
            pass
        # If error is not in json, return it as is
        return msg
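
# A minimal sketch of a concrete subclass, showing the hooks ApiObject expects to be
# overridden. It is illustrative only: "Script" is assumed to be a valid infoblox_netmri
# broker name, and the remote attribute and broker method names used below
# (script_source, create, update) are placeholders that must be checked against the
# actual NetMRI API before use.
class ExampleScript(ApiObject):
    api_broker = "Script"                      # resolved via client.get_broker()
    api_attributes = ("name", "description")   # copied from/to the remote object
    secondary_keys = ("name",)                 # unique on NetMRI; used by find_by_secondary_keys

    def get_extension(self):
        return "py"

    def load_content_from_api(self):
        # hypothetical: fetch the script body from the remote object
        remote = self.broker.show(id=self.id)
        self._content = getattr(remote, "script_source", "")

    def set_metadata_from_content(self):
        # hypothetical: a real subclass would parse a metadata header out of the content
        first_line = (self._content or "").splitlines()[:1]
        if first_line:
            self.description = first_line[0].lstrip("# ")

    @check_dryrun
    def _do_push_to_api(self):
        # hypothetical: create or update the remote object and return the XXXRemote result
        if self.id is None:
            return self.broker.create(name=self.name, script_source=self._content)
        return self.broker.update(id=self.id, script_source=self._content)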