Example #1
    def dbe_obj_read(self, resource_type, resource_id, obj_fields):
        resource_oper = None
        try:
            if "all" in obj_fields:
                ok, obj_dict = self._db_conn.dbe_read(
                    resource_type,
                    resource_id,
                )
            else:
                obj_fields = list(set(obj_fields))
                ok, obj_dict = self._db_conn.dbe_read(resource_type,
                                                      resource_id,
                                                      obj_fields=obj_fields)

            if not ok:
                resource_oper = "STOP"
                obj_dict = {"error": "dbe_read failure: %s" % obj_dict}

        except NoIdError:
            err_msg = ("Resource with id %s already deleted at the "
                       "point of dbe_read" % resource_id)
            err_msg += detailed_traceback()
            self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
            resource_oper = OP_DELETE
            obj_dict = {"uuid": resource_id}
        except Exception as e:
            err_msg = "Exception %s while performing dbe_read" % e
            err_msg += detailed_traceback()
            self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
            resource_oper = "STOP"
            obj_dict = {"error": str(e)}
        return resource_oper, obj_dict
Example #2
        def wrapper(*args, **kwargs):
            if ((sys._getframe(1).f_code.co_name not in
                     ['_Multiget', 'multiget']) and
                     func.__name__ in ['get', 'multiget']):
                msg = ("It is not recommended to use 'get' or 'multiget' "
                       "pycassa methods. It's better to use 'xget' or "
                       "'get_range' methods due to thrift limitations")
                self.options.logger(msg, level=SandeshLevel.SYS_WARN)
            try:
                if self.get_status() != ConnectionStatus.UP:
                    # will set conn_state to UP if successful
                    self._cassandra_init_conn_pools()

                self.start_time = datetime.datetime.now()
                return func(*args, **kwargs)
            except (pycassa.pool.AllServersUnavailable,
                    pycassa.pool.MaximumRetryException) as e:
                if self.get_status() != ConnectionStatus.DOWN:
                    self.report_status_down()
                    msg = 'Cassandra connection down. Exception in %s' % (
                        str(func))
                    self.options.logger(msg, level=SandeshLevel.SYS_ERR)
                raise DatabaseUnavailableError(
                    'Error, %s: %s' % (str(e), utils.detailed_traceback()))

            finally:
                if self.log_response_time and oper:
                    self.end_time = datetime.datetime.now()
                    self.log_response_time(self.end_time - self.start_time,
                                           oper)
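The wrapper above closes over func and oper, which come from an enclosing scope that the excerpt omits. A minimal sketch of that enclosing factory, assuming it follows the rebinding pattern visible in Example #10, where each driver call is rebound through self._handle_exceptions(method, "OPER"); the exact upstream signature may differ:

    def _handle_exceptions(self, func, oper=None):
        # func: the raw pycassa call being wrapped; oper: a short label such
        # as "INSERT", used only for response-time logging.
        def wrapper(*args, **kwargs):
            ...  # body as shown in the excerpt above
        return wrapper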
Example #3
 def _ifmap_dequeue_task(self):
     while True:
         try:
             self._publish_to_ifmap_dequeue()
         except Exception as e:
             tb = detailed_traceback()
             self.config_log(tb, level=SandeshLevel.SYS_ERR)
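The loop above runs forever and logs any exception, so a failing publish never stops the dequeue task. A minimal sketch of how such a task is typically started under gevent (the attribute name below is an assumption; the excerpt does not show the startup wiring):

    import gevent

    # Assumed startup wiring: run the dequeue loop in its own greenlet so the
    # caller is not blocked and publish failures are only logged.
    self._ifmap_dequeue_greenlet = gevent.spawn(self._ifmap_dequeue_task)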
Example #5
    def dispatch(self):
        while True:
            notification = self._notification_queue.get()
            resource_type = notification.get("type")
            resource_oper = notification.get("oper")
            resource_id = notification.get("uuid")
            resource_data = {}
            if not self._client_queues.get(resource_type, None):
                # No clients subscribed for this resource type
                continue
            if resource_oper == "DELETE":
                resource_data = {"uuid": resource_id}
            else:
                try:
                    ok, resource_data = self._db_conn.dbe_read(
                        resource_type, resource_id)
                    if not ok:
                        resource_oper = "STOP"
                        resource_data = {
                            "error": "dbe_read failure: %s" % resource_data
                        }
                except NoIdError:
                    err_msg = ("Resource with id %s already deleted at the "
                               "point of dbe_read" % resource_id)
                    err_msg += detailed_traceback()
                    self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
                    resource_oper = "DELETE"
                    resource_data = {"uuid": resource_id}
                except Exception as e:
                    err_msg = "Exception %s while performing dbe_read" % e
                    err_msg += detailed_traceback()
                    self.config_log(err_msg, level=SandeshLevel.SYS_NOTICE)
                    resource_oper = "STOP"
                    resource_data = {"error": str(e)}

            event = self.pack(event=resource_oper,
                              data={resource_type: resource_data})
            for client_queue in self._client_queues.get(resource_type, []):
                client_queue.put_nowait(event)
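dispatch() shows only the producer side: self._client_queues maps a resource type to a list of queues, and packed events are delivered with put_nowait(). A minimal consumer-side sketch, assuming gevent queues; subscribe() and the handle callback are hypothetical and not part of the excerpt:

    import gevent.queue

    def subscribe(dispatcher, resource_type):
        # Hypothetical helper: register a per-client queue for one resource type.
        q = gevent.queue.Queue()
        dispatcher._client_queues.setdefault(resource_type, []).append(q)
        return q

    def consume(q, handle):
        while True:
            # Blocks this greenlet until dispatch() delivers a packed event.
            handle(q.get())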
Example #6
 def wrapper(*args, **kwargs):
     try:
         if self.get_status() != ConnectionStatus.UP:
             self._Init_Cluster()
         self.start_time = datetime.datetime.now()
         return func(*args, **kwargs)
     except (connector.InvalidRequest,
             connector.cluster.NoHostAvailable,
             connector.cqlengine.CQLEngineException) as e:
         if self.get_status() != ConnectionStatus.DOWN:
             self.report_status_down()
             self.options.logger(
                 "Cassandra connection down. Exception in {}".format(
                     func),
                 level=SandeshLevel.SYS_ERR)
         raise DatabaseUnavailableError("error, {}: {}".format(
             e, utils.detailed_traceback()))
     finally:
          if self.log_response_time and oper:
             self.end_time = datetime.datetime.now()
             self.log_response_time(self.end_time - self.start_time,
                                    oper)
Example #7
    def _load_init_data(self):
        """
        Load init data for job playbooks.

        This function loads init data from a data file specified by the
        argument '--fabric_ansible_dir' to the database. The data file
        must be in JSON format and follow the format below:

        {
          "data": [
            {
              "object_type": "tag",
              "objects": [
                {
                  "fq_name": [
                    "fabric=management_ip"
                  ],
                  "name": "fabric=management_ip",
                  "tag_type_name": "fabric",
                  "tag_value": "management_ip"
                }
              ]
            }
          ]
        }
        """
        try:
            json_data = self._load_json_data()
            if json_data is None:
                self._logger.error('Unable to load init data')
                return

            for item in json_data.get("data"):
                object_type = item.get("object_type")

                # Get the class name from object type
                cls_name = CamelCase(object_type)
                # Get the class object
                cls_ob = str_to_class(cls_name, resource_client.__name__)

                # saving the objects to the database
                for obj in item.get("objects"):
                    instance_obj = cls_ob.from_dict(**obj)

                    # create/update the object
                    fq_name = instance_obj.get_fq_name()
                    try:
                        uuid = self._vnc_api.fq_name_to_id(
                            object_type, fq_name)
                        if object_type == "tag":
                            continue
                        instance_obj.set_uuid(uuid)
                        # Update config json inside role-config object
                        if object_type == 'role-config':
                            role_config_obj = self._vnc_api.\
                                role_config_read(id=uuid)
                            cur_config_json = json.loads(
                                role_config_obj.get_role_config_config())
                            def_config_json = json.loads(
                                instance_obj.get_role_config_config())
                            def_config_json.update(cur_config_json)
                            instance_obj.set_role_config_config(
                                json.dumps(def_config_json))
                        if object_type not in ('telemetry-profile',
                                               'sflow-profile'):
                            self._vnc_api._object_update(
                                object_type, instance_obj)
                    except NoIdError:
                        self._vnc_api._object_create(object_type, instance_obj)

            for item in json_data.get("refs"):
                from_type = item.get("from_type")
                from_fq_name = item.get("from_fq_name")
                from_uuid = self._vnc_api.fq_name_to_id(
                    from_type, from_fq_name)

                to_type = item.get("to_type")
                to_fq_name = item.get("to_fq_name")
                to_uuid = self._vnc_api.fq_name_to_id(to_type, to_fq_name)

                self._vnc_api.ref_update(from_type, from_uuid, to_type,
                                         to_uuid, to_fq_name, 'ADD')
        except Exception as e:
            err_msg = 'error while loading init data: %s\n' % str(e)
            err_msg += detailed_traceback()
            self._logger.error(err_msg)

        # - fetch list of all the physical routers
        # - check physical and overlay role associated with each PR
        # - create ref between physical_role and physical_router object,
        # if PR is assigned with a specific physical role
        # - create ref between overlay_roles and physical_router object,
        # if PR is assigned with specific overlay roles
        obj_list = self._vnc_api._objects_list('physical-router')
        pr_list = obj_list.get('physical-routers')
        for pr in pr_list or []:
            try:
                pr_obj = self._vnc_api.\
                    physical_router_read(id=pr.get('uuid'))
                physical_role = pr_obj.get_physical_router_role()
                overlay_roles = pr_obj.get_routing_bridging_roles()
                if overlay_roles is not None:
                    overlay_roles = overlay_roles.get_rb_roles()
                if physical_role:
                    try:
                        physical_role_uuid = self._vnc_api.\
                            fq_name_to_id('physical_role',
                                          ['default-global-system-config',
                                           physical_role])
                        if physical_role_uuid:
                            self._vnc_api.ref_update('physical-router',
                                                     pr.get('uuid'),
                                                     'physical-role',
                                                     physical_role_uuid, None,
                                                     'ADD')
                    except NoIdError:
                        pass
                if overlay_roles:
                    for overlay_role in overlay_roles or []:
                        try:
                            overlay_role_uuid = self._vnc_api.\
                                fq_name_to_id('overlay_role',
                                              ['default-global-system-config',
                                               overlay_role.lower()])
                            if overlay_role_uuid:
                                self._vnc_api.ref_update(
                                    'physical-router', pr.get('uuid'),
                                    'overlay-role', overlay_role_uuid, None,
                                    'ADD')
                        except NoIdError:
                            pass
            except NoIdError:
                pass

        # handle deleted job_templates as part of in-place cluster update
        to_be_del_jt_names = [
            'show_interfaces_template', 'show_config_interfaces_template',
            'show_interfaces_by_names_template'
        ]
        for jt_name in to_be_del_jt_names:
            try:
                self._vnc_api.job_template_delete(
                    fq_name=['default-global-system-config', jt_name])
            except NoIdError:
                pass
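For orientation, this is the overall structure that _load_init_data() expects from _load_json_data(), written as a Python literal. The "data" entry mirrors the tag example from the docstring; the "refs" entry is purely illustrative, since the code only reveals the key names it reads:

    # Illustrative only: shaped after the keys _load_init_data() reads.
    init_data = {
        "data": [
            {
                "object_type": "tag",
                "objects": [
                    {
                        "fq_name": ["fabric=management_ip"],
                        "name": "fabric=management_ip",
                        "tag_type_name": "fabric",
                        "tag_value": "management_ip",
                    },
                ],
            },
        ],
        "refs": [
            {
                # Hypothetical ref entry; only the key names are implied above.
                "from_type": "physical-router",
                "from_fq_name": ["default-global-system-config", "leaf-1"],
                "to_type": "fabric",
                "to_fq_name": ["default-global-system-config", "fab01"],
            },
        ],
    }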
Example #8
    def _load_init_data(self):
        """
        Load init data for job playbooks.

        This function loads init data from a data file specified by the
        argument '--fabric_ansible_dir' to the database. The data file
        must be in JSON format and follow the format below:

        {
          "data": [
            {
              "object_type": "tag",
              "objects": [
                {
                  "fq_name": [
                    "fabric=management_ip"
                  ],
                  "name": "fabric=management_ip",
                  "tag_type_name": "fabric",
                  "tag_value": "management_ip"
                }
              ]
            }
          ]
        }
        """
        try:
            json_data = self._load_json_data()
            if json_data is None:
                self._logger.error('Unable to load init data')
                return

            for item in json_data.get("data"):
                object_type = item.get("object_type")

                # Get the class name from object type
                cls_name = CamelCase(object_type)
                # Get the class object
                cls_ob = str_to_class(cls_name, resource_client.__name__)

                # saving the objects to the database
                for obj in item.get("objects"):
                    instance_obj = cls_ob.from_dict(**obj)

                    # create/update the object
                    fq_name = instance_obj.get_fq_name()
                    try:
                        uuid_id = self._vnc_api.fq_name_to_id(
                            object_type, fq_name)
                        if object_type == "tag":
                            continue
                        instance_obj.set_uuid(uuid_id)
                        # Update config json inside role-config object
                        if object_type == 'role-config':
                            role_config_obj = self._vnc_api.\
                                role_config_read(id=uuid_id)
                            cur_config_json = json.loads(
                                role_config_obj.get_role_config_config())
                            def_config_json = json.loads(
                                instance_obj.get_role_config_config())
                            def_config_json.update(cur_config_json)
                            instance_obj.set_role_config_config(
                                json.dumps(def_config_json))

                        if object_type not in ('telemetry-profile',
                                               'sflow-profile',
                                               'device-functional-group'):
                            self._vnc_api._object_update(
                                object_type, instance_obj)

                    except NoIdError:
                        self._vnc_api._object_create(object_type, instance_obj)

            for item in json_data.get("refs"):
                from_type = item.get("from_type")
                from_fq_name = item.get("from_fq_name")
                from_uuid = self._vnc_api.fq_name_to_id(
                    from_type, from_fq_name)

                to_type = item.get("to_type")
                to_fq_name = item.get("to_fq_name")
                to_uuid = self._vnc_api.fq_name_to_id(to_type, to_fq_name)

                self._vnc_api.ref_update(from_type, from_uuid, to_type,
                                         to_uuid, to_fq_name, 'ADD')
        except Exception as e:
            err_msg = 'error while loading init data: %s\n' % str(e)
            err_msg += detailed_traceback()
            self._logger.error(err_msg)

        # create VN and IPAM for IPV6 link-local addresses
        ipv6_link_local_nw = '_internal_vn_ipv6_link_local'
        self._create_ipv6_ll_ipam_and_vn(self._vnc_api, ipv6_link_local_nw)

        # - fetch list of all the physical routers
        # - check physical and overlay role associated with each PR
        # - create ref between physical_role and physical_router object,
        # if PR is assigned with a specific physical role
        # - create ref between overlay_roles and physical_router object,
        # if PR is assigned with specific overlay roles
        obj_list = self._vnc_api._objects_list('physical-router')
        pr_list = obj_list.get('physical-routers')
        for pr in pr_list or []:
            try:
                pr_obj = self._vnc_api.\
                    physical_router_read(id=pr.get('uuid'))
                physical_role = pr_obj.get_physical_router_role()
                overlay_roles = pr_obj.get_routing_bridging_roles()
                if overlay_roles is not None:
                    overlay_roles = overlay_roles.get_rb_roles()
                if physical_role:
                    try:
                        physical_role_uuid = self._vnc_api.\
                            fq_name_to_id('physical_role',
                                          ['default-global-system-config',
                                           physical_role])
                        if physical_role_uuid:
                            self._vnc_api.ref_update('physical-router',
                                                     pr.get('uuid'),
                                                     'physical-role',
                                                     physical_role_uuid, None,
                                                     'ADD')
                    except NoIdError:
                        pass
                if overlay_roles:
                    for overlay_role in overlay_roles or []:
                        try:
                            overlay_role_uuid = self._vnc_api.\
                                fq_name_to_id('overlay_role',
                                              ['default-global-system-config',
                                               overlay_role.lower()])
                            if overlay_role_uuid:
                                self._vnc_api.ref_update(
                                    'physical-router', pr.get('uuid'),
                                    'overlay-role', overlay_role_uuid, None,
                                    'ADD')
                        except NoIdError:
                            pass
            except NoIdError:
                pass

        # handle replacing master-LR as <fab_name>-master-LR here
        # as part of in-place cluster update. Copy the master-LR
        # and also its associated vns and their annotations here

        master_lr_obj = None
        try:
            master_lr_obj = self._vnc_api.logical_router_read(
                fq_name=['default-domain', 'default-project', 'master-LR'])
        except NoIdError:
            try:
                master_lr_obj = self._vnc_api.logical_router_read(
                    fq_name=['default-domain', 'admin', 'master-LR'])
            except NoIdError:
                pass

        if master_lr_obj:
            vmi_refs = master_lr_obj.get_virtual_machine_interface_refs() or []
            # get existing pr refs
            pr_refs = master_lr_obj.get_physical_router_refs() or []
            fabric_refs = master_lr_obj.get_fabric_refs() or []
            perms2 = master_lr_obj.get_perms2()
            fab_fq_name = None

            try:
                # This has to happen before creating fab-master-LR as
                # otherwise it will fail creation
                # of fab-master-lr with annotations having master-lr uuid
                # Now delete master-LR object
                # this will delete lr annotations from fabric in
                # corresponding VNs if they exist
                self._vnc_api.logical_router_delete(
                    id=master_lr_obj.get_uuid())

                # try to obtain the fabric refs either by fabric ref if one
                # is available or from pr_refs if available

                if pr_refs and not fabric_refs:
                    # this is assuming that even though there can be
                    # multiple pr refs, a LR cannot have more than
                    # one fabric refs. So a random pr chosen in the pr
                    # refs list will have the same fabric name as the other
                    # prs in the list
                    pr_ref = pr_refs[-1]
                    pr_obj = self._vnc_api.physical_router_read(id=pr_ref.get(
                        'uuid', self._vnc_api.fq_name_to_id(pr_ref.get('to'))))
                    fabric_refs = pr_obj.get_fabric_refs() or []

                if fabric_refs:
                    fabric_ref = fabric_refs[-1]
                    fab_fq_name = fabric_ref.get(
                        'to',
                        self._vnc_api.id_to_fq_name(fabric_ref.get('uuid')))

                # if fab_fq_name is not derivable or was not present, then
                # skip creating fab_name-master-LR as fabric information
                # is not available
                # if fab_fq_name is available, copy necessary refs from prev.
                # master LR, create new fab_name-master-LR and this will update
                # VN annotations accordingly.
                if fab_fq_name:
                    def_project = self._vnc_api.project_read(
                        ['default-domain', 'default-project'])
                    fab_name = fab_fq_name[-1]
                    lr_fq_name = [
                        'default-domain', 'default-project',
                        fab_name + '-master-LR'
                    ]
                    fab_master_lr_obj = LogicalRouter(
                        name=lr_fq_name[-1],
                        fq_name=lr_fq_name,
                        logical_router_gateway_external=False,
                        logical_router_type='vxlan-routing',
                        parent_obj=def_project)
                    perms2.set_global_access(PERMS_RWX)
                    fab_master_lr_obj.set_perms2(perms2)

                    fab_master_lr_obj.set_virtual_machine_interface_list(
                        vmi_refs)
                    fab_master_lr_obj.set_physical_router_list(pr_refs)
                    fab_master_lr_obj.set_fabric_list(fabric_refs)

                    self._vnc_api.logical_router_create(fab_master_lr_obj)
            except NoIdError:
                pass
            except Exception as exc:
                err_msg = ("An exception occurred while attempting to "
                           "create fabric master-LR: %s" % str(exc))
                self._logger.warning(err_msg)

        # handle deleted job_templates as part of in-place cluster update
        to_be_del_jt_names = [
            'show_interfaces_template', 'show_config_interfaces_template',
            'show_interfaces_by_names_template'
        ]
        for jt_name in to_be_del_jt_names:
            try:
                self._vnc_api.job_template_delete(
                    fq_name=['default-global-system-config', jt_name])
            except NoIdError:
                pass
Example #9
    def _load_init_data(self):
        """
        Load init data for job playbooks.

        This function loads init data from a data file specified by the
        argument '--fabric_ansible_dir' to the database. The data file
        must be in JSON format and follow the format below:

        {
          "data": [
            {
              "object_type": "tag",
              "objects": [
                {
                  "fq_name": [
                    "fabric=management_ip"
                  ],
                  "name": "fabric=management_ip",
                  "tag_type_name": "fabric",
                  "tag_value": "management_ip"
                }
              ]
            }
          ]
        }
        """
        try:
            json_data = self._load_json_data()
            if json_data is None:
                self._logger.error('Unable to load init data')
                return

            for item in json_data.get("data"):
                object_type = item.get("object_type")

                # Get the class name from object type
                cls_name = CamelCase(object_type)
                # Get the class object
                cls_ob = str_to_class(cls_name, resource_client.__name__)

                # saving the objects to the database
                for obj in item.get("objects"):
                    instance_obj = cls_ob.from_dict(**obj)

                    # create/update the object
                    fq_name = instance_obj.get_fq_name()
                    try:
                        uuid = self._vnc_api.fq_name_to_id(
                            object_type, fq_name)
                        if object_type == "tag":
                            continue
                        instance_obj.set_uuid(uuid)
                        self._vnc_api._object_update(object_type, instance_obj)
                    except NoIdError:
                        self._vnc_api._object_create(object_type, instance_obj)

            for item in json_data.get("refs"):
                from_type = item.get("from_type")
                from_fq_name = item.get("from_fq_name")
                from_uuid = self._vnc_api.fq_name_to_id(
                    from_type, from_fq_name)

                to_type = item.get("to_type")
                to_fq_name = item.get("to_fq_name")
                to_uuid = self._vnc_api.fq_name_to_id(to_type, to_fq_name)

                self._vnc_api.ref_update(from_type, from_uuid, to_type,
                                         to_uuid, to_fq_name, 'ADD')
        except Exception as e:
            err_msg = 'error while loading init data: %s\n' % str(e)
            err_msg += detailed_traceback()
            self._logger.error(err_msg)
Example #10
    def _Init_Cluster(self):
        self.report_status_init()

        self._cql_select = self._handle_exceptions(self._cql_select, "SELECT")
        self._Insert = self._handle_exceptions(self._Insert, "INSERT")
        self._Remove = self._handle_exceptions(self._Remove, "REMOVE")
        self._Get_Range = self._handle_exceptions(self._Get_Range, "GET_RANGE")
        self._Get_Count = self._handle_exceptions(self._Get_Count, "GET_COUNT")

        # Authentication related options
        auth_provider = None
        if self.options.credential:
            auth_provider = connector.auth.PlainTextAuthProvider(
                username=self.options.credential.get('username'),
                password=self.options.credential.get('password'))

        # SSL related options
        ssl_options, ssl_context = None, None
        if self.options.ssl_enabled:
            ssl_context = ssl.SSLContext(SSL_VERSION)
            ssl_context.load_verify_locations(self.options.ca_certs)
            ssl_context.verify_mode = ssl.CERT_REQUIRED
            ssl_context.check_hostname = False
            ssl_options = {}

        # Profiles related features
        ExecutionProfile = connector.cluster.ExecutionProfile
        profiles = {
            connector.cluster.EXEC_PROFILE_DEFAULT:
            ExecutionProfile(
                # TODO(sahid): Do we really want QUORUM when counting?
                consistency_level=self.ConsistencyLevel,
                row_factory=self.RowFactory),
        }

        # Addresses, ports related options
        endpoints = []
        for address in self._server_list:
            try:
                server, port = address.split(':', 1)

                endpoints.append((server, int(port)))
            except ValueError:
                endpoints.append(address)

        connector.ProtocolVersion.SUPPORTED_VERSIONS = self.ProtocolVersions
        try:
            self._cluster = connector.cluster.Cluster(
                endpoints,
                ssl_options=ssl_options,
                ssl_context=ssl_context,
                auth_provider=auth_provider,
                execution_profiles=profiles,
                cql_version=self.CqlVersion)
            self._cluster.connect()
        except Exception as error:
            raise DatabaseUnavailableError("error, {}: {}".format(
                error, utils.detailed_traceback()))

        # Initializes RW keyspaces
        for ks, cf_dict in self.options.rw_keyspaces.items():
            keyspace = self.keyspace(ks)
            if self.options.reset_config:
                self.safe_drop_keyspace(keyspace)
            self.safe_create_keyspace(keyspace)
            self.ensure_keyspace_replication(keyspace)

        # Ensures RO keyspaces are initialized
        while not self.are_keyspaces_ready(self.options.ro_keyspaces):
            self.options.logger("waiting for keyspaces '{}' to be ready "
                                "before to continue...".format(
                                    self.options.ro_keyspaces),
                                level=SandeshLevel.SYS_INFO)
            # Give another greenthread a chance to be scheduled.
            gevent.sleep(1)

        # The CFs are flattened into a single dict, each entry tied to its
        # keyspace's session.
        for ks, cf_dict in itertools.chain(self.options.rw_keyspaces.items(),
                                           self.options.ro_keyspaces.items()):
            for cf_name in cf_dict:
                self.create_session(self.keyspace(ks), cf_name)

        # Now create the tables/CFs if they do not already exist.
        for cf_name in self._cf_dict:
            self.safe_create_table(cf_name)
            self.ensure_table_properties(cf_name)

        self.report_status_up()
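A small worked example of the address parsing in _Init_Cluster(): entries of the form host:port become (host, port) tuples, while anything without a port fails tuple unpacking and is passed to the driver untouched. The addresses are made up:

    # Assumed format for self._server_list.
    server_list = ['10.0.0.1:9041', 'cassandra-node2']

    endpoints = []
    for address in server_list:
        try:
            server, port = address.split(':', 1)
            endpoints.append((server, int(port)))
        except ValueError:
            endpoints.append(address)

    # endpoints == [('10.0.0.1', 9041), 'cassandra-node2']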
Example #11
    def _load_init_data(self):
        """
        This function loads init data from a data file specified by the
        argument '--fabric_ansible_dir' to the database. The data file
        must be in JSON format and follow the format below:
        {
          "data": [
            {
              "object_type": "<vnc object type name>",
              "objects": [
                {
                  <vnc object payload>
                },
                ...
              ]
            },
            ...
          ]
        }

        Here is an example:
        {
          "data": [
            {
              "object_type": "tag",
              "objects": [
                {
                  "fq_name": [
                    "fabric=management_ip"
                  ],
                  "name": "fabric=management_ip",
                  "tag_type_name": "fabric",
                  "tag_value": "management_ip"
                }
              ]
            }
          ]
        }
        """
        try:
            json_data = self._load_json_data()
            if json_data is None:
                self._logger.error('Unable to load init data')
                return

            for item in json_data.get("data"):
                object_type = item.get("object_type")

                # Get the class name from object type
                cls_name = CamelCase(object_type)
                # Get the class object
                cls_ob = str_to_class(cls_name, resource_client.__name__)

                # saving the objects to the database
                for obj in item.get("objects"):
                    instance_obj = cls_ob.from_dict(**obj)

                    # create/update the object
                    fq_name = instance_obj.get_fq_name()
                    try:
                        uuid = self._vnc_api.fq_name_to_id(object_type, fq_name)
                        if object_type == "tag":
                            continue
                        instance_obj.set_uuid(uuid)
                        self._vnc_api._object_update(object_type, instance_obj)
                    except NoIdError:
                        self._vnc_api._object_create(object_type, instance_obj)

            for item in json_data.get("refs"):
                from_type = item.get("from_type")
                from_fq_name = item.get("from_fq_name")
                from_uuid = self._vnc_api.fq_name_to_id(from_type, from_fq_name)

                to_type = item.get("to_type")
                to_fq_name = item.get("to_fq_name")
                to_uuid = self._vnc_api.fq_name_to_id(to_type, to_fq_name)

                self._vnc_api.ref_update(from_type, from_uuid, to_type,
                                         to_uuid, to_fq_name, 'ADD')
        except Exception as e:
            err_msg = 'error while loading init data: %s\n' % str(e)
            err_msg += detailed_traceback()
            self._logger.error(err_msg)
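The loop above turns each object_type into a vnc_api client class via CamelCase() and str_to_class(), neither of which appears in the excerpt. A hypothetical stand-in illustrating the assumed mapping (for example, 'role-config' resolving to a RoleConfig class):

    import sys

    def camel_case(object_type):
        # Hypothetical equivalent of CamelCase(): 'role-config' -> 'RoleConfig'.
        return ''.join(part.capitalize()
                       for part in object_type.replace('-', '_').split('_'))

    def str_to_class(class_name, module_name):
        # Hypothetical equivalent of str_to_class(): look the class up by name
        # in an already imported module, e.g. vnc_api's resource_client.
        return getattr(sys.modules[module_name], class_name, None)

    assert camel_case('role-config') == 'RoleConfig'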