def __init__(self):
    """Assemble the PXE + DRAC driver interfaces.

    :raises: DriverLoadError if either the pywsman or the
        python-dracclient library cannot be imported.
    """
    if importutils.try_import('pywsman') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_('Unable to import pywsman library'))
    if importutils.try_import('dracclient') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_('Unable to import python-dracclient library'))
    self.power = power.DracPower()
    self.boot = pxe.PXEBoot()
    self.deploy = iscsi_deploy.ISCSIDeploy()
    self.management = management.DracManagement()
    self.iscsi_vendor = iscsi_deploy.VendorPassthru()
    self.drac_vendor = vendor_passthru.DracVendorPassthru()
    # Deploy-related passthru methods are served by the iSCSI vendor
    # interface; the BIOS configuration methods by the DRAC one.
    self.mapping = {}
    for method_name in ('pass_deploy_info', 'heartbeat',
                        'pass_bootloader_install_info'):
        self.mapping[method_name] = self.iscsi_vendor
    for method_name in ('get_bios_config', 'set_bios_config',
                        'commit_bios_config', 'abandon_bios_config'):
        self.mapping[method_name] = self.drac_vendor
    self.vendor = utils.MixinVendorInterface(self.mapping)
    self.inspect = inspector.Inspector.create_if_enabled('PXEDracDriver')
def __init__(self):
    """Assemble the test DRAC driver: real power/management, fake deploy.

    :raises: DriverLoadError if either the pywsman or the
        python-dracclient library cannot be imported.
    """
    if importutils.try_import('pywsman') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_('Unable to import pywsman library'))
    if importutils.try_import('dracclient') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_('Unable to import python-dracclient library'))
    self.power = drac_power.DracPower()
    self.deploy = fake.FakeDeploy()
    self.management = drac_mgmt.DracManagement()
def __init__(self, connection_str, project=None, service=None, host=None,
             context=None, conf=None, transport_url=None, idle_timeout=1,
             **kwargs):
    """Driver that uses messaging as transport for notifications

    :param connection_str: OSProfiler driver connection string,
           equals to messaging://
    :param project: project name that will be included into notification
    :param service: service name that will be included into notification
    :param host: host name that will be included into notification
    :param context: oslo.messaging context
    :param conf: oslo.config CONF object
    :param transport_url: oslo.messaging transport, e.g.
           rabbit://rabbit:password@devstack:5672/
    :param idle_timeout: how long to wait for new notifications after
           the last one seen in the trace; this parameter is useful to
           collect full trace of asynchronous commands, e.g. when user
           runs `osprofiler` right after `openstack server create`
    :param kwargs: black hole for any other parameters
    """
    # oslo.messaging is an optional dependency of osprofiler; without it
    # this driver cannot function at all, so fail fast.
    self.oslo_messaging = importutils.try_import("oslo_messaging")
    if not self.oslo_messaging:
        raise ValueError("Oslo.messaging library is required for "
                         "messaging driver")

    super(Messaging, self).__init__(connection_str, project=project,
                                    service=service, host=host)

    self.context = context

    if not conf:
        # Fall back to the global oslo.config CONF object when the
        # caller did not supply one; oslo.config then becomes a hard
        # requirement as well.
        oslo_config = importutils.try_import("oslo_config")
        if not oslo_config:
            raise ValueError("Oslo.config library is required for "
                             "messaging driver")
        conf = oslo_config.cfg.CONF

    transport_kwargs = {}
    if transport_url:
        transport_kwargs["url"] = transport_url
    self.transport = self.oslo_messaging.get_notification_transport(
        conf, **transport_kwargs)
    # retry=0: never block the profiled application waiting on an
    # unreachable messaging backend; notifications are best-effort.
    self.client = self.oslo_messaging.Notifier(
        self.transport, publisher_id=self.host, driver="messaging",
        topics=["profiler"], retry=0)
    self.idle_timeout = idle_timeout
def __init__(self):
    """Assemble the iBoot driver with agent-based deploy.

    :raises: DriverLoadError if the iboot library cannot be imported.
    """
    if importutils.try_import("iboot") is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import iboot library"))
    self.power = iboot.IBootPower()
    self.boot = pxe.PXEBoot()
    self.deploy = agent.AgentDeploy()
    self.vendor = agent.AgentVendorInterface()
def __init__(self):
    """Assemble the iBoot driver with iSCSI-based deploy.

    :raises: DriverLoadError if the iboot library cannot be imported.
    """
    if importutils.try_import("iboot") is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import iboot library"))
    self.power = iboot.IBootPower()
    self.boot = pxe.PXEBoot()
    self.deploy = iscsi_deploy.ISCSIDeploy()
    self.vendor = iscsi_deploy.VendorPassthru()
def _try_import(module_name):
    """Import a module by name, returning None (and logging) on failure.

    Unlike a bare ``importutils.try_import`` call, this also guards
    against non-ImportError exceptions raised by the module's top-level
    code, so a broken optional dependency never crashes the caller.

    :param module_name: dotted path of the module to import
    :returns: the imported module, or None if the import failed
    """
    try:
        return importutils.try_import(module_name)
    except Exception as e:
        # Lazy %-style args defer string formatting until the record is
        # actually emitted (and the logged text is unchanged).
        LOG.error('Unable to load module "%s". %s', module_name, str(e))
        return None
def __init__(self):
    """Assemble the PXE + native-IPMI driver interfaces.

    :raises: DriverLoadError if the pyghmi library cannot be imported.
    """
    if importutils.try_import('pyghmi') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pyghmi library"))
    self.power = ipminative.NativeIPMIPower()
    self.console = ipminative.NativeIPMIShellinaboxConsole()
    self.boot = pxe.PXEBoot()
    self.deploy = iscsi_deploy.ISCSIDeploy()
    self.management = ipminative.NativeIPMIManagement()
    self.iscsi_vendor = iscsi_deploy.VendorPassthru()
    self.ipminative_vendor = ipminative.VendorPassthru()
    # Raw-IPMI passthru methods go to the native IPMI vendor interface;
    # the deploy-related ones stay with the iSCSI vendor interface.
    self.mapping = {}
    for method_name in ('send_raw', 'bmc_reset'):
        self.mapping[method_name] = self.ipminative_vendor
    for method_name in ('heartbeat', 'pass_bootloader_install_info',
                        'pass_deploy_info'):
        self.mapping[method_name] = self.iscsi_vendor
    self.driver_passthru_mapping = {'lookup': self.iscsi_vendor}
    self.vendor = utils.MixinVendorInterface(self.mapping,
                                             self.driver_passthru_mapping)
    self.inspect = inspector.Inspector.create_if_enabled(
        'PXEAndIPMINativeDriver')
    self.raid = agent.AgentRAID()
def __init__(self):
    """Assemble the test iBoot driver: real power, fake deploy.

    :raises: DriverLoadError if the iboot library cannot be imported.
    """
    if importutils.try_import('iboot') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import iboot library"))
    self.power = iboot.IBootPower()
    self.deploy = fake.FakeDeploy()
def __init__(self):
    """Assemble the test SNMP driver: real power, fake deploy.

    :raises: DriverLoadError if the pysnmp library cannot be imported.
    """
    if importutils.try_import('pysnmp') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pysnmp library"))
    self.power = snmp.SNMPPower()
    self.deploy = fake.FakeDeploy()
def import_zmq(zmq_concurrency='eventlet'):
    """Import the zmq binding matching the chosen concurrency backend.

    :param zmq_concurrency: concurrency backend name, validated against
        the supported configuration values
    :returns: the imported zmq module, or None if it is unavailable
    """
    _raise_error_if_invalid_config_value(zmq_concurrency)
    return importutils.try_import(ZMQ_MODULES[zmq_concurrency],
                                  default=None)
def __init__(self):
    """Assemble the iBoot driver with agent-based deploy.

    :raises: DriverLoadError if the iboot library cannot be imported.
    """
    if importutils.try_import('iboot') is None:
        raise ironic_exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import iboot library"))
    self.power = iboot_power.IBootPower()
    self.boot = pxe.PXEBoot()
    self.deploy = agent.AgentDeploy()
def __init__(self):
    """Assemble the test iRMC driver: real power/management, fake deploy.

    :raises: DriverLoadError if python-scciclient cannot be imported.
    """
    if importutils.try_import('scciclient') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import python-scciclient library"))
    self.power = irmc_power.IRMCPower()
    self.deploy = fake.FakeDeploy()
    self.management = irmc_management.IRMCManagement()
def __init__(self):
    """Assemble the test CIMC driver: real power/management, fake deploy.

    :raises: DriverLoadError if the ImcSdk library cannot be imported.
    """
    if importutils.try_import('ImcSdk') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import ImcSdk library"))
    self.power = cimc_power.Power()
    self.deploy = fake.FakeDeploy()
    self.management = cimc_mgmt.CIMCManagement()
def __init__(self):
    """Assemble the PXE + CIMC driver with iSCSI-based deploy.

    :raises: DriverLoadError if the ImcSdk library cannot be imported.
    """
    if importutils.try_import("ImcSdk") is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import ImcSdk library"))
    self.power = cimc_power.Power()
    self.boot = pxe.PXEBoot()
    self.deploy = iscsi_deploy.ISCSIDeploy()
    self.management = cimc_mgmt.CIMCManagement()
    self.vendor = iscsi_deploy.VendorPassthru()
def __init__(self):
    """Assemble the UCS driver with agent-based deploy.

    :raises: DriverLoadError if the UcsSdk library cannot be imported.
    """
    if importutils.try_import("UcsSdk") is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import UcsSdk library"))
    self.power = ucs_power.Power()
    self.boot = pxe.PXEBoot()
    self.deploy = agent.AgentDeploy()
    self.management = ucs_mgmt.UcsManagement()
    self.vendor = agent.AgentVendorInterface()
def __init__(self):
    """Assemble the test VirtualBox driver: real power/management, fake deploy.

    :raises: DriverLoadError if pyremotevbox cannot be imported.
    """
    if importutils.try_import('pyremotevbox') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pyremotevbox library"))
    self.power = virtualbox.VirtualBoxPower()
    self.deploy = fake.FakeDeploy()
    self.management = virtualbox.VirtualBoxManagement()
def check_netapp_lib():
    """Ensure the optional netapp_lib package is importable.

    :raises: NetAppException with installation instructions when the
        library is missing.
    """
    if importutils.try_import("netapp_lib") is not None:
        return
    raise exception.NetAppException(
        "You have not installed the NetApp API Library for OpenStack. "
        'Please install it using "sudo pip install netapp-lib" and '
        "restart this service!")
def __init__(self):
    """Assemble the test AMT driver: real power/management, fake deploy.

    :raises: DriverLoadError if the pywsman library cannot be imported.
    """
    if importutils.try_import('pywsman') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pywsman library"))
    self.power = amt_power.AMTPower()
    self.deploy = fake.FakeDeploy()
    self.management = amt_mgmt.AMTManagement()
def __init__(self):
    """Assemble the test native-IPMI driver with fake deploy.

    :raises: DriverLoadError if the pyghmi library cannot be imported.
    """
    if importutils.try_import('pyghmi') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pyghmi IPMI library"))
    self.power = ipminative.NativeIPMIPower()
    self.console = ipminative.NativeIPMIShellinaboxConsole()
    self.deploy = fake.FakeDeploy()
    self.management = ipminative.NativeIPMIManagement()
def import_zmq(zmq_concurrency='eventlet'):
    """Import the zmq binding matching the chosen concurrency backend.

    Logs an error (but does not raise) when the binding is missing.

    :param zmq_concurrency: concurrency backend name, validated against
        the supported configuration values
    :returns: the imported zmq module, or None if it is unavailable
    """
    _raise_error_if_invalid_config_value(zmq_concurrency)
    module = importutils.try_import(ZMQ_MODULES[zmq_concurrency],
                                    default=None)
    if module is None:
        LOG.error(_LE("ZeroMQ not found!"))
    return module
def __init__(self):
    """Assemble the PXE + UCS driver with iSCSI-based deploy.

    :raises: DriverLoadError if the UcsSdk library cannot be imported.
    """
    if importutils.try_import("UcsSdk") is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import UcsSdk library"))
    self.power = ucs_power.Power()
    self.boot = pxe.PXEBoot()
    self.deploy = iscsi_deploy.ISCSIDeploy()
    self.management = ucs_mgmt.UcsManagement()
    self.vendor = iscsi_deploy.VendorPassthru()
    self.inspect = inspector.Inspector.create_if_enabled("PXEAndUcsDriver")
def __init__(self):
    """Assemble the PXE + AMT driver interfaces.

    :raises: DriverLoadError if the pywsman library cannot be imported.
    """
    if importutils.try_import('pywsman') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pywsman library"))
    self.power = amt_power.AMTPower()
    self.deploy = pxe.PXEDeploy()
    self.management = amt_management.AMTManagement()
    self.vendor = amt_vendor.AMTPXEVendorPassthru()
def __init__(self):
    """Assemble the PXE + UCS driver interfaces.

    :raises: DriverLoadError if the UcsSdk library cannot be imported.
    """
    if importutils.try_import('UcsSdk') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import UcsSdk library"))
    self.power = ucs_power.Power()
    self.deploy = pxe.PXEDeploy()
    self.management = ucs_mgmt.UcsManagement()
    self.vendor = pxe.VendorPassthru()
def __init__(self, host, manager_module, manager_class):
    """Create an RPC service wrapping the named manager class.

    :param host: hostname this service runs on
    :param manager_module: dotted path of the module holding the manager
    :param manager_class: name of the manager class within that module
    :raises: ImportError if the manager module cannot be imported
    :raises: AttributeError if the module lacks the named class
    """
    super(RPCService, self).__init__()
    self.host = host
    # import_module (rather than try_import) surfaces a clear
    # ImportError for a missing/broken module instead of letting the
    # getattr below fail with a confusing AttributeError on None.
    manager_module = importutils.import_module(manager_module)
    manager_class = getattr(manager_module, manager_class)
    self.manager = manager_class(host, manager_module.MANAGER_TOPIC)
    self.topic = self.manager.topic
    self.rpcserver = None
    self.deregister = True
def __init__(self):
    """Assemble the test iLO driver: real interfaces, fake deploy.

    :raises: DriverLoadError if proliantutils cannot be imported.
    """
    if importutils.try_import('proliantutils') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import proliantutils library"))
    self.power = ilo_power.IloPower()
    self.deploy = fake.FakeDeploy()
    self.management = ilo_management.IloManagement()
    self.inspect = ilo_inspect.IloInspect()
def __init__(self):
    """Assemble the AMT driver with agent-based deploy.

    :raises: DriverLoadError if the pywsman library cannot be imported.
    """
    if importutils.try_import('pywsman') is None:
        raise ironic_exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pywsman library"))
    self.power = amt_power.AMTPower()
    self.boot = pxe.PXEBoot()
    self.deploy = agent.AgentDeploy()
    self.management = amt_management.AMTManagement()
def __init__(self):
    """Assemble the PXE + VirtualBox driver with iSCSI-based deploy.

    :raises: DriverLoadError if pyremotevbox cannot be imported.
    """
    if importutils.try_import('pyremotevbox') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import pyremotevbox library"))
    self.power = virtualbox.VirtualBoxPower()
    self.boot = pxe.PXEBoot()
    self.deploy = iscsi_deploy.ISCSIDeploy()
    self.management = virtualbox.VirtualBoxManagement()
    self.raid = agent.AgentRAID()
def _get_package_root_dir(config):
    """Return the filesystem root of the project being migrated.

    :param config: alembic config exposing the 'neutron_project'
        main option
    :returns: path two directory levels above the project's top-level
        package __init__.py
    """
    root_module = importutils.try_import(_get_project_base(config))
    if not root_module:
        project = config.get_main_option('neutron_project')
        # NOTE(review): alembic_util.err is assumed to terminate
        # execution; if it ever returned, the dereference of root_module
        # below would raise AttributeError on None — confirm.
        alembic_util.err(_("Failed to locate source for %s.") % project)
    # The root_module.__file__ property is a path like
    # '/opt/stack/networking-foo/networking_foo/__init__.py'
    # We return just
    # '/opt/stack/networking-foo'
    return os.path.dirname(os.path.dirname(root_module.__file__))
def __init__(self):
    """Assemble the test SeaMicro driver: real interfaces, fake deploy.

    :raises: DriverLoadError if seamicroclient cannot be imported.
    """
    if importutils.try_import('seamicroclient') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import seamicroclient library"))
    self.power = seamicro.Power()
    self.deploy = fake.FakeDeploy()
    self.management = seamicro.Management()
    self.vendor = seamicro.VendorPassthru()
    self.console = seamicro.ShellinaboxConsole()
def __init__(self):
    """Assemble the PXE + iRMC driver interfaces.

    :raises: DriverLoadError if python-scciclient cannot be imported.
    """
    if importutils.try_import('scciclient') is None:
        raise exception.DriverLoadError(
            driver=self.__class__.__name__,
            reason=_("Unable to import python-scciclient library"))
    self.power = irmc_power.IRMCPower()
    self.console = ipmitool.IPMIShellinaboxConsole()
    self.deploy = pxe.PXEDeploy()
    self.management = irmc_management.IRMCManagement()
    self.vendor = pxe.VendorPassthru()
LOG = logging.getLogger(__name__) METRICS = metrics_utils.get_metrics_logger(__name__) BOOT_DEVICE_MAPPING_TO_OV = { boot_devices.DISK: 'HardDisk', boot_devices.PXE: 'PXE', boot_devices.CDROM: 'CD', } BOOT_DEVICE_OV_TO_GENERIC = { v: k for k, v in BOOT_DEVICE_MAPPING_TO_OV.items() } oneview_exceptions = importutils.try_import('oneview_client.exceptions') class OneViewManagement(base.ManagementInterface): def __init__(self): super(OneViewManagement, self).__init__() self.oneview_client = common.get_oneview_client() def get_properties(self): return deploy_utils.get_properties() @METRICS.timer('OneViewManagement.validate') def validate(self, task): """Checks required info on 'driver_info' and validates node with OneView Validates whether the 'driver_info' property of the supplied
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ The MatchMaker classes should accept a Topic or Fanout exchange key and return keys for direct exchanges, per (approximate) AMQP parlance. """ from oslo_config import cfg from oslo_utils import importutils from oslo_messaging._drivers import matchmaker as mm_common redis = importutils.try_import('redis') matchmaker_redis_opts = [ cfg.StrOpt('host', default='127.0.0.1', help='Host to locate redis.'), cfg.IntOpt('port', default=6379, help='Use this port to connect to redis host.'), cfg.StrOpt('password', help='Password for Redis server (optional).'), ] CONF = cfg.CONF opt_group = cfg.OptGroup(name='matchmaker_redis', title='Options for Redis-based MatchMaker') CONF.register_group(opt_group) CONF.register_opts(matchmaker_redis_opts, opt_group)
# under the License. """The bare-metal admin extension.""" from oslo_utils import importutils import webob from nova.api.openstack.api_version_request \ import MAX_PROXY_API_SUPPORT_VERSION from nova.api.openstack import common from nova.api.openstack import extensions from nova.api.openstack import wsgi import nova.conf from nova.i18n import _ from nova.policies import baremetal_nodes as bn_policies ironic_client = importutils.try_import('ironicclient.client') ironic_exc = importutils.try_import('ironicclient.exc') node_fields = [ 'id', 'cpus', 'local_gb', 'memory_mb', 'pm_address', 'pm_user', 'service_host', 'terminal_port', 'instance_uuid' ] node_ext_fields = ['uuid', 'task_state', 'updated_at', 'pxe_config_path'] interface_fields = ['id', 'address', 'datapath_id', 'port_no'] CONF = nova.conf.CONF def _check_ironic_client_enabled():
# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_utils import importutils from ironic.common import boot_devices from ironic.common import exception from ironic.drivers import base from ironic.drivers.modules.cimc import common imcsdk = importutils.try_import('ImcSdk') CIMC_TO_IRONIC_BOOT_DEVICE = { 'storage-read-write': boot_devices.DISK, 'lan-read-only': boot_devices.PXE, 'vm-read-only': boot_devices.CDROM } IRONIC_TO_CIMC_BOOT_DEVICE = { boot_devices.DISK: ('lsbootStorage', 'storage-read-write', 'storage', 'read-write'), boot_devices.PXE: ('lsbootLan', 'lan-read-only', 'lan', 'read-only'), boot_devices.CDROM: ('lsbootVirtualMedia', 'vm-read-only', 'virtual-media', 'read-only') }
def dependencies_installed(self):
    """Report whether the optional monasca_common dependency is present.

    :returns: True if monasca_common can be imported, False otherwise
    """
    # bool() normalizes the result: try_import returns the module object
    # (truthy) on success and the supplied default (False) on failure,
    # so callers previously received a module where a bool was implied.
    return bool(importutils.try_import('monasca_common', False))
from oslo_utils import timeutils import sqlalchemy from sqlalchemy import asc from sqlalchemy import desc from sqlalchemy.engine import url as sqlalchemy_url from sqlalchemy import func from sqlalchemy.orm import exc import aodh from aodh import storage from aodh.storage import base from aodh.storage import models as alarm_api_models from aodh.storage.sqlalchemy import models from aodh.storage.sqlalchemy import utils as sql_utils osprofiler_sqlalchemy = importutils.try_import('osprofiler.sqlalchemy') LOG = log.getLogger(__name__) AVAILABLE_CAPABILITIES = { 'alarms': { 'query': { 'simple': True, 'complex': True }, 'history': { 'query': { 'simple': True, 'complex': True } }
help=_('ironic-inspector HTTP endpoint. If this is not set, ' 'the ironic-inspector client default ' '(http://127.0.0.1:5050) will be used.'), deprecated_group='discoverd'), cfg.IntOpt('status_check_period', default=60, help=_('period (in seconds) to check status of nodes ' 'on inspection'), deprecated_group='discoverd'), ] CONF = cfg.CONF CONF.register_opts(inspector_opts, group='inspector') CONF.import_opt('auth_strategy', 'ironic.api.app') client = importutils.try_import('ironic_inspector_client') INSPECTOR_API_VERSION = (1, 0) class Inspector(base.InspectInterface): """In-band inspection via ironic-inspector project.""" @classmethod def create_if_enabled(cls, driver_name): """Create instance of Inspector if it's enabled. Reports log warning with given driver_name if it's not. :return: Inspector instance or None """ if CONF.inspector.enabled:
from oslo_utils import importutils from ironic.common import boot_devices from ironic.common import exception from ironic.common import states from ironic.conductor import task_manager from ironic.drivers.modules import boot_mode_utils from ironic.drivers.modules import deploy_utils from ironic.drivers.modules import image_utils from ironic.drivers.modules.redfish import boot as redfish_boot from ironic.drivers.modules.redfish import utils as redfish_utils from ironic.tests.unit.db import base as db_base from ironic.tests.unit.db import utils as db_utils from ironic.tests.unit.objects import utils as obj_utils sushy = importutils.try_import('sushy') INFO_DICT = db_utils.get_test_redfish_info() @mock.patch('oslo_utils.eventletutils.EventletEvent.wait', lambda *args, **kwargs: None) class RedfishVirtualMediaBootTestCase(db_base.DbTestCase): def setUp(self): super(RedfishVirtualMediaBootTestCase, self).setUp() self.config(enabled_hardware_types=['redfish'], enabled_power_interfaces=['redfish'], enabled_boot_interfaces=['redfish-virtual-media'], enabled_management_interfaces=['redfish'], enabled_inspect_interfaces=['redfish'], enabled_bios_interfaces=['redfish'])
from nova import conductor import nova.conf from nova import context from nova import debugger from nova import exception from nova.i18n import _, _LE, _LI, _LW from nova import objects from nova.objects import base as objects_base from nova.objects import service as service_obj from nova import rpc from nova import servicegroup from nova import utils from nova import version from nova import wsgi osprofiler = importutils.try_import("osprofiler") osprofiler_initializer = importutils.try_import("osprofiler.initializer") LOG = logging.getLogger(__name__) CONF = nova.conf.CONF SERVICE_MANAGERS = { 'nova-compute': 'nova.compute.manager.ComputeManager', 'nova-conductor': 'nova.conductor.manager.ConductorManager', 'nova-scheduler': 'nova.scheduler.manager.SchedulerManager', } def _create_service_ref(this_service, context): service = objects.Service(context)
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_utils import importutils from ironic.drivers.modules.xclarity import common from ironic.tests.unit.db import base as db_base from ironic.tests.unit.db import utils as db_utils from ironic.tests.unit.objects import utils as obj_utils xclarity_exceptions = importutils.try_import('xclarity_client.exceptions') xclarity_constants = importutils.try_import('xclarity_client.constants') class XClarityCommonTestCase(db_base.DbTestCase): def setUp(self): super(XClarityCommonTestCase, self).setUp() self.config(manager_ip='1.2.3.4', group='xclarity') self.config(username='******', group='xclarity') self.config(password='******', group='xclarity') self.node = obj_utils.create_test_node( self.context, driver='fake-xclarity', properties=db_utils.get_test_xclarity_properties(),
import mock from oslo_utils import importutils from ironic.common import driver_factory from ironic.drivers.modules.oneview import common from ironic.drivers.modules.oneview import deploy from ironic.drivers.modules.oneview import deploy_utils from ironic import objects from ironic.tests.unit.conductor import mgr_utils from ironic.tests.unit.db import base as db_base from ironic.tests.unit.db import utils as db_utils from ironic.tests.unit.objects import utils as obj_utils oneview_models = importutils.try_import('oneview_client.models') @mock.patch.object(common, 'get_oneview_client', spec_set=True, autospec=True) class OneViewPeriodicTasks(db_base.DbTestCase): def setUp(self): super(OneViewPeriodicTasks, self).setUp() self.config(manager_url='https://1.2.3.4', group='oneview') self.config(username='******', group='oneview') self.config(password='******', group='oneview') mgr_utils.mock_the_extension_manager(driver='fake_oneview') self.driver = driver_factory.get_driver('fake_oneview') self.node = obj_utils.create_test_node( self.context,
import mock from oslo_utils import importutils from ironic.common import exception from ironic.common import raid from ironic.common import states from ironic.conductor import task_manager from ironic.conductor import utils as manager_utils from ironic.drivers.modules import deploy_utils from ironic.drivers.modules.ilo import common as ilo_common from ironic.drivers.modules.ilo import raid as ilo_raid from ironic.tests.unit.db import base as db_base from ironic.tests.unit.db import utils as db_utils from ironic.tests.unit.objects import utils as obj_utils ilo_error = importutils.try_import('proliantutils.exception') INFO_DICT = db_utils.get_test_ilo_info() class Ilo5RAIDTestCase(db_base.DbTestCase): def setUp(self): super(Ilo5RAIDTestCase, self).setUp() self.driver = mock.Mock(raid=ilo_raid.Ilo5RAID()) self.target_raid_config = { "logical_disks": [{ 'size_gb': 200, 'raid_level': 0, 'is_root_volume': True }, { 'size_gb': 200,
import logging as py_logging import os import os.path from oslo_config import cfg from oslo_log import log as logging from oslo_middleware import cors from oslo_policy import opts as policy_opts from oslo_utils import importutils import pbr.version from placement import conf from placement import db_api from placement import deploy osprofiler = importutils.try_import('osprofiler') osprofiler_initializer = importutils.try_import('osprofiler.initializer') profiler = importutils.try_import('osprofiler.opts') CONFIG_FILE = 'placement.conf' # The distribution name is required here, not package. version_info = pbr.version.VersionInfo('openstack-placement') def setup_logging(config): # Any dependent libraries that have unhelp debug levels should be # pinned to a higher default. extra_log_level_defaults = [ 'routes=INFO', ]
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import abc import json from oslo_config import cfg from oslo_utils import importutils from mistral import exceptions as exc CONF = cfg.CONF _PYV8 = importutils.try_import('PyV8') _V8EVAL = importutils.try_import('v8eval') class JSEvaluator(object): @classmethod @abc.abstractmethod def evaluate(cls, script, context): """Executes given JavaScript.""" pass class PyV8Evaluator(JSEvaluator): @classmethod def evaluate(cls, script, context): if not _PYV8:
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. ### # This code is taken from nova. Goal is minimal modification. ### from oslo_log import log as logging from oslo_utils import importutils import webob.dec from zun.common import context import zun.conf profiler = importutils.try_import("osprofiler.profiler") profiler_initializer = importutils.try_import("osprofiler.initializer") profiler_web = importutils.try_import("osprofiler.web") CONF = zun.conf.CONF LOG = logging.getLogger(__name__) class WsgiMiddleware(object): def __init__(self, application, **kwargs): self.application = application @classmethod def factory(cls, global_conf, **local_conf): if profiler_web:
from oslo_utils import uuidutils from ironic.common import driver_factory from ironic.common import exception from ironic.common import states from ironic.conductor import task_manager from ironic.drivers.modules.oneview import common from ironic.drivers.modules.oneview import deploy_utils from ironic.drivers.modules.oneview import management from ironic.drivers.modules.oneview import power from ironic.tests.unit.conductor import mgr_utils from ironic.tests.unit.db import base as db_base from ironic.tests.unit.db import utils as db_utils from ironic.tests.unit.objects import utils as obj_utils client_exception = importutils.try_import('hpOneView.exceptions') class OneViewPowerDriverTestCase(db_base.DbTestCase): def setUp(self): super(OneViewPowerDriverTestCase, self).setUp() self.config(manager_url='https://1.2.3.4', group='oneview') self.config(username='******', group='oneview') self.config(password='******', group='oneview') self.config(tls_cacert_file='ca_file', group='oneview') self.config(allow_insecure_connections=False, group='oneview') mgr_utils.mock_the_extension_manager(driver='fake_oneview') self.driver = driver_factory.get_driver('fake_oneview') self.node = obj_utils.create_test_node(
DRAC management interface """ from ironic_lib import metrics_utils from oslo_log import log as logging from oslo_utils import importutils from ironic.common import boot_devices from ironic.common import exception from ironic.common.i18n import _ from ironic.conductor import task_manager from ironic.drivers import base from ironic.drivers.modules.drac import common as drac_common from ironic.drivers.modules.drac import job as drac_job drac_exceptions = importutils.try_import('dracclient.exceptions') LOG = logging.getLogger(__name__) METRICS = metrics_utils.get_metrics_logger(__name__) _BOOT_DEVICES_MAP = { boot_devices.DISK: 'HardDisk', boot_devices.PXE: 'NIC', boot_devices.CDROM: 'Optical', } # BootMode constants PERSISTENT_BOOT_MODE = 'IPL' NON_PERSISTENT_BOOT_MODE = 'OneTime'
# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import importutils import testtools from oslo_messaging.tests import utils as test_utils redis = importutils.try_import('redis') matchmaker_redis = ( importutils.try_import('oslo_messaging._drivers.matchmaker_redis')) def redis_available(): '''Helper to see if local redis server is running''' if not redis: return False try: c = redis.StrictRedis(socket_timeout=1) c.ping() return True except redis.exceptions.ConnectionError: return False
from neutron.common import rpc as n_rpc from neutron.common import topics from neutron.common import utils as common_utils from neutron import context as n_context from networking_cisco._i18n import _, _LE, _LI, _LW from networking_cisco import backwards_compatibility as bc from networking_cisco.plugins.cisco.cfg_agent import cfg_exceptions from networking_cisco.plugins.cisco.cfg_agent.device_drivers import driver_mgr from networking_cisco.plugins.cisco.cfg_agent import device_status from networking_cisco.plugins.cisco.common import (cisco_constants as c_constants) from networking_cisco.plugins.cisco.extensions import ha from networking_cisco.plugins.cisco.extensions import routerrole ncc_errors = importutils.try_import('ncclient.transport.errors') LOG = logging.getLogger(__name__) N_ROUTER_PREFIX = 'nrouter-' ROUTER_ROLE_ATTR = routerrole.ROUTER_ROLE_ATTR # Number of routers to fetch from server at a time on resync. # Needed to reduce load on server side and to speed up resync on agent side. SYNC_ROUTERS_MAX_CHUNK_SIZE = 64 SYNC_ROUTERS_MIN_CHUNK_SIZE = 8 class RouterInfo(object): """Wrapper class around the (neutron) router dictionary.
# Supported components for firmware update when invoked through manual clean # step, ``update_firmware``. SUPPORTED_ILO_FIRMWARE_UPDATE_COMPONENTS = [ 'ilo', 'cpld', 'power_pic', 'bios', 'chassis' ] # Mandatory fields to be provided as part of firmware image update # with manual clean step FIRMWARE_IMAGE_INFO_FIELDS = {'url', 'checksum'} CONF = cfg.CONF LOG = logging.getLogger(__name__) proliantutils_error = importutils.try_import('proliantutils.exception') proliantutils_utils = importutils.try_import('proliantutils.utils') def verify_firmware_update_args(func): """Verifies the firmware update arguments.""" @functools.wraps(func) def wrapper(self, task, **kwargs): """Wrapper around ``update_firmware`` call. :param task: a TaskManager object. :raises: InvalidParameterValue if validation fails for input arguments of firmware update. """ firmware_update_mode = kwargs.get('firmware_update_mode') firmware_images = kwargs.get('firmware_images')
import six from troveclient.apiclient import exceptions as exc import troveclient.auth_plugin from troveclient import client import troveclient.extension from troveclient.i18n import _ # noqa from troveclient import utils from troveclient.v1 import shell as shell_v1 DEFAULT_OS_DATABASE_API_VERSION = "1.0" DEFAULT_TROVE_ENDPOINT_TYPE = 'publicURL' DEFAULT_TROVE_SERVICE_TYPE = 'database' logger = logging.getLogger(__name__) osprofiler_profiler = importutils.try_import("osprofiler.profiler") class TroveClientArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super(TroveClientArgumentParser, self).__init__(*args, **kwargs) def add_argument(self, *args, **kwargs): if kwargs.get('help') is None: raise Exception( _("An argument '%s' was specified without help.") % args[0]) super(TroveClientArgumentParser, self).add_argument(*args, **kwargs) def error(self, message): """error(message: string)
from ironic_lib import metrics_utils from oslo_log import log as logging from oslo_utils import importutils from ironic.common import boot_devices from ironic.common import exception from ironic.common.i18n import _ from ironic.conductor import task_manager from ironic.drivers import base from ironic.drivers.modules.xclarity import common LOG = logging.getLogger(__name__) METRICS = metrics_utils.get_metrics_logger(__name__) xclarity_client_exceptions = importutils.try_import( 'xclarity_client.exceptions') BOOT_DEVICE_MAPPING_TO_XCLARITY = { boot_devices.PXE: 'PXE Network', boot_devices.DISK: 'Hard Disk 0', boot_devices.CDROM: 'CD/DVD Rom', boot_devices.BIOS: 'Boot To F1' } SUPPORTED_BOOT_DEVICES = [ boot_devices.PXE, boot_devices.DISK, boot_devices.CDROM, boot_devices.BIOS, ]
""" iRMC Inspect Interface """ from ironic_lib import metrics_utils from oslo_log import log as logging from oslo_utils import importutils from ironic.common import exception from ironic.common.i18n import _, _LI, _LW from ironic.common import states from ironic.drivers import base from ironic.drivers.modules.irmc import common as irmc_common from ironic.drivers.modules import snmp from ironic import objects scci = importutils.try_import('scciclient.irmc.scci') LOG = logging.getLogger(__name__) METRICS = metrics_utils.get_metrics_logger(__name__) """ SC2.mib: sc2UnitNodeClass returns NIC type. sc2UnitNodeClass OBJECT-TYPE SYNTAX INTEGER { unknown(1), primary(2), secondary(3), management-blade(4), secondary-remote(5),
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Unity backend for the EMC Manila driver.""" import random from oslo_config import cfg from oslo_log import log from oslo_utils import excutils from oslo_utils import importutils from oslo_utils import netutils storops = importutils.try_import('storops') if storops: # pylint: disable=import-error from storops import exception as storops_ex from storops.unity import enums from manila.common import constants as const from manila import exception from manila.i18n import _ from manila.share.drivers.dell_emc.common.enas import utils as enas_utils from manila.share.drivers.dell_emc.plugins import base as driver from manila.share.drivers.dell_emc.plugins.unity import client from manila.share.drivers.dell_emc.plugins.unity import utils as unity_utils from manila.share import utils as share_utils from manila import utils """Version history:
from oslo_utils import encodeutils, importutils import requests import six from six.moves.urllib import parse from eclcli.orchestration.heatclient.common import utils from eclcli.orchestration.heatclient import exc from eclcli.orchestration.heatclient.openstack.common._i18n import _ from eclcli.orchestration.heatclient.openstack.common._i18n import _LW from keystoneauth1 import adapter LOG = logging.getLogger(__name__) USER_AGENT = 'python-heatclient' CHUNKSIZE = 1024 * 64 # 64kB SENSITIVE_HEADERS = ('X-Auth-Token', ) osprofiler_web = importutils.try_import("osprofiler.web") def get_system_ca_file(): """Return path to system default CA file.""" # Standard CA file locations for Debian/Ubuntu, RedHat/Fedora, # Suse, FreeBSD/OpenBSD, MacOSX, and the bundled ca ca_path = [ '/etc/ssl/certs/ca-certificates.crt', '/etc/pki/tls/certs/ca-bundle.crt', '/etc/ssl/ca-bundle.pem', '/etc/ssl/cert.pem', '/System/Library/OpenSSL/certs/cacert.pem', requests.certs.where() ] for ca in ca_path: LOG.debug("Looking for ca file %s", ca) if os.path.exists(ca):
from cinder import context from cinder import exception from cinder.i18n import _, _LE, _LI, _LW from cinder.volume import driver from cinder.volume import utils from cinder.volume import volume_types import six import math import re LOG = logging.getLogger(__name__) hpelefthandclient = importutils.try_import("hpelefthandclient") if hpelefthandclient: from hpelefthandclient import client as hpe_lh_client from hpelefthandclient import exceptions as hpeexceptions hpelefthand_opts = [ cfg.StrOpt('hpelefthand_api_url', default=None, help="HPE LeftHand WSAPI Server Url like " "https://<LeftHand ip>:8081/lhos", deprecated_name='hplefthand_api_url'), cfg.StrOpt('hpelefthand_username', default=None, help="HPE LeftHand Super user username", deprecated_name='hplefthand_username'), cfg.StrOpt('hpelefthand_password',
from oslo_utils import excutils from oslo_utils import importutils from ironic.common import boot_devices from ironic.common import exception from ironic.common.i18n import _ from ironic.common.i18n import _LE from ironic.common.i18n import _LW from ironic.common import states from ironic.common import utils from ironic.conductor import task_manager from ironic.drivers import base from ironic.drivers.modules import console_utils from ironic.drivers import utils as driver_utils pyghmi = importutils.try_import('pyghmi') if pyghmi: from pyghmi import exceptions as pyghmi_exception from pyghmi.ipmi import command as ipmi_command opts = [ cfg.IntOpt('retry_timeout', default=60, help=_('Maximum time in seconds to retry IPMI operations. ' 'There is a tradeoff when setting this value. Setting ' 'this too low may cause older BMCs to crash and require ' 'a hard reset. However, setting too high can cause the ' 'sync power state periodic task to hang when there are ' 'slow or unresponsive BMCs.')), cfg.IntOpt('min_command_interval', default=5,
from __future__ import absolute_import from oslo_config import cfg from oslo_log import log as logging from oslo_utils import importutils from oslo_utils import units import six from cinder import exception from cinder.i18n import _, _LE from cinder.volume import driver from cinder.volume import volume_types LOG = logging.getLogger(__name__) storpool = importutils.try_import('storpool') if storpool: from storpool import spapi from storpool import spconfig from storpool import spopenstack from storpool import sptypes storpool_opts = [ cfg.StrOpt('storpool_template', default=None, help='The StorPool template for volumes with no type.'), cfg.IntOpt('storpool_replication', default=3, help='The default StorPool chain replication value. ' 'Used when creating a volume with no specified type if ' 'storpool_template is not set. Also used for calculating '
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import units
import six
from six.moves import range

import nova.conf
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
import nova.network
from nova import safe_utils

# Optional dependency: None when osprofiler is not installed, so profiling
# support degrades gracefully.
profiler = importutils.try_import('osprofiler.profiler')

CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)

# NOTE(review): presumably a lazily-populated "is neutron the network
# backend?" flag, set elsewhere in this module — confirm against callers.
_IS_NEUTRON = None

# Factory for inter-service lock decorators; all locks taken through this
# share the 'nova-' prefix (oslo.concurrency lockutils).
synchronized = lockutils.synchronized_with_prefix('nova-')

# Prefix used for image-property keys stored in system metadata.
SM_IMAGE_PROP_PREFIX = "image_"
# Image properties propagated into an instance's system metadata.
SM_INHERITABLE_KEYS = (
    'min_ram', 'min_disk', 'disk_format', 'container_format',
)
# Keys which hold large structured data that won't fit in the
from heat.common import exception from heat.common.i18n import _ from heat.common import template_format from heat.engine import resource from heat.engine import rsrc_defn from heat.engine import scheduler from heat.tests import common from heat.tests import utils import testtools from heat_docker.resources import docker_container from heat_docker.tests import fake_docker_client as fakeclient docker = importutils.try_import('docker') template = ''' { "AWSTemplateFormatVersion": "2010-09-09", "Description": "Test template", "Parameters": {}, "Resources": { "Blog": { "Type": "DockerInc::Docker::Container", "Properties": { "image": "samalba/wordpress", "env": [ "FOO=bar" ] }