Example #1
 def __init__(self):
     self._pid = None
     self._process = None
     setup_logger('elasticsearch',
                  logging.INFO)
     setup_logger('elasticsearch.trace',
                  logging.INFO)
Example #2
 def __init__(self):
     self._pid = None
     self._process = None
     setup_logger('elasticsearch',
                  logging.INFO)
     setup_logger('elasticsearch.trace',
                  logging.INFO)
def start_events_and_logs_polling(logs_handler_retriever=None):
    """
    Fetches events and logs from RabbitMQ.
    """
    if not RABBITMQ_POLLING_ENABLED:
        return

    setup_logger('pika', logging.INFO)
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()
    queues = ['cloudify-events', 'cloudify-logs']
    for q in queues:
        channel.queue_declare(queue=q,
                              auto_delete=True,
                              durable=True,
                              exclusive=False)

    def callback(ch, method, properties, body):
        try:
            event = json.loads(body)
            if RABBITMQ_VERBOSE_MESSAGES_ENABLED:
                output = '\n{0}'.format(json.dumps(event, indent=4))
            else:
                output = create_event_message_prefix(event)
            if output:
                logger.info(output)
            if logs_handler_retriever:
                logs_handler_retriever()(output or '', event)
        except Exception as e:
            logger.error(
                'event/log format error - output: {0} [message={1}]'.format(
                    body, e.message))
            s_traceback = StringIO.StringIO()
            traceback.print_exc(file=s_traceback)
            logger.error(s_traceback.getvalue())

    def consume():
        channel.basic_consume(callback, queue=queues[0], no_ack=True)
        channel.basic_consume(callback, queue=queues[1], no_ack=True)
        channel.start_consuming()

    logger.info(
        "Starting RabbitMQ events/logs polling - queues={0}".format(queues))

    polling_thread = threading.Thread(target=consume)
    polling_thread.daemon = True
    polling_thread.start()
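The logs_handler_retriever argument is a zero-argument callable that returns the handler itself; the handler then receives each formatted message together with the raw event (see the callback above). A hedged usage sketch with an illustrative in-memory handler (not from the source):

collected = []

def _handler_retriever():
    # return the callable that will receive (message, event) pairs
    return lambda message, event: collected.append((message, event))

# requires RABBITMQ_POLLING_ENABLED and a RabbitMQ broker on localhost
start_events_and_logs_polling(logs_handler_retriever=_handler_retriever)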
Example #4
    def __init__(self, username=None, storage=None, logger=None):

        """
        :param username: the username the daemons are registered under.
                         if no username is passed, the currently logged-in
                         user will be used. this setting is used for
                         computing the storage directory, hence, if
                         `storage` is passed, the username will be ignored.
        :param storage: the storage directory where daemons are stored.
                        if no directory is passed, it will be computed using
                        the `utils.get_storage_directory` function.
        :param logger: a logger to be used to log various subsequent
                       operations.

        """

        ######################################################################
        # `username` and `storage` are arguments because the default home
        # directory may change depending on how the daemon process is
        # executed. For example, if running in a Windows Service, the home
        # directory changes. This means that we must have the ability to
        # specify exactly where the storage directory is, and not let the
        # code auto-detect it in any scenario.
        #####################################################################

        self.username = username
        self.storage = storage or utils.internal.get_storage_directory(
            self.username)
        self.logger = logger or setup_logger('cloudify_agent.api.factory')
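As the boxed comment explains, passing storage explicitly bypasses home-directory auto-detection. A hedged usage sketch; the class name DaemonFactory and the path are illustrative assumptions, not taken from this snippet:

# Hypothetical factory class and path, shown only to illustrate the
# username/storage precedence described in the docstring above.
factory = DaemonFactory(storage='/opt/cloudify-agent/daemons')
assert factory.storage == '/opt/cloudify-agent/daemons'
# username is only consulted when storage has to be auto-computed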
Example #5
 def __init__(self, root_path=None, port=5555):
     self.port = port
     self.root_path = root_path or os.path.dirname(resources.__file__)
     self.process = None
     self.logger = setup_logger('cloudify_agent.tests.utils.FileServer',
                                logger_level=logging.DEBUG)
     self.runner = LocalCommandRunner(self.logger)
 def setUpClass(cls):
     cls.logger = setup_logger('cloudify_agent.tests.api.test_utils')
     cls.file_server_resource_base = tempfile.mkdtemp(
         prefix='file-server-resource-base')
     cls.fs = test_utils.FileServer(root_path=cls.file_server_resource_base)
     cls.fs.start()
     cls.file_server_url = 'http://localhost:{0}'.format(cls.fs.port)
Example #7
 def __init__(self, cloudify_agent, logger=None):
     self.cloudify_agent = cloudify_agent
     self.logger = logger or setup_logger(self.__class__.__name__)
     self.broker_get_settings_from_manager = cloudify_agent.get(
         'broker_get_settings_from_manager',
         True,
     )
Example #8
    def setUpClass(cls):

        cls.logger = setup_logger(cls.__name__, logger_level=logging.DEBUG)
        cls.runner = LocalCommandRunner(cls.logger)

        cls.plugins_work_dir = tempfile.mkdtemp(prefix='plugins-work-dir-')
        cls.file_server_resource_base = tempfile.mkdtemp(
            prefix='file-server-resource-base-')
        cls.fs = test_utils.FileServer(root_path=cls.file_server_resource_base)
        cls.fs.start()
        cls.file_server_url = 'http://localhost:{0}'.format(cls.fs.port)

        cls.plugins_to_be_installed = [
            'mock-plugin', 'mock-plugin-modified',
            'mock-plugin-with-requirements'
        ]

        cls.wagons = {}

        for plugin_dir in cls.plugins_to_be_installed:
            test_utils.create_plugin_tar(
                plugin_dir_name=plugin_dir,
                target_directory=cls.file_server_resource_base)
            cls.wagons[plugin_dir] = test_utils.create_plugin_wagon(
                plugin_dir_name=plugin_dir,
                target_directory=cls.file_server_resource_base)
    def __init__(self,
                 user,
                 password,
                 protocol=None,
                 host=None,
                 port=None,
                 uri=None,
                 validate_connection=True,
                 logger=None):

        logger = logger or setup_logger('WinRMRunner')

        self.session_config = {
            'protocol': protocol or DEFAULT_WINRM_PROTOCOL,
            'host': host,
            'port': port or DEFAULT_WINRM_PORT,
            'uri': uri or DEFAULT_WINRM_URI,
            'user': user,
            'password': password
        }

        # Validations - [host, user, password]
        validate(self.session_config)

        self.session = self._create_session()
        self.logger = logger

        if validate_connection:
            self.validate_connection()
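A hedged construction sketch for the constructor above, assuming the enclosing class is named WinRMRunner as the logger name suggests; the host and credentials are illustrative, and validate_connection=False skips the connectivity check:

runner = WinRMRunner(
    user='Administrator',          # illustrative credentials
    password='secret',
    host='10.0.0.5',               # illustrative Windows host
    validate_connection=False,     # do not call validate_connection()
)
# protocol, port and uri fall back to the DEFAULT_WINRM_* constants
print(runner.session_config['port'])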
Example #10
    def __init__(self, username=None, storage=None, logger=None):

        """
        :param username: the username the daemons are registered under.
                         if no username is passed, the currently logged-in
                         user will be used. this setting is used for
                         computing the storage directory, hence, if
                         `storage` is passed, the username will be ignored.
        :param storage: the storage directory where daemons are stored.
                        if no directory is passed, it will be computed using
                        the `utils.get_storage_directory` function.
        :param logger: a logger to be used to log various subsequent
                       operations.

        """

        ######################################################################
        # `username` and `storage` are arguments because the default home
        # directory may change depending on how the daemon process is
        # executed. For example, if running in a Windows Service, the home
        # directory changes. This means that we must have the ability to
        # specify exactly where the storage directory is, and not let the
        # code auto-detect it in any scenario.
        #####################################################################

        self.username = username
        self.storage = storage or utils.internal.get_storage_directory(
            self.username)
        self.logger = logger or setup_logger('cloudify_agent.api.factory')
Example #11
    def __init__(self,
                 logger=None,
                 host=None,
                 user=None,
                 key=None,
                 port=None,
                 password=None,
                 validate_connection=True,
                 fabric_env=None,
                 tmpdir=None):

        # logger
        self.logger = logger or setup_logger('fabric_runner')

        # silence paramiko
        logging.getLogger('paramiko.transport').setLevel(logging.WARNING)

        # connection details
        self.port = port or DEFAULT_REMOTE_EXECUTION_PORT
        self.password = password
        self.user = user
        self.host = host
        self.key = key
        self.tmpdir = tmpdir

        # fabric environment
        self.env = self._set_env()
        self.env.update(fabric_env or {})
        self._connection = None

        self._validate_ssh_config()
        if validate_connection:
            self.validate_connection()
Example #12
 def __init__(self, root_path=None, port=5555):
     self.port = port
     self.root_path = root_path or os.path.dirname(resources.__file__)
     self.process = None
     self.logger = setup_logger('cloudify_agent.tests.utils.FileServer',
                                logger_level=logging.DEBUG)
     self.runner = LocalCommandRunner(self.logger)
Example #13
 def setUp(self):
     self.logger = setup_logger(self._testMethodName, logging.INFO)
     self.client = utils.create_rest_client()
     utils.restore_provider_context()
     TestEnvironment.start_celery_management_worker()
     self.test_logs_file = path.join(testenv_instance.events_and_logs_dir,
                                     '{0}.log'.format(self.id()))
     testenv_instance.handle_logs = self._write_test_events_and_logs_to_file
Example #14
    def __init__(self,
                 user,
                 password,
                 protocol=None,
                 host=None,
                 port=None,
                 uri=None,
                 validate_connection=True,
                 logger=None):

        logger = logger or setup_logger('WinRMRunner')

        self.session_config = {
            'protocol': protocol or DEFAULT_WINRM_PROTOCOL,
            'host': host,
            'port': port or DEFAULT_WINRM_PORT,
            'uri': uri or DEFAULT_WINRM_URI,
            'user': user,
            'password': password
        }

        # Validations - [host, user, password]
        validate(self.session_config)

        self.session = self._create_session()
        self.logger = logger

        if validate_connection:
            self.validate_connection()
Example #15
 def setUpClass(cls):
     cls.logger = setup_logger('cloudify_agent.tests.api.test_utils')
     cls.file_server_resource_base = tempfile.mkdtemp(
         prefix='file-server-resource-base')
     cls.fs = test_utils.FileServer(root_path=cls.file_server_resource_base)
     cls.fs.start()
     cls.file_server_url = 'http://localhost:{0}'.format(cls.fs.port)
Example #16
    def setUpClass(cls):

        cls.logger = setup_logger(cls.__name__, logger_level=logging.DEBUG)
        cls.runner = LocalCommandRunner(cls.logger)

        cls.plugins_work_dir = tempfile.mkdtemp(
            prefix='plugins-work-dir-')
        cls.file_server_resource_base = tempfile.mkdtemp(
            prefix='file-server-resource-base-')
        cls.fs = test_utils.FileServer(
            root_path=cls.file_server_resource_base)
        cls.fs.start()
        cls.file_server_url = 'http://localhost:{0}'.format(cls.fs.port)

        cls.plugins_to_be_installed = [
            'mock-plugin',
            'mock-plugin-modified',
            'mock-plugin-with-requirements'
        ]

        cls.wagons = {}

        for plugin_dir in cls.plugins_to_be_installed:
            test_utils.create_plugin_tar(
                plugin_dir_name=plugin_dir,
                target_directory=cls.file_server_resource_base)
            cls.wagons[plugin_dir] = test_utils.create_plugin_wagon(
                plugin_dir_name=plugin_dir,
                target_directory=cls.file_server_resource_base)
Example #17
    def setUp(self):

        # change levels to 'DEBUG' to troubleshoot.
        self.logger = setup_logger(
            'cloudify-agent.tests',
            logger_level=logging.INFO)
        from cloudify_agent.api import utils
        utils.logger.setLevel(logging.INFO)

        self.curr_dir = os.getcwd()
        self.temp_folder = tempfile.mkdtemp(prefix='cfy-agent-tests-')
        for key, value in self.agent_env_vars.iteritems():
            os.environ[key] = value

        def clean_temp_folder():
            try:
                shutil.rmtree(self.temp_folder)
            except win_error:
                # no hard feelings if the file is locked.
                pass

        def clean_env():
            for var in self.agent_env_vars.iterkeys():
                del os.environ[var]

        self.addCleanup(clean_temp_folder)
        self.addCleanup(clean_env)
        os.chdir(self.temp_folder)
        self.addCleanup(lambda: os.chdir(self.curr_dir))

        self.username = getpass.getuser()
        self.logger.info('Working directory: {0}'.format(self.temp_folder))
    def __init__(self,
                 logger=None,
                 host=None,
                 user=None,
                 key=None,
                 port=None,
                 password=None,
                 validate_connection=True,
                 fabric_env=None,
                 tmpdir=None):

        # logger
        self.logger = logger or setup_logger('fabric_runner')

        # silence paramiko
        logging.getLogger('paramiko.transport').setLevel(logging.WARNING)

        # connection details
        self.port = port or DEFAULT_REMOTE_EXECUTION_PORT
        self.password = password
        self.user = user
        self.host = host
        self.key = key
        self.tmpdir = tmpdir

        # fabric environment
        self.env = self._set_env()
        self.env.update(fabric_env or {})

        self._validate_ssh_config()
        if validate_connection:
            self.validate_connection()
Example #19
 def __init__(self,
              node_id=None,
              node_name=None,
              blueprint_id=None,
              deployment_id=None,
              execution_id=None,
              properties=None,
              runtime_properties=None,
              relationships=None,
              capabilities=None,
              related=None,
              source=None,
              target=None,
              operation=None,
              resources=None,
              tenant=None,
              rest_token=None,
              provider_context=None,
              bootstrap_context=None):
     tenant = tenant or {}
     super(MockCloudifyContext, self).__init__({
         'blueprint_id': blueprint_id,
         'deployment_id': deployment_id,
         'node_id': node_id,
         'node_name': node_name,
         'node_properties': properties,
         'operation': operation,
         'tenant': tenant,
         'rest_token': rest_token
     })
     self._node_id = node_id
     self._node_name = node_name
     self._deployment_id = deployment_id
     self._execution_id = execution_id
     self._properties = properties or {}
     self._runtime_properties = \
         runtime_properties if runtime_properties is not None else {}
     self._resources = resources or {}
     self._source = source
     self._target = target
     if capabilities and not isinstance(capabilities, ContextCapabilities):
         raise ValueError("MockCloudifyContext(capabilities=?) must be "
                          "instance of ContextCapabilities, not {0}".format(
                              capabilities))
     self._related = related
     self._provider_context = provider_context or {}
     self._bootstrap_context = bootstrap_context or BootstrapContext({})
     self._mock_context_logger = setup_logger('mock-context-logger')
     if node_id:
         self._instance = MockNodeInstanceContext(
             id=node_id,
             runtime_properties=self._runtime_properties,
             relationships=relationships)
         self._capabilities = capabilities or ContextCapabilities(
             self._endpoint, self._instance)
         self._node = MockNodeContext(node_name, properties)
     if self._source is None and self._target:
         self._source = MockContext({'instance': None, 'node': None})
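A hedged sketch of how such a mock context is typically used in a test; the identifiers and properties are illustrative, and current_ctx is the cloudify.state.current_ctx helper used the same way elsewhere in these examples:

ctx = MockCloudifyContext(
    node_id='node_1',                        # illustrative identifiers
    node_name='vm',
    deployment_id='dep_1',
    properties={'install_agent': False},
    runtime_properties={'ip': '192.0.2.7'},
)
current_ctx.set(ctx)      # expose the mock to the code under test
try:
    pass                  # exercise the code that reads ctx here
finally:
    current_ctx.clear()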
Example #20
 def setUp(self):
     self.logger = setup_logger(self._testMethodName, logging.INFO)
     self.client = utils.create_rest_client()
     utils.restore_provider_context()
     TestEnvironment.start_celery_management_worker()
     self.test_logs_file = path.join(testenv_instance.events_and_logs_dir,
                                     '{0}.log'.format(self.id()))
     testenv_instance.handle_logs = \
         self._write_test_events_and_logs_to_file
Example #21
 def __init__(self,
              cloudify_agent,
              logger=None):
     self.cloudify_agent = cloudify_agent
     self.logger = logger or setup_logger(self.__class__.__name__)
     self.broker_get_settings_from_manager = cloudify_agent.get(
         'broker_get_settings_from_manager',
         True,
     )
def start_events_and_logs_polling(logs_handler_retriever=None):
    """
    Fetches events and logs from RabbitMQ.
    """
    if not RABBITMQ_POLLING_ENABLED:
        return

    setup_logger('pika', logging.INFO)
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()
    queues = ['cloudify-events', 'cloudify-logs']
    for q in queues:
        channel.queue_declare(queue=q, auto_delete=True, durable=True,
                              exclusive=False)

    def callback(ch, method, properties, body):
        try:
            event = json.loads(body)
            if RABBITMQ_VERBOSE_MESSAGES_ENABLED:
                output = '\n{0}'.format(json.dumps(event, indent=4))
            else:
                output = create_event_message_prefix(event)
            if output:
                logger.info(output)
            if logs_handler_retriever:
                logs_handler_retriever()(output or '', event)
        except Exception as e:
            logger.error('event/log format error - output: {0} [message={1}]'
                         .format(body, e.message))
            s_traceback = StringIO.StringIO()
            traceback.print_exc(file=s_traceback)
            logger.error(s_traceback.getvalue())

    def consume():
        channel.basic_consume(callback, queue=queues[0], no_ack=True)
        channel.basic_consume(callback, queue=queues[1], no_ack=True)
        channel.start_consuming()
    logger.info("Starting RabbitMQ events/logs polling - queues={0}".format(
        queues))

    polling_thread = threading.Thread(target=consume)
    polling_thread.daemon = True
    polling_thread.start()
Example #23
    def setUpClass(cls):
        cls.logger = setup_logger(cls.__name__)
        cls.resource_base = tempfile.mkdtemp(
            prefix='file-server-resource-base')
        cls.fs = FileServer(
            root_path=cls.resource_base)
        cls.fs.start()

        cls.source_url = get_source_uri()
        cls.requirements_file = get_requirements_uri()
Example #24
 def __init__(self,
              node_id=None,
              node_name=None,
              blueprint_id=None,
              deployment_id=None,
              execution_id=None,
              properties=None,
              runtime_properties=None,
              relationships=None,
              capabilities=None,
              related=None,
              source=None,
              target=None,
              operation=None,
              resources=None,
              provider_context=None,
              bootstrap_context=None):
     super(MockCloudifyContext, self).__init__({
         'blueprint_id': blueprint_id,
         'deployment_id': deployment_id,
         'node_id': node_id,
         'node_name': node_name,
         'node_properties': properties,
         'operation': operation})
     self._node_id = node_id
     self._node_name = node_name
     self._deployment_id = deployment_id
     self._execution_id = execution_id
     self._properties = properties or {}
     self._runtime_properties = runtime_properties or {}
     self._resources = resources or {}
     self._source = source
     self._target = target
     if capabilities and not isinstance(capabilities, ContextCapabilities):
         raise ValueError(
             "MockCloudifyContext(capabilities=?) must be "
             "instance of ContextCapabilities, not {0}".format(
                 capabilities))
     self._related = related
     self._provider_context = provider_context or {}
     self._bootstrap_context = bootstrap_context or BootstrapContext({})
     self._mock_context_logger = setup_logger('mock-context-logger')
     if node_id:
         self._instance = MockNodeInstanceContext(
             id=node_id,
             runtime_properties=self._runtime_properties,
             relationships=relationships)
         self._capabilities = capabilities or ContextCapabilities(
             self._endpoint, self._instance)
         self._node = MockNodeContext(node_name, properties)
     if self._source is None and self._target:
         self._source = MockContext({
             'instance': None,
             'node': None
         })
Example #25
    def setUp(self):
        super(BaseShellTest, self).setUp()
        self.logger = setup_logger('cloudify-agent.tests.shell',
                                   logger_level=logging.DEBUG)

        utils.logger.setLevel(logging.DEBUG)

        self.currdir = os.getcwd()
        self.workdir = tempfile.mkdtemp(prefix='cfy-agent-shell-tests-')
        self.logger.info('Working directory: {0}'.format(self.workdir))
        os.chdir(self.workdir)
 def _get_ctx_mock(self, instance_id, boot):
     rel_specs = [MockRelationshipContext(
         target=MockRelationshipSubjectContext(node=MockNodeContext(
             properties={'boot': boot}), instance=MockNodeInstanceContext(
             runtime_properties={
                 OPENSTACK_TYPE_PROPERTY: VOLUME_OPENSTACK_TYPE,
                 OPENSTACK_ID_PROPERTY: instance_id
             })))]
     ctx = mock.MagicMock()
     ctx.instance = MockNodeInstanceContext(relationships=rel_specs)
     ctx.logger = setup_logger('mock-logger')
     return ctx
Example #27
    def setUpClass(cls):
        cls.logger = setup_logger(cls.__name__)
        cls.resource_base = tempfile.mkdtemp(
            prefix='file-server-resource-base')
        cls.fs = FileServer(
            root_path=cls.resource_base)
        cls.fs.start()
        project_dir = os.path.dirname(
            os.path.dirname(cloudify_agent.__file__))

        cls.source_url = project_dir
        cls.requirements_file = os.path.join(
            project_dir, 'dev-requirements.txt')
Example #28
    def setUp(self):
        super(BaseShellTest, self).setUp()
        self.logger = setup_logger(
            'cloudify-agent.tests.shell',
            logger_level=logging.DEBUG)

        utils.logger.setLevel(logging.DEBUG)

        self.currdir = os.getcwd()
        self.workdir = tempfile.mkdtemp(
            prefix='cfy-agent-shell-tests-')
        self.logger.info('Working directory: {0}'.format(self.workdir))
        os.chdir(self.workdir)
 def _get_ctx_mock(self, instance_id, boot):
     rel_specs = [MockRelationshipContext(
         target=MockRelationshipSubjectContext(node=MockNodeContext(
             properties={'boot': boot}), instance=MockNodeInstanceContext(
             runtime_properties={
                 OPENSTACK_TYPE_PROPERTY: VOLUME_OPENSTACK_TYPE,
                 OPENSTACK_ID_PROPERTY: instance_id,
                 VOLUME_BOOTABLE: False
             })))]
     ctx = mock.MagicMock()
     ctx.instance = MockNodeInstanceContext(relationships=rel_specs)
     ctx.logger = setup_logger('mock-logger')
     return ctx
Example #30
    def setUp(self):

        # change levels to 'DEBUG' to troubleshoot.
        self.logger = setup_logger("cloudify-agent.tests", logger_level=logging.INFO)
        from cloudify_agent.api import utils

        utils.logger.setLevel(logging.INFO)

        self.curr_dir = os.getcwd()
        self.temp_folder = tempfile.mkdtemp(prefix="cfy-agent-tests-")
        self.username = getpass.getuser()
        self.logger.info("Working directory: {0}".format(self.temp_folder))
        os.chdir(self.temp_folder)
Example #31
    def setUp(self):
        super(BaseTest, self).setUp()
        self.temp_folder = tempfile.mkdtemp(prefix='cfy-agent-tests-')
        self._rest_cert_path = agent_ssl_cert.get_local_cert_path(
            self.temp_folder)

        agent_env_vars = {
            constants.MANAGER_FILE_SERVER_URL_KEY: 'localhost',
            constants.REST_HOST_KEY: 'localhost',
            constants.REST_PORT_KEY: '80',
            constants.BROKER_SSL_CERT_PATH: self._rest_cert_path,
            constants.LOCAL_REST_CERT_FILE_KEY: self._rest_cert_path,
            constants.MANAGER_FILE_SERVER_ROOT_KEY: 'localhost/resources'
        }

        # change levels to 'DEBUG' to troubleshoot.
        self.logger = setup_logger(
            'cloudify-agent.tests',
            logger_level=logging.INFO)
        from cloudify_agent.api import utils
        utils.logger.setLevel(logging.INFO)

        self.curr_dir = os.getcwd()
        for key, value in agent_env_vars.items():
            os.environ[key] = value

        def clean_folder(folder_name):
            try:
                shutil.rmtree(folder_name)
            except win_error:
                # no hard feelings if the file is locked.
                pass

        def clean_storage_dir():
            if os.path.exists(get_storage_directory()):
                clean_folder(get_storage_directory())

        def clean_env():
            for var in agent_env_vars:
                del os.environ[var]

        self.addCleanup(clean_folder, folder_name=self.temp_folder)
        self.addCleanup(clean_storage_dir)
        self.addCleanup(clean_env)
        os.chdir(self.temp_folder)
        self.addCleanup(lambda: os.chdir(self.curr_dir))

        self.username = getpass.getuser()
        self.logger.info('Working directory: {0}'.format(self.temp_folder))

        self.mock_ctx_with_tenant()
 def setUp(self):
     self.logger = setup_logger('InstallerTest')
     config_path = os.environ.get('CONFIG_PATH')
     self.logger.info('Config: {0}'.format(config_path))
     with open(config_path) as config_file:
         self.config = yaml.load(config_file)
     self.logger.info(str(self.config))
     current_ctx.set(MockCloudifyContext())
     self.runner = LocalCommandRunner(self.logger)
     self.base_dir = tempfile.mkdtemp()
     self.logger.info('Base dir: {0}'.format(self.base_dir))
     _, self.script_path = tempfile.mkstemp(dir=self.base_dir,
                                            suffix='.py')
     install_utils.prepare_script({}, self.script_path)
Example #33
    def setUp(self):
        self.temp_folder = tempfile.mkdtemp(prefix='cfy-agent-tests-')
        self._rest_cert_path = agent_ssl_cert.get_local_cert_path(
            self.temp_folder)

        agent_env_vars = {
            constants.MANAGER_FILE_SERVER_URL_KEY: 'localhost',
            constants.REST_HOST_KEY: 'localhost',
            constants.REST_PORT_KEY: '80',
            constants.BROKER_SSL_CERT_PATH: self._rest_cert_path,
            constants.LOCAL_REST_CERT_FILE_KEY: self._rest_cert_path,
            constants.MANAGER_FILE_SERVER_ROOT_KEY: 'localhost/resources'
        }

        # change levels to 'DEBUG' to troubleshoot.
        self.logger = setup_logger(
            'cloudify-agent.tests',
            logger_level=logging.INFO)
        from cloudify_agent.api import utils
        utils.logger.setLevel(logging.INFO)

        self.curr_dir = os.getcwd()
        for key, value in agent_env_vars.iteritems():
            os.environ[key] = value

        def clean_folder(folder_name):
            try:
                shutil.rmtree(folder_name)
            except win_error:
                # no hard feelings if the file is locked.
                pass

        def clean_storage_dir():
            if os.path.exists(get_storage_directory()):
                clean_folder(get_storage_directory())

        def clean_env():
            for var in agent_env_vars.iterkeys():
                del os.environ[var]

        self.addCleanup(clean_folder, folder_name=self.temp_folder)
        self.addCleanup(clean_storage_dir)
        self.addCleanup(clean_env)
        os.chdir(self.temp_folder)
        self.addCleanup(lambda: os.chdir(self.curr_dir))

        self.username = getpass.getuser()
        self.logger.info('Working directory: {0}'.format(self.temp_folder))

        self.mock_ctx_with_tenant()
Example #34
    def __init__(self, session_config, validate_connection=True, logger=None):

        logger = logger or setup_logger('WinRMRunner')

        # Validations - [host, user, password]
        validate(session_config)

        # Defaults - [protocol, uri, port]
        defaults(session_config)

        self.session_config = session_config
        self.session = self._create_session()
        self.logger = logger

        if validate_connection:
            self.test_connectivity()
Example #35
def send_snmp_trap(event_context, **kwargs):
    notification_type = _create_notification_type(event_context)
    destination_address = kwargs['destination_address']
    destination_port = kwargs['destination_port']
    community_string = kwargs['community_string']

    error_indication, _, _, _ = next(
        sendNotification(
            SnmpEngine(), CommunityData(community_string, mpModel=1),
            UdpTransportTarget((destination_address, destination_port)),
            ContextData(), NOTIFY_TYPE, notification_type))

    logger = setup_logger('cloudify.snmp.snmp_trap')
    if error_indication:
        logger.error(error_indication)

    logger.info(
        'Sent SNMP trap of the event: {0} and the execution_id: {1}'.format(
            event_context['event_type'], event_context['execution_id']))
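A hedged usage sketch for the hook above; the addresses and community string are illustrative, and event_context carries at least the two keys this snippet reads (_create_notification_type may require more):

event_context = {
    'event_type': 'workflow_failed',        # illustrative event
    'execution_id': 'f3b5cbb3-execution',
}
send_snmp_trap(
    event_context,
    destination_address='192.0.2.10',       # illustrative SNMP manager
    destination_port=162,
    community_string='public',
)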
Example #36
    def __init__(self,
                 session_config,
                 validate_connection=True,
                 logger=None):

        logger = logger or setup_logger('WinRMRunner')

        # Validations - [host, user, password]
        validate(session_config)

        # Defaults - [protocol, uri, port]
        defaults(session_config)

        self.session_config = session_config
        self.session = self._create_session()
        self.logger = logger

        if validate_connection:
            self.test_connectivity()
Example #37
    def __init__(self,
                 is_shell,
                 logger=None,
                 conn_cmd=None,
                 sh_cmd="/bin/sh",
                 validate_connection=True):

        assert is_shell is True

        # logger
        self.logger = logger or setup_logger('shell_runner')
        self._conn_cmd = conn_cmd
        self._sh_cmd = sh_cmd

        if validate_connection:
            self.validate_connection()

        self.check_and_install_program('sudo')
        self.check_and_install_program('wget')
        self.check_and_install_program('rsync')
        self.check_and_install_program('python')
Example #38
 def __init__(self, storage):
     self._context = {}
     self.local = True
     self._endpoint = storage
     self._local_task_thread_pool_size = 1
     self._task_retry_interval = 1
     self._task_retries = 1
     self._subgraph_retries = 1
     self._mock_context_logger = \
         setup_logger('mock-context-logger')
     handler = \
         LocalCloudifyWorkflowContextHandler(self, storage)
     self.internal = \
         CloudifyWorkflowContextInternal(self, handler)
     raw_nodes = storage.get_nodes()
     raw_node_instances = storage.get_node_instances()
     self._nodes = dict(
         (node.id, CloudifyWorkflowNode(self, node, self))
         for node in raw_nodes)
     self._node_instances = dict(
         (instance.id, CloudifyWorkflowNodeInstance(
             self, self._nodes[instance.node_id], instance,
             self))
         for instance in raw_node_instances)
Example #39
    def setUp(self):

        # change levels to 'DEBUG' to troubleshoot.
        self.logger = setup_logger('cloudify-agent.tests',
                                   logger_level=logging.INFO)
        from cloudify_agent.api import utils
        utils.logger.setLevel(logging.INFO)

        self.curr_dir = os.getcwd()
        self.temp_folder = tempfile.mkdtemp(prefix='cfy-agent-tests-')

        def clean_temp_folder():
            try:
                shutil.rmtree(self.temp_folder)
            except win_error:
                # no hard feelings if the file is locked.
                pass

        self.addCleanup(clean_temp_folder)
        os.chdir(self.temp_folder)
        self.addCleanup(lambda: os.chdir(self.curr_dir))

        self.username = getpass.getuser()
        self.logger.info('Working directory: {0}'.format(self.temp_folder))
Example #40
import subprocess
import sys
import time

import pika

from cloudify.utils import setup_logger
from cloudify_agent.app import app as celery_client

from testenv.constants import FILE_SERVER_PORT
from testenv.constants import MANAGER_REST_PORT
from testenv.constants import FILE_SERVER_BLUEPRINTS_FOLDER
from testenv.constants import FILE_SERVER_DEPLOYMENTS_FOLDER


logger = setup_logger('celery_worker_process')


class CeleryWorkerProcess(object):

    def __init__(self,
                 queues,
                 test_working_dir,
                 additional_includes=None,
                 name=None,
                 hostname=None,
                 manager_rest_port=MANAGER_REST_PORT,
                 concurrency=1):

        self.test_working_dir = test_working_dir
        self.name = name or queues[0]
import mock_plugins
from testenv.constants import MANAGER_REST_PORT
from testenv.constants import RABBITMQ_VERBOSE_MESSAGES_ENABLED
from testenv.constants import RABBITMQ_POLLING_ENABLED
from testenv.constants import FILE_SERVER_RESOURCES_URI
from testenv.constants import FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
from testenv.constants import FILE_SERVER_BLUEPRINTS_FOLDER
from testenv.constants import FILE_SERVER_DEPLOYMENTS_FOLDER
from testenv.processes.elastic import ElasticSearchProcess
from testenv.processes.manager_rest import ManagerRestProcess
from testenv.processes.riemann import RiemannProcess
from testenv.processes.celery import CeleryWorkerProcess
from testenv import utils

logger = setup_logger('TESTENV')
setup_logger('cloudify.rest_client', logging.INFO)
testenv_instance = None


class TestCase(unittest.TestCase):

    """
    A test case for cloudify integration tests.
    """

    def setUp(self):
        self.logger = setup_logger(self._testMethodName, logging.INFO)
        self.client = utils.create_rest_client()
        self.es_db_client = utils.create_es_db_client()
        utils.restore_provider_context()
import logging
import tempfile

from cloudify.utils import setup_logger

from manager_rest.storage import db, models
from manager_rest.constants import SECURITY_FILE_LOCATION
from manager_rest.flask_utils import setup_flask_app as _setup_flask_app

from integration_tests.framework import constants, utils
from integration_tests.framework.docl import execute, copy_file_to_manager, \
    read_file as read_manager_file
from integration_tests.tests.constants import PROVIDER_CONTEXT
from integration_tests.tests.utils import get_resource

logger = setup_logger('Flask Utils', logging.INFO)
security_config = None
SCRIPT_PATH = '/tmp/reset_storage.py'
CONFIG_PATH = '/tmp/reset_storage_config.json'


def prepare_reset_storage_script():
    reset_script = get_resource('scripts/reset_storage.py')
    copy_file_to_manager(reset_script, SCRIPT_PATH)
    with tempfile.NamedTemporaryFile(delete=False) as f:
        json.dump(
            {
                'config': {
                    '': constants.CONFIG_FILE_LOCATION,
                    'security': SECURITY_FILE_LOCATION,
                    'authorization': constants.AUTHORIZATION_FILE_LOCATION
Example #43
import pkg_resources
from jinja2 import Template
from base64 import urlsafe_b64encode

from cloudify.context import BootstrapContext
from cloudify.workflows import tasks as workflows_tasks

from cloudify.utils import setup_logger

from cloudify_rest_client import CloudifyClient

import cloudify_agent
from cloudify_agent import VIRTUALENV
from cloudify_agent.api import defaults

logger = setup_logger('cloudify_agent.api.utils')


class _Internal(object):

    """
    Contains various internal utility methods. Import this at your own
    peril, as backwards compatibility is not guaranteed.
    """

    CLOUDIFY_DAEMON_NAME_KEY = 'CLOUDIFY_DAEMON_NAME'
    CLOUDIFY_DAEMON_STORAGE_DIRECTORY_KEY = 'CLOUDIFY_DAEMON_STORAGE_DIRECTORY'
    CLOUDIFY_DAEMON_USER_KEY = 'CLOUDIFY_DAEMON_USER'

    @classmethod
    def get_daemon_name(cls):
Example #44
#    * limitations under the License.

import os
import re
import subprocess
import threading
import time
import requests

from os import path
from testenv.constants import MANAGER_REST_PORT
from testenv.utils import get_resource
from cloudify.utils import setup_logger


logger = setup_logger('riemann_process')


class RiemannProcess(object):
    """
    Manages a riemann server process lifecycle.
    """

    def __init__(self, config_path, libs_path):
        self._config_path = config_path
        self._libs_path = libs_path
        self.pid = None
        self._process = None
        self._detector = None
        self._event = None
        self._riemann_logs = list()
Example #45
 def setUpClass(cls):
     cls.logger = setup_logger('CliCommandTest')
Example #46
#    * See the License for the specific language governing permissions and
#    * limitations under the License.

import logging
import re
import shlex
import subprocess
import time
import sys
import os
import elasticsearch

from cloudify.utils import setup_logger
from testenv.constants import STORAGE_INDEX_NAME

logger = setup_logger('elasticsearch_process')


class ElasticSearchProcess(object):
    """
    Manages an ElasticSearch server process lifecycle.
    """
    def __init__(self):
        self._pid = None
        self._process = None
        setup_logger('elasticsearch', logging.INFO)
        setup_logger('elasticsearch.trace', logging.INFO)

    @staticmethod
    def _verify_service_responsiveness(timeout=120):
        import urllib2
Example #47
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from contextlib import closing

import psycopg2
from cloudify.utils import setup_logger
from integration_tests.framework import docker
from manager_rest.flask_utils import get_postgres_conf
from manager_rest.storage import db

logger = setup_logger('postgresql', logging.INFO)
setup_logger('postgresql.trace', logging.INFO)


def run_query(container_id, query, db_name=None, fetch_results=True):
    conf = get_postgres_conf()
    manager_ip = docker.get_manager_ip(container_id)

    db_name = db_name or conf.db_name
    with psycopg2.connect(database=db_name,
                          user=conf.username,
                          password=conf.password,
                          host=manager_ip) as con:
        con.autocommit = True
        logger.info('Trying to execute SQL query: ' + query)
        with closing(con.cursor()) as cur:
Example #48
import tarfile
from contextlib import contextmanager

from wagon import wagon
from agent_packager import packager

from cloudify.exceptions import NonRecoverableError
from cloudify.utils import LocalCommandRunner
from cloudify.utils import setup_logger

import cloudify_agent

from cloudify_agent import VIRTUALENV
from cloudify_agent.tests import resources

logger = setup_logger('cloudify_agent.tests.utils')


@contextmanager
def env(key, value):
    os.environ[key] = value
    yield
    del os.environ[key]

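A hedged usage sketch of the env() helper above; the variable name and value are illustrative, and the variable is removed again when the block exits:

with env('MOCK_PLUGIN_DIR', '/tmp/mock-plugin'):
    assert os.environ['MOCK_PLUGIN_DIR'] == '/tmp/mock-plugin'
assert 'MOCK_PLUGIN_DIR' not in os.environ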

def create_plugin_tar(plugin_dir_name, target_directory):

    """
    Create a tar file from the plugin.

    :param plugin_dir_name: the plugin directory name, relative to the
Example #49
import os

from cloudify.utils import setup_logger

import cloudify_agent
from cloudify_agent.api import utils
from cloudify_agent.api import defaults
from cloudify_agent.api.pm.base import Daemon

logger = setup_logger('cloudify_agent.tests.api.test_utils')


def test_get_absolute_resource_path():
    full_path = utils.get_absolute_resource_path(
        os.path.join('pm', 'nssm', 'nssm.exe'))
    expected = os.path.join(os.path.dirname(cloudify_agent.__file__),
                            'resources', 'pm', 'nssm', 'nssm.exe')
    assert expected == full_path


def test_daemon_to_dict(agent_ssl_cert):
    daemon = Daemon(rest_host=['127.0.0.1'],
                    name='name',
                    queue='queue',
                    broker_ip=['127.0.0.1'],
                    local_rest_cert_file=agent_ssl_cert.local_cert_path())
    daemon_json = utils.internal.daemon_to_dict(daemon)
    assert daemon_json['rest_host'] == ['127.0.0.1']
    assert daemon_json['broker_ip'] == ['127.0.0.1']
    assert daemon_json['name'] == 'name'
    assert daemon_json['queue'] == 'queue'
Example #50
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############

import os
import sys

from cloudify.utils import setup_logger

from cloudify_cli import cli
from cloudify_cli.utils import DEFAULT_LOG_FILE

runner_lgr = setup_logger('cli_runner')


def run_cli_expect_system_exit_0(command):
    run_cli_expect_system_exit_code(command, expected_code=0)


def run_cli_expect_system_exit_1(command):
    run_cli_expect_system_exit_code(command, expected_code=1)


def run_cli_expect_system_exit_code(command, expected_code):
    try:
        run_cli(command)
    except SystemExit as e:
        assert e.code == expected_code
Example #51
from cloudify import dispatch
from cloudify import exceptions as cloudify_exceptions
from cloudify.state import ctx, current_ctx
from cloudify.utils import setup_logger, target_plugin_prefix
from cloudify.exceptions import NonRecoverableError
from cloudify_rest_client.plugins import Plugin
from cloudify_rest_client.constants import VisibilityState

from cloudify_agent.api import exceptions
from cloudify_agent.api.plugins import installer

from cloudify_agent.tests import resources
from cloudify_agent.tests import utils as test_utils
from cloudify_agent.tests import plugins

logger = setup_logger('api.plugins.test_installer',
                      logger_level=logging.DEBUG)


@pytest.mark.only_rabbit
def test_install_from_source(test_plugins, file_server):
    with _patch_client([]):
        installer.install(
            plugins.plugin_struct(file_server, source='mock-plugin.tar'))
    _assert_task_runnable('mock_plugin.tasks.run', expected_return='run')
    _assert_task_runnable('mock_plugin.tasks.call_entry_point',
                          expected_return='mock-plugin-entry-point')


@pytest.mark.only_rabbit
def test_install_from_source_with_deployment_id(test_plugins, file_server):
    deployment_id = 'deployment'
Example #52
 def __init__(self, logger=None):
     self.logger = logger or setup_logger(self.__class__.__name__)
     self.runner = LocalCommandRunner(logger=self.logger)
Example #53
 def __init__(self, logger=None):
     self.logger = logger or setup_logger(self.__class__.__name__)
     self.runner = LocalCommandRunner(logger=self.logger)
 def setUpClass(cls):
     cls.logger = setup_logger(
         'cloudify_agent.system_tests.manager.test_agent_installer')
Example #55
    def __init__(self, logger=None, **params):
        """

        ####################################################################
        # When subclassing this, do not implement any logic inside the
        # constructor except for in-memory calculations and settings, as the
        # daemon may be instantiated many times for an existing agent. Also,
        # all daemon attributes must be JSON serializable, as daemons are
        # represented as dictionaries and stored as JSON files on Disk. If
        # you wish to have a non serializable attribute, mark it private by
        # naming it _<name>. Attributes starting with underscore will be
        # omitted when serializing the object.
        ####################################################################

        :param logger: a logger to be used to log various subsequent
        operations.
        :type logger: logging.Logger

        :param params: key-value pairs as stated above.
        :type params dict

        """

        # will be populated later on with runtime properties of the host
        # node instance this agent is dedicated for (if needed)
        self._runtime_properties = None

        # configure logger
        self._logger = logger or setup_logger(
            logger_name='cloudify_agent.api.pm.{0}'.format(
                self.PROCESS_MANAGEMENT))

        # save params
        self._params = params

        # configure command runner
        self._runner = LocalCommandRunner(logger=self._logger)

        # Mandatory parameters
        self.validate_mandatory()
        self.rest_host = params['rest_host']
        self.broker_ip = params['broker_ip']
        self.local_rest_cert_file = params['local_rest_cert_file']
        self.cluster = params.get('cluster', [])

        # Optional parameters - REST client
        self.validate_optional()
        self.rest_port = params.get('rest_port', defaults.INTERNAL_REST_PORT)
        # REST token needs to be prefixed with _ so it's not stored
        # when the daemon is serialized
        self._rest_token = params.get('rest_token')
        self._rest_tenant = params.get('rest_tenant')

        # Optional parameters
        self.name = params.get('name') or self._get_name_from_manager()
        self.user = params.get('user') or getpass.getuser()
        self.broker_ssl_enabled = params.get('broker_ssl_enabled', True)
        if self.broker_ssl_enabled:
            self.broker_ssl_cert_path = params['local_rest_cert_file']
            with open(self.broker_ssl_cert_path) as cert_file:
                self.broker_ssl_cert_content = cert_file.read()
        else:
            self.broker_ssl_cert_path = ''
            self.broker_ssl_cert_content = ''
        # Port must be determined after SSL enabled has been set in order for
        # intelligent port selection to work properly
        self.broker_port = self._get_broker_port()
        self.broker_user = params.get('broker_user', 'guest')
        self.broker_pass = params.get('broker_pass', 'guest')
        self.host = params.get('host')
        self.deployment_id = params.get('deployment_id')
        self.queue = params.get('queue') or self._get_queue_from_manager()

        # This is not retrieved by param as an option any more as it then
        # introduces ambiguity over which values should be used if the
        # components of this differ from the passed in broker_user, pass, etc
        # These components need to be known for the _delete_amqp_queues
        # function.
        if self.cluster:
            self.broker_url = [
                defaults.BROKER_URL.format(
                    host=node['broker_ip'],
                    port=self.broker_port,  # not set in provider context
                    username=node['broker_user'],
                    password=node['broker_pass'],
                ) for node in self.cluster
            ]
        else:
            self.broker_url = defaults.BROKER_URL.format(
                host=self.broker_ip,
                port=self.broker_port,
                username=self.broker_user,
                password=self.broker_pass,
            )
        self.min_workers = params.get('min_workers') or defaults.MIN_WORKERS
        self.max_workers = params.get('max_workers') or defaults.MAX_WORKERS
        self.workdir = params.get('workdir') or os.getcwd()

        self.extra_env_path = params.get('extra_env_path')
        self.log_level = params.get('log_level') or defaults.LOG_LEVEL
        self.log_file = params.get('log_file') or os.path.join(
            self.workdir, '{0}.log'.format(self.name))
        self.pid_file = params.get('pid_file') or os.path.join(
            self.workdir, '{0}.pid'.format(self.name))

        # create working directory if its missing
        if not os.path.exists(self.workdir):
            self._logger.debug('Creating directory: {0}'.format(self.workdir))
            os.makedirs(self.workdir)

        # save as attributes so that they will be persisted in the json files.
        # we will make use of these values when loading agents by name.
        self.process_management = self.PROCESS_MANAGEMENT
        self.virtualenv = VIRTUALENV
        self.cluster_settings_path = params.get('cluster_settings_path')
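A hedged sketch of a subclass that follows the constructor rules documented in the docstring above: only in-memory settings, JSON-serializable public attributes, and non-serializable state behind an underscore prefix. The class, PROCESS_MANAGEMENT value and attribute names are illustrative:

class ExampleDaemon(Daemon):

    PROCESS_MANAGEMENT = 'example'          # hypothetical value

    def __init__(self, logger=None, **params):
        super(ExampleDaemon, self).__init__(logger=logger, **params)
        # JSON-serializable - persisted with the daemon's JSON file
        self.config_path = params.get('config_path')
        # underscore-prefixed - omitted when the daemon is serialized
        self._runtime_handle = None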
Example #56
from os import kill, sep
from os.path import join
from shutil import move
from signal import SIGHUP
from subprocess import check_output

import json
import re
import yaml

from jinja2 import Template
from tempfile import NamedTemporaryFile

from cloudify.utils import setup_logger

logger = setup_logger('cloudify.monitoring')

PROMETHEUS_CONFIG_DIR = join(
    sep,
    'etc',
    'prometheus',
)
PROMETHEUS_TARGETS_DIR = join(PROMETHEUS_CONFIG_DIR, 'targets')
PROMETHEUS_ALERTS_DIR = join(PROMETHEUS_CONFIG_DIR, 'alerts')
PROMETHEUS_TARGETS_TEMPLATE = '- targets: {target_addresses}\n'\
                              '  labels: {target_labels}'
PROMETHEUS_MISSING_ALERT = Template("""groups:
  - name: {{ name }}
    rules:{% for host in hosts %}
      - alert: {{ name }}_missing
        expr: absent({{ name }}_healthy{host="{{ host }}"})
Example #57
from multiprocessing import Process
from contextlib import contextmanager

import sh
import pika
import wagon

import constants

from cloudify.utils import setup_logger
from cloudify_cli import env as cli_env
from cloudify_rest_client import CloudifyClient
from manager_rest.utils import create_auth_header
from cloudify_cli.constants import CLOUDIFY_BASE_DIRECTORY_NAME

logger = setup_logger('testenv.utils')


def _write(stream, s):
    try:
        s = s.encode('utf-8')
    except UnicodeDecodeError:
        pass
    stream.write(s)


def sh_bake(command):
    return command.bake(_out=lambda line: _write(sys.stdout, line),
                        _err=lambda line: _write(sys.stderr, line))

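A hedged usage sketch of sh_bake above, assuming the sh package from the imports; the baked command streams its output through _write to the test's stdout/stderr:

ls = sh_bake(sh.ls)
ls('-la')          # each output line is forwarded to sys.stdout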
Example #58
import os
import tempfile
import shutil
import pip

from os import path
from cloudify import utils
from cloudify.constants import VIRTUALENV_PATH_KEY
from cloudify.constants import CELERY_WORK_DIR_PATH_KEY
from cloudify.exceptions import NonRecoverableError
from cloudify.utils import LocalCommandRunner
from cloudify.utils import setup_logger
from cloudify.decorators import operation


logger = setup_logger('plugin_installer.tasks')
manager_branch = 'master'


@operation
def install(ctx, plugins, **kwargs):

    """
    Installs the given plugins.
    Each method decorated with the 'cloudify.decorators.operation'
    will be registered as a task.

    :param ctx: Invocation context. See class CloudifyContext @context.py
    :param plugins: A collection of plugins to install.
    """