Ejemplo n.º 1
0
 def __init__(self):
     """Initialize process bookkeeping and configure elasticsearch loggers."""
     # Nothing has been spawned yet.
     self._pid = None
     self._process = None
     # Configure the elasticsearch client loggers at INFO level.
     setup_default_logger('elasticsearch', logging.INFO)
     setup_default_logger('elasticsearch.trace', logging.INFO)
Ejemplo n.º 2
0
def start_events_and_logs_polling():
    """
    Fetches events and logs from RabbitMQ.

    Declares the cloudify events/logs queues on a local broker and
    consumes both on a daemon background thread, logging every message.
    Does nothing when RABBITMQ_POLLING_ENABLED is falsy.
    """
    if not RABBITMQ_POLLING_ENABLED:
        return

    # Configure pika's logger at INFO level.
    setup_default_logger('pika', logging.INFO)
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()
    queues = ['cloudify-events', 'cloudify-logs']
    for q in queues:
        channel.queue_declare(queue=q, auto_delete=True, durable=True,
                              exclusive=False)

    def callback(ch, method, properties, body):
        # Render and log one message; never let a malformed payload
        # kill the consumer thread.
        try:
            output = json.loads(body)
            if RABBITMQ_VERBOSE_MESSAGES_ENABLED:
                output = '\n{0}'.format(json.dumps(output, indent=4))
            else:
                output = create_event_message_prefix(output)
            logger.info(output)
        except Exception as e:
            # Format the exception object itself: `e.message` is deprecated
            # and missing on many exception types, which would raise a
            # secondary AttributeError here.
            logger.error('event/log format error - output: {0} [message={1}]'
                         .format(body, e))
            s_traceback = StringIO.StringIO()
            traceback.print_exc(file=s_traceback)
            logger.error(s_traceback.getvalue())

    def consume():
        channel.basic_consume(callback, queue=queues[0], no_ack=True)
        channel.basic_consume(callback, queue=queues[1], no_ack=True)
        channel.start_consuming()
    logger.info("Starting RabbitMQ events/logs polling - queues={0}".format(
        queues))

    # Daemon thread: must not keep the test process alive on exit.
    polling_thread = threading.Thread(target=consume)
    polling_thread.daemon = True
    polling_thread.start()
Ejemplo n.º 3
0
def start_events_and_logs_polling():
    """
    Fetches events and logs from RabbitMQ.
    """
    if not RABBITMQ_POLLING_ENABLED:
        return

    setup_default_logger('pika', logging.INFO)
    params = pika.ConnectionParameters(host='localhost')
    channel = pika.BlockingConnection(params).channel()
    queues = ['cloudify-events', 'cloudify-logs']
    for queue_name in queues:
        channel.queue_declare(queue=queue_name,
                              auto_delete=True,
                              durable=True,
                              exclusive=False)

    def on_message(ch, method, properties, body):
        # Log one event/log message, guarding against malformed payloads.
        try:
            parsed = json.loads(body)
            if RABBITMQ_VERBOSE_MESSAGES_ENABLED:
                text = '\n{0}'.format(json.dumps(parsed, indent=4))
            else:
                text = create_event_message_prefix(parsed)
            logger.info(text)
        except Exception as e:
            logger.error('event/log format error - output: {0} [message={1}]'
                         .format(body, e.message))
            buf = StringIO.StringIO()
            traceback.print_exc(file=buf)
            logger.error(buf.getvalue())

    def consume():
        for queue_name in queues:
            channel.basic_consume(on_message, queue=queue_name, no_ack=True)
        channel.start_consuming()
    logger.info("Starting RabbitMQ events/logs polling - queues={0}".format(
        queues))

    polling_thread = threading.Thread(target=consume)
    polling_thread.daemon = True
    polling_thread.start()
Ejemplo n.º 4
0
    def __init__(self, session_config, logger=None):
        """Create a WinRM runner from a session configuration.

        :param session_config: connection settings; must contain host,
            user and password (enforced by ``validate``).
        :param logger: optional logger; when omitted a default
            'WinRMRunner' logger is created.
        """

        logger = logger or setup_default_logger('WinRMRunner')

        # Validations - [host, user, password]
        validate(session_config)

        # Defaults - [protocol, uri, port]
        defaults(session_config)

        self.session_config = session_config
        self.session = self._create_session()
        self.logger = logger
Ejemplo n.º 5
0
    def __init__(self, session_config, logger=None):
        """Build the runner: validate the config, apply defaults and
        open a session.

        :param session_config: connection settings (host, user, password
            are required).
        :param logger: optional logger; defaults to a 'WinRMRunner' one.
        """
        logger = logger or setup_default_logger('WinRMRunner')

        # Validations - [host, user, password]
        validate(session_config)
        # Defaults - [protocol, uri, port]
        defaults(session_config)

        self.session_config = session_config
        self.session = self._create_session()
        self.logger = logger
Ejemplo n.º 6
0
    def setUpClass(cls):
        """Copy mock provider modules into TEST_PROVIDERS_DIR, make the
        directory importable and create the class-level test logger."""
        # copy provider to provider directory
        # this creates the directory as well
        shutil.copytree('{0}/resources/providers/mock_provider/'
                        .format(THIS_DIR), TEST_PROVIDERS_DIR)
        shutil.copy(
            '{0}/resources/providers/mock_provider_with_cloudify_prefix'
            '/cloudify_mock_provider_with_cloudify_prefix.py'
            .format(THIS_DIR), TEST_PROVIDERS_DIR
        )

        # append providers to path
        # so that its importable
        sys.path.append(TEST_PROVIDERS_DIR)

        cls.logger = setup_default_logger('CliCommandTest')
Ejemplo n.º 7
0
import os
import tempfile
import shutil
import pip

from os import path
from cloudify import utils
from cloudify.constants import VIRTUALENV_PATH_KEY
from cloudify.constants import CELERY_WORK_DIR_PATH_KEY
from cloudify.exceptions import NonRecoverableError
from cloudify.utils import LocalCommandRunner
from cloudify.utils import setup_default_logger
from cloudify.decorators import operation


logger = setup_default_logger('plugin_installer.tasks')
manager_branch = 'master'


@operation
def install(ctx, plugins, **kwargs):

    """
    Installs the given plugins.
    Each method decorated with the 'cloudify.decorators.operation'
    will be registered as a task.

    :param ctx: Invocation context. See class CloudifyContext @context.py
    :param plugins: A collection of plugins to install.
    """
Ejemplo n.º 8
0
            'task_retries': 0,
            'task_retry_interval': 0
        }
    }
}
PROVIDER_NAME = 'integration_tests'


celery = Celery(broker='amqp://',
                backend='amqp://')
celery.conf.update(
    CELERY_TASK_SERIALIZER="json"
)


logger = setup_default_logger('testenv.utils')


def task_exists(name, *args):
    """Probe task used by the integration tests.

    :param name: task name to check.
    :param args: extra arguments; only logged.
    :return: True when the task is considered to exist.
    :raises RuntimeError: when ``name`` contains 'non_existent'.
    """
    logger.info('task_exists invoked with : {0}'
                .format(args))
    if 'non_existent' in name:
        # Raise with context instead of a bare, message-less error so the
        # failure is diagnosable from logs; callers catching RuntimeError
        # are unaffected.
        raise RuntimeError('task {0} does not exist'.format(name))
    return True


def deploy_application(dsl_path,
                       timeout_seconds=30,
                       blueprint_id=None,
                       deployment_id=None,
                       wait_for_execution=True):
Ejemplo n.º 9
0
from mock import Mock
from nose.tools import nottest

from cloudify import constants
from cloudify.mocks import MockCloudifyContext
from cloudify.utils import setup_default_logger

from windows_agent_installer import tasks, WinRMRunner
from windows_agent_installer.tests import TEST_MACHINE_IP_ENV_VARIABLE
from windows_agent_installer.tasks import AGENT_INCLUDES
from windows_agent_installer.tests.test_winrm_runner import WinRMRunnerTest


PACKAGE_URL = 'https://dl.dropboxusercontent.com/u/3588656/Cloudify.exe'

logger = setup_default_logger('test_tasks')

# Configure mocks
tasks.get_agent_package_url = lambda: PACKAGE_URL
tasks.utils.get_manager_ip = lambda: 'localhost'
tasks.utils.get_manager_file_server_blueprints_root_url = lambda: 'localhost'
tasks.utils.get_manager_file_server_url = lambda: 'localhost'
tasks.utils.get_manager_rest_service_port = lambda: 8080

attempts = 0


def get_worker_stats(worker_name):
    logger.info('Retrieving worker {0} stats'
                .format(worker_name))
    global attempts
Ejemplo n.º 10
0
#    * limitations under the License.

import logging
import re
import shlex
import subprocess
import time
import sys
import os
import elasticsearch

from cloudify.utils import setup_default_logger
from testenv.constants import STORAGE_INDEX_NAME


logger = setup_default_logger('elasticsearch_process')


class ElasticSearchProcess(object):
    """
    Manages an ElasticSearch server process lifecycle.
    """

    def __init__(self):
        """Initialize bookkeeping and configure elasticsearch loggers."""
        self._pid = None
        self._process = None
        # Configure both elasticsearch client loggers at INFO level.
        for name in ('elasticsearch', 'elasticsearch.trace'):
            setup_default_logger(name, logging.INFO)
Ejemplo n.º 11
0
import os
import shutil
import subprocess
import sys
import time

from os import path
from os.path import dirname
from cloudify.utils import setup_default_logger
from testenv.constants import FILE_SERVER_PORT
from testenv.constants import MANAGER_REST_PORT
from testenv.constants import FILE_SERVER_BLUEPRINTS_FOLDER
from cloudify.celery import celery as celery_client

logger = setup_default_logger('celery_worker_process')


class CeleryWorkerProcess(object):

    # populated by start
    pids = []

    def __init__(self,
                 queues,
                 test_working_dir,
                 additional_includes=None,
                 name=None,
                 hostname=None,
                 manager_rest_port=MANAGER_REST_PORT,
                 concurrency=1):
Ejemplo n.º 12
0
import tempfile
import unittest
import shutil

from cloudify.exceptions import NonRecoverableError
from cloudify.mocks import MockCloudifyContext
from cloudify.utils import LocalCommandRunner
from cloudify.utils import setup_default_logger
from plugin_installer.tasks import install, update_includes
from cloudify.constants import CELERY_WORK_DIR_PATH_KEY
from cloudify.constants import VIRTUALENV_PATH_KEY
from cloudify.constants import LOCAL_IP_KEY
from cloudify.constants import MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL_KEY


logger = setup_default_logger('test_plugin_installer')


def _get_local_path(ctx, plugin):
    return os.path.join(dirname(__file__),
                        plugin['source'])


class PluginInstallerTestCase(unittest.TestCase):

    TEST_BLUEPRINT_ID = 'test_id'
    MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL = 'localhost/blueprints'

    def setUp(self):

        self.temp_folder = tempfile.mkdtemp()
Ejemplo n.º 13
0
 def __init__(self):
     """Initialize process state and configure elasticsearch loggers."""
     # No server process has been started yet.
     self._pid = None
     self._process = None
     # Configure the elasticsearch client loggers at INFO level.
     setup_default_logger('elasticsearch', logging.INFO)
     setup_default_logger('elasticsearch.trace', logging.INFO)
Ejemplo n.º 14
0
#    * See the License for the specific language governing permissions and
#    * limitations under the License.

import os
import re
import subprocess
import threading
import time
import requests

from os import path
from testenv.constants import MANAGER_REST_PORT
from testenv.utils import get_resource
from cloudify.utils import setup_default_logger

logger = setup_default_logger('riemann_process')


class RiemannProcess(object):
    """
    Manages a riemann server process lifecycle.
    """
    def __init__(self, config_path, libs_path):
        self._config_path = config_path
        self._libs_path = libs_path
        self.pid = None
        self._process = None
        self._detector = None
        self._event = None
        self._riemann_logs = list()
Ejemplo n.º 15
0
import os
import shutil
import subprocess
import sys
import time

from os import path
from os.path import dirname
from cloudify.utils import setup_default_logger
from testenv.constants import FILE_SERVER_PORT
from testenv.constants import MANAGER_REST_PORT
from testenv.constants import FILE_SERVER_BLUEPRINTS_FOLDER
from cloudify.celery import celery as celery_client


logger = setup_default_logger('celery_worker_process')


class CeleryWorkerProcess(object):

    # populated by start
    pids = []

    def __init__(self,
                 queues,
                 test_working_dir,
                 additional_includes=None,
                 name=None,
                 hostname=None,
                 manager_rest_port=MANAGER_REST_PORT,
                 concurrency=1):
Ejemplo n.º 16
0
import os
import tempfile
import shutil
import pip

from os import path
from cloudify import utils
from cloudify.constants import VIRTUALENV_PATH_KEY
from cloudify.constants import CELERY_WORK_DIR_PATH_KEY
from cloudify.exceptions import NonRecoverableError
from cloudify.utils import LocalCommandRunner
from cloudify.utils import setup_default_logger
from cloudify.decorators import operation

logger = setup_default_logger('plugin_installer.tasks')
manager_branch = 'master'


@operation
def install(ctx, plugins, **kwargs):
    """
    Installs the given plugins.
    Each method decorated with the 'cloudify.decorators.operation'
    will be registered as a task.

    :param ctx: Invocation context. See class CloudifyContext @context.py
    :param plugins: A collection of plugins to install.
    """

    global logger
Ejemplo n.º 17
0
from os.path import dirname
import tempfile
import unittest
import shutil

from cloudify.exceptions import NonRecoverableError
from cloudify.mocks import MockCloudifyContext
from cloudify.utils import LocalCommandRunner
from cloudify.utils import setup_default_logger
from plugin_installer.tasks import install, update_includes
from cloudify.constants import CELERY_WORK_DIR_PATH_KEY
from cloudify.constants import VIRTUALENV_PATH_KEY
from cloudify.constants import LOCAL_IP_KEY
from cloudify.constants import MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL_KEY

logger = setup_default_logger('test_plugin_installer')


def _get_local_path(ctx, plugin):
    return os.path.join(dirname(__file__), plugin['source'])


class PluginInstallerTestCase(unittest.TestCase):

    TEST_BLUEPRINT_ID = 'test_id'
    MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL = 'localhost/blueprints'

    def setUp(self):

        self.temp_folder = tempfile.mkdtemp()
Ejemplo n.º 18
0
#    * See the License for the specific language governing permissions and
#    * limitations under the License.

import os
import subprocess
import sys
import time
import yaml

from cloudify.utils import setup_default_logger
from os.path import dirname
from os import path
from cloudify_rest_client import CloudifyClient
from testenv.constants import MANAGER_REST_PORT

logger = setup_default_logger('manager_rest_process')


class ManagerRestProcess(object):
    def __init__(self, port, file_server_dir, file_server_base_uri,
                 file_server_blueprints_folder,
                 file_server_uploaded_blueprints_folder,
                 file_server_resources_uri, tempdir):
        self.process = None
        self.port = port or MANAGER_REST_PORT
        self.file_server_dir = file_server_dir
        self.file_server_base_uri = file_server_base_uri
        self.file_server_blueprints_folder = file_server_blueprints_folder
        self.file_server_uploaded_blueprints_folder = \
            file_server_uploaded_blueprints_folder
        self.file_server_resources_uri = file_server_resources_uri
Ejemplo n.º 19
0
#    * limitations under the License.

import os
import re
import subprocess
import threading
import time
import requests

from os import path
from testenv.constants import MANAGER_REST_PORT
from testenv.utils import get_resource
from cloudify.utils import setup_default_logger


logger = setup_default_logger('riemann_process')


class RiemannProcess(object):
    """
    Manages a riemann server process lifecycle.
    """

    def __init__(self, config_path, libs_path):
        self._config_path = config_path
        self._libs_path = libs_path
        self.pid = None
        self._process = None
        self._detector = None
        self._event = None
        self._riemann_logs = list()
Ejemplo n.º 20
0
from os import path
from testenv.constants import MANAGER_REST_PORT
from testenv.constants import RABBITMQ_VERBOSE_MESSAGES_ENABLED
from testenv.constants import RABBITMQ_POLLING_ENABLED
from testenv.constants import FILE_SERVER_RESOURCES_URI
from testenv.constants import FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
from testenv.constants import FILE_SERVER_BLUEPRINTS_FOLDER
from testenv.processes.elastic import ElasticSearchProcess
from testenv.processes.manager_rest import ManagerRestProcess
from testenv.processes.riemann import RiemannProcess
from testenv import utils
from cloudify.utils import setup_default_logger
from testenv.processes.celery import CeleryWorkerProcess
from cloudify.logs import create_event_message_prefix

logger = setup_default_logger('TESTENV')
testenv_instance = None


class TestCase(unittest.TestCase):

    """
    A test case for cloudify integration tests.
    """

    def setUp(self):
        """Prepare one integration test: per-test logger, REST client,
        restored provider context and a running management worker."""
        # Name the logger after the test method so log lines are attributable.
        self.logger = setup_default_logger(self._testMethodName,
                                           logging.INFO)
        self.client = utils.create_rest_client()
        utils.restore_provider_context()
        TestEnvironment.start_celery_management_worker()
Ejemplo n.º 21
0
#    * See the License for the specific language governing permissions and
#    * limitations under the License.

import logging
import re
import shlex
import subprocess
import time
import sys
import os
import elasticsearch

from cloudify.utils import setup_default_logger
from testenv.constants import STORAGE_INDEX_NAME

logger = setup_default_logger('elasticsearch_process')


class ElasticSearchProcess(object):
    """
    Manages an ElasticSearch server process lifecycle.
    """
    def __init__(self):
        """Reset process state; configure elasticsearch loggers at INFO."""
        self._pid = None
        self._process = None
        for logger_name in ('elasticsearch', 'elasticsearch.trace'):
            setup_default_logger(logger_name, logging.INFO)

    @staticmethod
    def _verify_service_responsiveness(timeout=120):
        import urllib2
Ejemplo n.º 22
0
 def setUp(self):
     """Per-test setup: dedicated logger, REST client, provider context
     restore and the celery management worker."""
     self.logger = setup_default_logger(self._testMethodName,
                                        logging.INFO)
     self.client = utils.create_rest_client()
     utils.restore_provider_context()
     TestEnvironment.start_celery_management_worker()
Ejemplo n.º 23
0
from cloudify.logs import create_event_message_prefix

import mock_plugins
from testenv.constants import MANAGER_REST_PORT
from testenv.constants import RABBITMQ_VERBOSE_MESSAGES_ENABLED
from testenv.constants import RABBITMQ_POLLING_ENABLED
from testenv.constants import FILE_SERVER_RESOURCES_URI
from testenv.constants import FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
from testenv.constants import FILE_SERVER_BLUEPRINTS_FOLDER
from testenv.processes.elastic import ElasticSearchProcess
from testenv.processes.manager_rest import ManagerRestProcess
from testenv.processes.riemann import RiemannProcess
from testenv.processes.celery import CeleryWorkerProcess
from testenv import utils

logger = setup_default_logger('TESTENV')
setup_default_logger('cloudify.rest_client', logging.INFO)
testenv_instance = None


def riemann_cleanup(fn):
    """Decorator that tags *fn* with a ``riemann_cleanup`` marker
    attribute (set to True) and returns it unchanged."""
    setattr(fn, 'riemann_cleanup', True)
    return fn


class TestCase(unittest.TestCase):

    """
    A test case for cloudify integration tests.
    """
Ejemplo n.º 24
0
#    * limitations under the License.

import os
import subprocess
import sys
import time
import yaml

from cloudify.utils import setup_default_logger
from os.path import dirname
from os import path
from cloudify_rest_client import CloudifyClient
from testenv.constants import MANAGER_REST_PORT


logger = setup_default_logger('manager_rest_process')


class ManagerRestProcess(object):

    def __init__(self,
                 port,
                 file_server_dir,
                 file_server_base_uri,
                 file_server_blueprints_folder,
                 file_server_uploaded_blueprints_folder,
                 file_server_resources_uri,
                 tempdir):
        self.process = None
        self.port = port or MANAGER_REST_PORT
        self.file_server_dir = file_server_dir
Ejemplo n.º 25
0
 def setUp(self):
     """Set up a test: per-test logger, REST client, restored provider
     context and a started celery management worker."""
     self.logger = setup_default_logger(self._testMethodName,
                                        logging.INFO)
     self.client = utils.create_rest_client()
     utils.restore_provider_context()
     TestEnvironment.start_celery_management_worker()
Ejemplo n.º 26
0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############

import os
import sys

from cloudify.utils import setup_default_logger

from cloudify_cli import cli
from cloudify_cli.utils import DEFAULT_LOG_FILE

runner_lgr = setup_default_logger('cli_runner')


def run_cli_expect_system_exit_0(command):
    """Run *command* through the CLI and assert it exits with code 0."""
    run_cli_expect_system_exit_code(command, 0)


def run_cli_expect_system_exit_1(command):
    """Run *command* through the CLI and assert it exits with code 1."""
    run_cli_expect_system_exit_code(command, 1)


def run_cli_expect_system_exit_code(command, expected_code):
    """Run *command* and check the SystemExit code raised by the CLI.

    NOTE(review): if run_cli returns without raising SystemExit, these
    visible lines pass silently — confirm whether a follow-up failure
    path exists or is intended.
    """
    try:
        run_cli(command)
    except SystemExit as e:
        assert e.code == expected_code