def test_put_blueprint_archive_from_url(self):
    """Publish a blueprint from a tar.bz2 archive served over HTTP."""
    port = 53230
    blueprint_id = 'new_blueprint_id'
    archive_path = self.archive_mock_blueprint(
        archive_func=archiving.make_tarbz2file)
    archive_url = 'http://localhost:{0}/{1}'.format(
        port, os.path.basename(archive_path))
    file_server = FileServer(os.path.dirname(archive_path), False, port)
    file_server.start()
    try:
        self.wait_for_url(archive_url)
        blueprint_id = self.client.blueprints.publish_archive(
            archive_url, blueprint_id).id
        # verifying blueprint exists
        result = self.client.blueprints.get(blueprint_id)
        self.assertEqual(blueprint_id, result.id)
    finally:
        file_server.stop()
def setUp(self):
    """Prepare an isolated REST service for each test.

    Creates a temp file-server root and log files, points the storage
    manager at the test storage module, writes a temporary config file
    (needed before the 'server' module is imported), then resets server
    state and builds the test client.
    """
    self.tmpdir = tempfile.mkdtemp()
    self.rest_service_log = tempfile.mkstemp()[1]
    self.securest_log_file = tempfile.mkstemp()[1]
    self.file_server = FileServer(self.tmpdir)
    self.addCleanup(self.cleanup)
    self.file_server.start()
    storage_manager.storage_manager_module_name = \
        STORAGE_MANAGER_MODULE_NAME
    # workaround for setting the rest service log path, since it's
    # needed when 'server' module is imported.
    # right after the import the log path is set normally like the rest
    # of the variables (used in the reset_state)
    tmp_conf_file = tempfile.mkstemp()[1]
    # bugfix: use a context manager so the config file handle is closed
    # (the original passed an unclosed open() directly to json.dump)
    with open(tmp_conf_file, 'w') as f:
        json.dump({'rest_service_log_path': self.rest_service_log,
                   'rest_service_log_file_size_MB': 1,
                   'rest_service_log_files_backup_count': 1,
                   'rest_service_log_level': 'DEBUG'},
                  f)
    os.environ['MANAGER_REST_CONFIG_PATH'] = tmp_conf_file
    try:
        from manager_rest import server
    finally:
        del os.environ['MANAGER_REST_CONFIG_PATH']
    server.reset_state(self.create_configuration())
    utils.copy_resources(config.instance().file_server_root)
    server.setup_app()
    server.app.config['Testing'] = True
    self.app = server.app.test_client()
    self.client = self.create_client()
    self.initialize_provider_context()
def setUp(self):
    """Start a local file server and initialize the Flask test app."""
    self.tmpdir = tempfile.mkdtemp()
    self.file_server = FileServer(self.tmpdir)
    self.file_server.start()
    storage_manager.storage_manager_module_name = (
        STORAGE_MANAGER_MODULE_NAME)
    server.reset_state(self.create_configuration())
    util.copy_resources(config.instance().file_server_root)
    server.setup_app()
    server.app.config['Testing'] = True
    self.app = server.app.test_client()
def test_publish_archive_blueprint_main_file_name(self):
    """publish_archive should honor an explicit main file name."""
    port = 53230
    blueprint_id = 'publish_archive_blueprint_main_file_name'
    main_file_name = 'blueprint_with_workflows.yaml'
    archive_path = self.archive_mock_blueprint()
    file_server = FileServer(os.path.dirname(archive_path), False, port)
    file_server.start()
    try:
        archive_url = 'http://localhost:{0}/{1}'.format(
            port, os.path.basename(archive_path))
        self.wait_for_url(archive_url)
        response = self.client.blueprints.publish_archive(
            archive_url, blueprint_id, main_file_name)
    finally:
        file_server.stop()
    self.assertEqual(blueprint_id, response.id)
    self.assertEqual(main_file_name, response.main_file_name)
def test_put_blueprint_from_url(self):
    """PUT a blueprint whose archive is fetched from a remote URL."""
    port = 53230
    blueprint_id = 'new_blueprint_id'
    resource_path = '/blueprints/{0}'.format(blueprint_id)
    archive_path = self.archive_mock_blueprint()
    file_server = FileServer(os.path.dirname(archive_path), False, port)
    file_server.start()
    try:
        archive_url = 'http://localhost:{0}/{1}'.format(
            port, os.path.basename(archive_path))
        self.wait_for_url(archive_url)
        response = self.put(
            resource_path, None, {'blueprint_archive_url': archive_url})
        self.assertEqual(blueprint_id, response.json['id'])
    finally:
        file_server.stop()
def setUp(self):
    """Boot the REST service and a CloudifyClient routed through the
    in-process Flask test app via a mock HTTP transport."""
    self.tmpdir = tempfile.mkdtemp()
    self.file_server = FileServer(self.tmpdir)
    self.file_server.start()
    storage_manager.storage_manager_module_name = (
        STORAGE_MANAGER_MODULE_NAME)
    server.reset_state(self.create_configuration())
    util.copy_resources(config.instance().file_server_root)
    server.setup_app()
    server.app.config['Testing'] = True
    self.app = server.app.test_client()
    self.client = CloudifyClient('localhost')
    mock_http_client = MockHTTPClient(self.app)
    # point every client sub-API at the in-process test app
    for api_module in (self.client.blueprints,
                       self.client.deployments,
                       self.client.executions,
                       self.client.nodes,
                       self.client.node_instances,
                       self.client.manager):
        api_module.api = mock_http_client
def start_fileserver(self):
    """Start the manager file server and seed it with base resources."""
    # workaround to update path
    manager_rest_path = path.dirname(path.dirname(path.dirname(__file__)))
    manager_rest_path = path.join(manager_rest_path, "rest-service")
    sys.path.append(manager_rest_path)
    os.mkdir(self.fileserver_dir)
    from manager_rest.file_server import FileServer
    from manager_rest.utils import copy_resources
    self.file_server_process = FileServer(self.fileserver_dir)
    self.file_server_process.start()
    # copy resources (base yaml etc); the resources dir lives three
    # directory levels above this file
    resources_path = path.abspath(__file__)
    for _ in range(3):
        resources_path = path.dirname(resources_path)
    resources_path = path.join(resources_path, "resources")
    copy_resources(self.fileserver_dir, resources_path)
    self.patch_source_urls(self.fileserver_dir)
def setUp(self):
    """Prepare an isolated REST service and a mock-transport client.

    Writes a temporary config file holding the REST service log path
    (needed before the 'server' module is imported), resets server
    state, and wires every client sub-API to the Flask test app.
    """
    self.tmpdir = tempfile.mkdtemp()
    self.rest_service_log = tempfile.mkstemp()[1]
    self.file_server = FileServer(self.tmpdir)
    self.file_server.start()
    storage_manager.storage_manager_module_name = \
        STORAGE_MANAGER_MODULE_NAME
    # workaround for setting the rest service log path, since it's
    # needed when 'server' module is imported.
    # right after the import the log path is set normally like the rest
    # of the variables (used in the reset_state)
    tmp_conf_file = tempfile.mkstemp()[1]
    # bugfix: use a context manager so the config file handle is closed
    # (the original passed an unclosed open() directly to json.dump)
    with open(tmp_conf_file, 'w') as f:
        json.dump({'rest_service_log_path': self.rest_service_log}, f)
    os.environ['MANAGER_REST_CONFIG_PATH'] = tmp_conf_file
    try:
        from manager_rest import server
    finally:
        del os.environ['MANAGER_REST_CONFIG_PATH']
    server.reset_state(self.create_configuration())
    util.copy_resources(config.instance().file_server_root)
    server.setup_app()
    server.app.config['Testing'] = True
    self.app = server.app.test_client()
    self.client = CloudifyClient('localhost')
    mock_http_client = MockHTTPClient(self.app)
    self.client._client = mock_http_client
    self.client.blueprints.api = mock_http_client
    self.client.deployments.api = mock_http_client
    self.client.deployments.outputs.api = mock_http_client
    self.client.deployments.modify.api = mock_http_client
    self.client.executions.api = mock_http_client
    self.client.nodes.api = mock_http_client
    self.client.node_instances.api = mock_http_client
    self.client.manager.api = mock_http_client
    self.client.evaluate.api = mock_http_client
def test_publish_archive_blueprint_main_file_name(self):
    """The returned blueprint must carry the requested main file name."""
    port = 53230
    blueprint_id = 'publish_archive_blueprint_main_file_name'
    main_file_name = 'blueprint_with_workflows.yaml'
    archive_path = self.archive_mock_blueprint()
    served_dir = os.path.dirname(archive_path)
    file_server = FileServer(served_dir, False, port)
    file_server.start()
    try:
        archive_url = 'http://localhost:{0}/{1}'.format(
            port, os.path.basename(archive_path))
        self.wait_for_url(archive_url)
        response = self.client.blueprints.publish_archive(
            archive_url, blueprint_id, main_file_name)
    finally:
        file_server.stop()
    self.assertEqual(blueprint_id, response.id)
    self.assertEqual(main_file_name, response.main_file_name)
def test_put_blueprint_from_url(self):
    """PUT /blueprints/<id> with blueprint_archive_url query param."""
    port = 53230
    blueprint_id = 'new_blueprint_id'
    resource_path = '/blueprints/{0}'.format(blueprint_id)
    archive_path = self.archive_mock_blueprint()
    served_dir = os.path.dirname(archive_path)
    file_server = FileServer(served_dir, False, port)
    file_server.start()
    try:
        archive_url = 'http://localhost:{0}/{1}'.format(
            port, os.path.basename(archive_path))
        self.wait_for_url(archive_url)
        response = self.put(resource_path,
                            None,
                            {'blueprint_archive_url': archive_url})
        self.assertEqual(blueprint_id, response.json['id'])
    finally:
        file_server.stop()
def test_put_blueprint_archive_from_url(self):
    """publish_archive accepts a tar.bz2 archive served over HTTP."""
    port = 53230
    blueprint_id = 'new_blueprint_id'
    archive_path = self.archive_mock_blueprint(
        archive_func=archiving.make_tarbz2file)
    served_dir = os.path.dirname(archive_path)
    archive_url = 'http://localhost:{0}/{1}'.format(
        port, os.path.basename(archive_path))
    file_server = FileServer(served_dir, False, port)
    file_server.start()
    try:
        self.wait_for_url(archive_url)
        blueprint_id = self.client.blueprints.publish_archive(
            archive_url, blueprint_id).id
        # verifying blueprint exists
        result = self.client.blueprints.get(blueprint_id)
        self.assertEqual(blueprint_id, result.id)
    finally:
        file_server.stop()
def test_publish_bz2_archive_from_url(self):
    """Publish a bz2 blueprint archive served from a local file server."""
    port = 53231
    archive_location = self._make_archive_file("dsl/basic.yaml", 'w:bz2')
    archive_url = 'http://localhost:{0}/{1}'.format(
        port, os.path.basename(archive_location))
    file_server = FileServer(
        os.path.dirname(archive_location), False, port)
    file_server.start()
    try:
        wait_for_url(archive_url, timeout=30)
        blueprint_id = self.client.blueprints.publish_archive(
            archive_url, str(uuid.uuid4()), 'basic.yaml').id
        # verifying blueprint exists
        result = self.client.blueprints.get(blueprint_id)
        self.assertEqual(blueprint_id, result.id)
    finally:
        file_server.stop()
class BaseServerTestCase(unittest.TestCase):
    """Base test case for REST-service tests.

    Boots a local file server and an in-process Flask test app, builds a
    CloudifyClient wired through a mock HTTP transport, and provides raw
    HTTP helpers (post/put/get/...) plus blueprint/deployment fixtures.
    """

    def __init__(self, *args, **kwargs):
        super(BaseServerTestCase, self).__init__(*args, **kwargs)

    def create_client(self, headers=None):
        """Return a CloudifyClient whose sub-APIs all talk to self.app."""
        client = CloudifyClient(host='localhost', headers=headers)
        mock_http_client = MockHTTPClient(self.app, headers=headers)
        client._client = mock_http_client
        client.blueprints.api = mock_http_client
        client.deployments.api = mock_http_client
        client.deployments.outputs.api = mock_http_client
        client.deployment_modifications.api = mock_http_client
        client.executions.api = mock_http_client
        client.nodes.api = mock_http_client
        client.node_instances.api = mock_http_client
        client.manager.api = mock_http_client
        client.evaluate.api = mock_http_client
        client.tokens.api = mock_http_client
        client.events.api = mock_http_client
        # only exists in v2 and above
        if CLIENT_API_VERSION != 'v1':
            client.plugins.api = mock_http_client
            client.snapshots.api = mock_http_client
        return client

    def setUp(self):
        """Create temp dirs/logs, start the file server, reset the app."""
        self.tmpdir = tempfile.mkdtemp()
        self.rest_service_log = tempfile.mkstemp()[1]
        self.securest_log_file = tempfile.mkstemp()[1]
        self.file_server = FileServer(self.tmpdir)
        self.addCleanup(self.cleanup)
        self.file_server.start()
        storage_manager.storage_manager_module_name = \
            STORAGE_MANAGER_MODULE_NAME
        # workaround for setting the rest service log path, since it's
        # needed when 'server' module is imported.
        # right after the import the log path is set normally like the rest
        # of the variables (used in the reset_state)
        tmp_conf_file = tempfile.mkstemp()[1]
        # bugfix: close the config file handle (was an unclosed open()
        # passed straight into json.dump)
        with open(tmp_conf_file, 'w') as f:
            json.dump({'rest_service_log_path': self.rest_service_log,
                       'rest_service_log_file_size_MB': 1,
                       'rest_service_log_files_backup_count': 1,
                       'rest_service_log_level': 'DEBUG'},
                      f)
        os.environ['MANAGER_REST_CONFIG_PATH'] = tmp_conf_file
        try:
            from manager_rest import server
        finally:
            del os.environ['MANAGER_REST_CONFIG_PATH']
        server.reset_state(self.create_configuration())
        utils.copy_resources(config.instance().file_server_root)
        server.setup_app()
        server.app.config['Testing'] = True
        self.app = server.app.test_client()
        self.client = self.create_client()
        self.initialize_provider_context()

    def cleanup(self):
        """Best-effort teardown of log files and the file server."""
        self.quiet_delete(self.rest_service_log)
        self.quiet_delete(self.securest_log_file)
        if self.file_server:
            self.file_server.stop()

    def initialize_provider_context(self, client=None):
        if not client:
            client = self.client
        # creating an empty bootstrap context
        client.manager.create_context(self.id(), {'cloudify': {}})

    def create_configuration(self):
        """Build a manager_rest Config pointing at the test temp dirs."""
        from manager_rest.config import Config
        test_config = Config()
        test_config.test_mode = True
        test_config.file_server_root = self.tmpdir
        test_config.file_server_base_uri = 'http://localhost:{0}'.format(
            FILE_SERVER_PORT)
        test_config.file_server_blueprints_folder = \
            FILE_SERVER_BLUEPRINTS_FOLDER
        test_config.file_server_uploaded_blueprints_folder = \
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
        test_config.file_server_resources_uri = FILE_SERVER_RESOURCES_URI
        test_config.rest_service_log_level = 'DEBUG'
        test_config.rest_service_log_path = self.rest_service_log
        # bugfix: a stray trailing comma made this a tuple (100,)
        test_config.rest_service_log_file_size_MB = 100
        test_config.rest_service_log_files_backup_count = 20
        test_config.security_audit_log_level = 'DEBUG'
        test_config.security_audit_log_file = self.securest_log_file
        test_config.security_audit_log_file_size_MB = 100
        test_config.security_audit_log_files_backup_count = 20
        return test_config

    def _version_url(self, url):
        # method for versionifying URLs for requests which don't go through
        # the REST client; the version is taken from the REST client
        # regardless
        if CLIENT_API_VERSION not in url:
            url = '/api/{0}{1}'.format(CLIENT_API_VERSION, url)
        return url

    def post(self, resource_path, data, query_params=None):
        """POST JSON data to a versioned resource path."""
        url = self._version_url(resource_path)
        result = self.app.post(urllib.quote(url),
                               content_type='application/json',
                               data=json.dumps(data),
                               query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def post_file(self, resource_path, file_path, query_params=None):
        """POST the raw contents of a file to a versioned resource path."""
        url = self._version_url(resource_path)
        with open(file_path) as f:
            result = self.app.post(urllib.quote(url),
                                   data=f.read(),
                                   query_string=build_query_string(
                                       query_params))
        result.json = json.loads(result.data)
        return result

    def put_file(self, resource_path, file_path, query_params=None):
        """PUT the raw contents of a file to a versioned resource path."""
        url = self._version_url(resource_path)
        with open(file_path) as f:
            result = self.app.put(urllib.quote(url),
                                  data=f.read(),
                                  query_string=build_query_string(
                                      query_params))
        result.json = json.loads(result.data)
        return result

    def put(self, resource_path, data=None, query_params=None):
        """PUT optional JSON data to a versioned resource path."""
        url = self._version_url(resource_path)
        result = self.app.put(urllib.quote(url),
                              content_type='application/json',
                              data=json.dumps(data) if data else None,
                              query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def patch(self, resource_path, data):
        """PATCH JSON data to a versioned resource path."""
        url = self._version_url(resource_path)
        result = self.app.patch(urllib.quote(url),
                                content_type='application/json',
                                data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def get(self, resource_path, query_params=None, headers=None):
        """GET a versioned resource path; attaches parsed JSON to result."""
        url = self._version_url(resource_path)
        result = self.app.get(urllib.quote(url),
                              headers=headers,
                              query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def head(self, resource_path):
        url = self._version_url(resource_path)
        result = self.app.head(urllib.quote(url))
        return result

    def delete(self, resource_path, query_params=None):
        url = self._version_url(resource_path)
        result = self.app.delete(
            urllib.quote(url),
            query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def check_if_resource_on_fileserver(self, blueprint_id, resource_path):
        """Return True iff the resource is reachable on the file server."""
        url = 'http://localhost:{0}/{1}/{2}/{3}'.format(
            FILE_SERVER_PORT, FILE_SERVER_BLUEPRINTS_FOLDER,
            blueprint_id, resource_path)
        try:
            urllib2.urlopen(url)
            return True
        except urllib2.HTTPError:
            return False

    def get_blueprint_path(self, blueprint_dir_name):
        return os.path.join(os.path.dirname(
            os.path.abspath(__file__)), blueprint_dir_name)

    def archive_mock_blueprint(self, archive_func=archiving.make_targzfile,
                               blueprint_dir='mock_blueprint'):
        """Archive the mock blueprint dir and return the archive path."""
        archive_path = tempfile.mkstemp()[1]
        source_dir = os.path.join(os.path.dirname(
            os.path.abspath(__file__)), blueprint_dir)
        archive_func(archive_path, source_dir)
        return archive_path

    def get_mock_blueprint_path(self):
        return os.path.join(os.path.dirname(
            os.path.abspath(__file__)), 'mock_blueprint', 'blueprint.yaml')

    def put_blueprint_args(self, blueprint_file_name=None,
                           blueprint_id='blueprint',
                           archive_func=archiving.make_targzfile,
                           blueprint_dir='mock_blueprint'):
        """Build [url, archive_path, data] args for put_file()."""
        # bugfix: the original format string passed CLIENT_API_VERSION as
        # an unused {0} argument; _version_url adds the version prefix
        resource_path = self._version_url(
            '/blueprints/{0}'.format(blueprint_id))
        result = [
            resource_path,
            self.archive_mock_blueprint(archive_func, blueprint_dir),
        ]
        if blueprint_file_name:
            data = {'application_file_name': blueprint_file_name}
        else:
            data = {}
        result.append(data)
        return result

    def put_deployment(self,
                       deployment_id='deployment',
                       blueprint_file_name=None,
                       blueprint_id='blueprint',
                       inputs=None):
        """Upload a blueprint and create a deployment from it."""
        blueprint_response = self.put_file(
            *self.put_blueprint_args(blueprint_file_name,
                                     blueprint_id)).json
        if 'error_code' in blueprint_response:
            raise RuntimeError(
                '{}: {}'.format(blueprint_response['error_code'],
                                blueprint_response['message']))
        blueprint_id = blueprint_response['id']
        deployment = self.client.deployments.create(blueprint_id,
                                                    deployment_id,
                                                    inputs=inputs)
        return blueprint_id, deployment.id, blueprint_response, deployment

    def upload_plugin(self, package_name, package_version):
        """Build a wheel for the package and POST it to /plugins."""
        temp_file_path = self.create_wheel(package_name, package_version)
        response = self.post_file('/plugins', temp_file_path)
        os.remove(temp_file_path)
        return response

    def create_wheel(self, package_name, package_version):
        module_src = '{0}=={1}'.format(package_name, package_version)
        wagon_client = Wagon(module_src)
        return wagon_client.create(
            archive_destination_dir=tempfile.gettempdir(), force=True)

    def wait_for_url(self, url, timeout=5):
        """Poll until the url answers 200, or raise after timeout seconds."""
        end = time.time() + timeout
        while end >= time.time():
            try:
                status = urllib.urlopen(url).getcode()
                if status == 200:
                    return
            except IOError:
                time.sleep(1)
        raise RuntimeError('Url {0} is not available (waited {1} '
                           'seconds)'.format(url, timeout))

    @staticmethod
    def quiet_delete(file_path):
        """Delete a file, ignoring failures (e.g. file already gone)."""
        try:
            os.remove(file_path)
        # bugfix: narrowed from a bare except that would also swallow
        # KeyboardInterrupt/SystemExit
        except OSError:
            pass

    def wait_for_deployment_creation(self, client, deployment_id):
        """Block until the deployment-environment-creation workflow ends."""
        env_creation_execution = None
        deployment_executions = client.executions.list(deployment_id)
        for execution in deployment_executions:
            if execution.workflow_id == 'create_deployment_environment':
                env_creation_execution = execution
                break
        if env_creation_execution:
            self.wait_for_execution(client, env_creation_execution)

    @staticmethod
    def wait_for_execution(client, execution, timeout=900):
        # Poll for execution status until execution ends
        deadline = time.time() + timeout
        while True:
            if time.time() > deadline:
                raise Exception(
                    'execution of operation {0} for deployment {1} timed out'.
                    format(execution.workflow_id, execution.deployment_id))
            execution = client.executions.get(execution.id)
            if execution.status in Execution.END_STATES:
                break
            time.sleep(3)
class TestEnvironment(object):
    """Owns the external processes that make up the integration test
    environment: riemann, elasticsearch, the REST service, the file
    server, and the celery management worker."""

    manager_rest_process = None
    elasticsearch_process = None
    riemann_process = None
    file_server_process = None
    celery_management_worker_process = None

    def __init__(self, test_working_dir):
        super(TestEnvironment, self).__init__()
        self.test_working_dir = test_working_dir
        self.plugins_storage_dir = os.path.join(self.test_working_dir,
                                                'plugins-storage')
        os.makedirs(self.plugins_storage_dir)
        self.fileserver_dir = path.join(self.test_working_dir, 'fileserver')
        self.rest_service_log_path = path.join(
            self.test_working_dir, 'cloudify-rest-service.log')

    def create(self):
        """Bring up every process; tear everything down on any failure."""
        try:
            logger.info('Setting up test environment... workdir=[{0}]'
                        .format(self.test_working_dir))
            # events/logs polling
            start_events_and_logs_polling()
            self.start_elasticsearch()
            self.start_riemann()
            self.start_fileserver()
            self.start_manager_rest()
            self.create_management_worker()
        except BaseException as error:
            tb_buffer = StringIO.StringIO()
            traceback.print_exc(file=tb_buffer)
            logger.error("Error in test environment setup: %s", error)
            logger.error(tb_buffer.getvalue())
            self.destroy()
            raise

    def create_management_worker(self):
        """Start the celery management worker with mock plugins on path."""
        mock_plugins_path = os.path.dirname(mock_plugins.__file__)
        os.environ['MOCK_PLUGINS_PATH'] = mock_plugins_path
        self.celery_management_worker_process = CeleryWorkerProcess(
            queues=['cloudify.management'],
            test_working_dir=self.test_working_dir,
            # these plugins are already installed.
            # so we just need to append to the includes.
            # note that these are not mocks, but the actual production
            # code plugins.
            additional_includes=[
                'riemann_controller.tasks',
                'cloudify_system_workflows.deployment_environment',
                'cloudify.plugins.workflows',
                'diamond_agent.tasks',
                'script_runner.tasks'
            ],
            # we need higher concurrency since
            # 'deployment_environment.create' calls
            # 'plugin_installer.install' as a sub-task
            # and they are both executed inside
            # this worker
            concurrency=2
        )
        # copy plugins to worker env
        shutil.copytree(
            src=mock_plugins_path,
            dst=self.celery_management_worker_process.envdir,
            ignore=shutil.ignore_patterns('*.pyc')
        )

    def start_riemann(self):
        self.riemann_process = RiemannProcess(self._get_riemann_config(),
                                              self._get_libs_path())
        self.riemann_process.start()

    def start_manager_rest(self):
        from manager_rest.file_server import PORT as FS_PORT
        file_server_base_uri = 'http://localhost:{0}'.format(FS_PORT)
        self.manager_rest_process = ManagerRestProcess(
            MANAGER_REST_PORT,
            self.fileserver_dir,
            file_server_base_uri,
            FILE_SERVER_BLUEPRINTS_FOLDER,
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER,
            FILE_SERVER_RESOURCES_URI,
            self.rest_service_log_path,
            self.test_working_dir)
        self.manager_rest_process.start()

    def start_elasticsearch(self):
        # elasticsearch
        self.elasticsearch_process = ElasticSearchProcess()
        self.elasticsearch_process.start()

    def start_fileserver(self):
        """Start the manager file server and copy base resources into it."""
        # workaround to update path
        manager_rest_path = \
            path.dirname(path.dirname(path.dirname(__file__)))
        manager_rest_path = path.join(manager_rest_path, 'rest-service')
        sys.path.append(manager_rest_path)
        os.mkdir(self.fileserver_dir)
        from manager_rest.file_server import FileServer
        from manager_rest.util import copy_resources
        self.file_server_process = FileServer(self.fileserver_dir)
        self.file_server_process.start()
        # copy resources (base yaml etc); resources dir is three
        # directory levels above this file
        resources_path = path.abspath(__file__)
        for _ in range(3):
            resources_path = path.dirname(resources_path)
        resources_path = path.join(resources_path, 'resources')
        copy_resources(self.fileserver_dir, resources_path)
        self.patch_source_urls(self.fileserver_dir)

    def destroy(self):
        """Stop every started process and remove the working directory."""
        logger.info('Destroying test environment...')
        if self.riemann_process:
            self.riemann_process.close()
        if self.elasticsearch_process:
            self.elasticsearch_process.close()
        if self.manager_rest_process:
            self.manager_rest_process.close()
        if self.file_server_process:
            self.file_server_process.stop()
        self.delete_working_directory()

    def delete_working_directory(self):
        if os.path.exists(self.test_working_dir):
            logger.info('Deleting test environment from: %s',
                        self.test_working_dir)
            shutil.rmtree(self.test_working_dir, ignore_errors=True)

    @classmethod
    def _get_riemann_config(cls):
        return os.path.join(cls._get_manager_root(),
                            'plugins',
                            'riemann-controller',
                            'riemann_controller',
                            'resources',
                            'manager.config')

    @classmethod
    def _get_libs_path(cls):
        return path.join(cls._get_manager_root(), '.libs')

    @staticmethod
    def reset_elasticsearch_data():
        global testenv_instance
        testenv_instance.elasticsearch_process.reset_data()

    @staticmethod
    def stop_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.stop()

    @staticmethod
    def read_celery_management_logs():
        global testenv_instance
        process = testenv_instance.celery_management_worker_process
        return process.try_read_logfile()

    @classmethod
    def stop_all_celery_processes(cls):
        logger.info('Shutting down all celery processes')
        os.system("pkill -9 -f 'celery worker'")

    @staticmethod
    def start_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.start()

    @staticmethod
    def riemann_cleanup():
        """Wipe the riemann workdir and restart the riemann process."""
        global testenv_instance
        shutil.rmtree(TestEnvironment.riemann_workdir())
        os.mkdir(TestEnvironment.riemann_workdir())
        testenv_instance.riemann_process.restart()

    @staticmethod
    def riemann_workdir():
        global testenv_instance
        worker = testenv_instance.celery_management_worker_process
        return worker.riemann_config_dir

    @staticmethod
    def _get_manager_root():
        # three levels up from this file: testenv -> tests -> manager root
        return dirname(dirname(dirname(__file__)))

    @staticmethod
    def patch_source_urls(resources):
        """Rewrite absolute policy/trigger source URLs in types.yaml to
        paths relative to the file server root."""
        types_file = path.join(resources, 'cloudify', 'types', 'types.yaml')
        with open(types_file) as f:
            types_yaml = yaml.safe_load(f.read())
        for policy_type in types_yaml.get('policy_types', {}).values():
            in_path = '/cloudify/policies/'
            source = policy_type['source']
            if in_path in source:
                source = source[source.index(in_path) + 1:]
            policy_type['source'] = source
        for policy_trigger in types_yaml.get('policy_triggers', {}).values():
            in_path = '/cloudify/triggers/'
            source = policy_trigger['source']
            if in_path in source:
                source = source[source.index(in_path) + 1:]
            policy_trigger['source'] = source
        with open(types_file, 'w') as f:
            f.write(yaml.safe_dump(types_yaml))
class BaseServerTestCase(unittest.TestCase):
    """Base test case that boots the REST service Flask app alongside a
    local file server and exposes raw HTTP helper methods."""

    def setUp(self):
        """Start the file server, reset server state, wire the client."""
        self.tmpdir = tempfile.mkdtemp()
        self.file_server = FileServer(self.tmpdir)
        self.file_server.start()
        storage_manager.storage_manager_module_name = \
            STORAGE_MANAGER_MODULE_NAME
        server.reset_state(self.create_configuration())
        util.copy_resources(config.instance().file_server_root)
        server.setup_app()
        server.app.config['Testing'] = True
        self.app = server.app.test_client()
        self.client = CloudifyClient('localhost')
        mock_http_client = MockHTTPClient(self.app)
        self.client.blueprints.api = mock_http_client
        self.client.deployments.api = mock_http_client
        self.client.deployments.outputs.api = mock_http_client
        self.client.executions.api = mock_http_client
        self.client.nodes.api = mock_http_client
        self.client.node_instances.api = mock_http_client
        self.client.manager.api = mock_http_client

    def tearDown(self):
        self.file_server.stop()

    def create_configuration(self):
        """Build a manager_rest Config pointing at the test temp dirs."""
        from manager_rest.config import Config
        test_config = Config()
        test_config.test_mode = True
        test_config.file_server_root = self.tmpdir
        test_config.file_server_base_uri = 'http://localhost:{0}'.format(
            FILE_SERVER_PORT)
        test_config.file_server_blueprints_folder = \
            FILE_SERVER_BLUEPRINTS_FOLDER
        test_config.file_server_uploaded_blueprints_folder = \
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
        test_config.file_server_resources_uri = FILE_SERVER_RESOURCES_URI
        return test_config

    def post(self, resource_path, data, query_params=None):
        """POST JSON data to a resource path."""
        url = self._build_url(resource_path, query_params)
        result = self.app.post(url,
                               content_type='application/json',
                               data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def post_file(self, resource_path, file_path, query_params=None):
        """POST the raw contents of a file to a resource path."""
        with open(file_path) as f:
            result = self.app.post(
                self._build_url(resource_path, query_params),
                data=f.read())
        result.json = json.loads(result.data)
        return result

    def put_file(self, resource_path, file_path, query_params=None):
        """PUT the raw contents of a file to a resource path."""
        with open(file_path) as f:
            result = self.app.put(
                self._build_url(resource_path, query_params),
                data=f.read())
        result.json = json.loads(result.data)
        return result

    def put(self, resource_path, data):
        """PUT JSON data to a resource path."""
        result = self.app.put(urllib.quote(resource_path),
                              content_type='application/json',
                              data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def patch(self, resource_path, data):
        """PATCH JSON data to a resource path."""
        result = self.app.patch(urllib.quote(resource_path),
                                content_type='application/json',
                                data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def get(self, resource_path, query_params=None):
        result = self.app.get(self._build_url(resource_path, query_params))
        result.json = json.loads(result.data)
        return result

    def head(self, resource_path):
        result = self.app.head(urllib.quote(resource_path))
        return result

    def delete(self, resource_path, query_params=None):
        result = self.app.delete(self._build_url(resource_path,
                                                 query_params))
        result.json = json.loads(result.data)
        return result

    def check_if_resource_on_fileserver(self, blueprint_id, resource_path):
        """Return True iff the resource is reachable on the file server."""
        url = 'http://localhost:{0}/{1}/{2}/{3}'.format(
            FILE_SERVER_PORT, FILE_SERVER_BLUEPRINTS_FOLDER,
            blueprint_id, resource_path)
        try:
            urllib2.urlopen(url)
            return True
        except urllib2.HTTPError:
            return False

    def put_blueprint_args(self, blueprint_file_name=None,
                           blueprint_id='blueprint'):
        """Build [url, archive_path, data] args for put_file()."""
        def make_tarfile(output_filename, source_dir):
            with tarfile.open(output_filename, "w:gz") as tar:
                tar.add(source_dir, arcname=os.path.basename(source_dir))

        def tar_mock_blueprint():
            # bugfix: mkstemp instead of the race-prone, deprecated mktemp
            tar_path = tempfile.mkstemp()[1]
            source_dir = os.path.join(os.path.dirname(
                os.path.abspath(__file__)), 'mock_blueprint')
            make_tarfile(tar_path, source_dir)
            return tar_path

        resource_path = '/blueprints/{0}'.format(blueprint_id)
        result = [
            resource_path,
            tar_mock_blueprint(),
        ]
        if blueprint_file_name:
            data = {'application_file_name': blueprint_file_name}
        else:
            data = {}
        result.append(data)
        return result

    def put_deployment(self,
                       deployment_id='deployment',
                       blueprint_file_name=None,
                       blueprint_id='blueprint',
                       inputs=None):
        """Upload a blueprint and create a deployment from it."""
        blueprint_response = self.put_file(
            *self.put_blueprint_args(blueprint_file_name,
                                     blueprint_id)).json
        if 'error_code' in blueprint_response:
            raise RuntimeError(
                '{}: {}'.format(blueprint_response['error_code'],
                                blueprint_response['message']))
        blueprint_id = blueprint_response['id']
        deployment = self.client.deployments.create(blueprint_id,
                                                    deployment_id,
                                                    inputs=inputs)
        return blueprint_id, deployment.id, blueprint_response, deployment

    def _build_url(self, resource_path, query_params):
        """Quote the path and append an encoded query string if any."""
        if query_params:
            # bugfix: the original emitted a malformed '?&key=value'
            # (stray '&' right after the '?')
            return '{0}?{1}'.format(urllib.quote(resource_path),
                                    urllib.urlencode(query_params))
        return resource_path
class BaseServerTestCase(unittest.TestCase):
    """Base class for REST-service unit tests.

    Boots an in-process file server, loads the ``manager_rest`` Flask app
    with a temporary configuration, and exposes a mocked REST client
    (``self.client``) plus raw HTTP helpers (post/put/get/...) that route
    through the Flask test client with API-versioned URLs.
    """

    def __init__(self, *args, **kwargs):
        super(BaseServerTestCase, self).__init__(*args, **kwargs)

    def create_client(self, api_version=DEFAULT_API_VERSION, headers=None):
        """Return a CloudifyClient whose transport is the Flask test app."""
        client = CloudifyClient(host='localhost',
                                api_version=api_version,
                                headers=headers)
        mock_http_client = MockHTTPClient(self.app,
                                          api_version=api_version,
                                          headers=headers)
        client._client = mock_http_client
        client.blueprints.api = mock_http_client
        client.deployments.api = mock_http_client
        client.deployments.outputs.api = mock_http_client
        client.deployment_modifications.api = mock_http_client
        client.executions.api = mock_http_client
        client.nodes.api = mock_http_client
        client.node_instances.api = mock_http_client
        client.manager.api = mock_http_client
        client.evaluate.api = mock_http_client
        client.tokens.api = mock_http_client
        return client

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        self.rest_service_log = tempfile.mkstemp()[1]
        self.securest_log_file = tempfile.mkstemp()[1]
        self.file_server = FileServer(self.tmpdir)
        self.addCleanup(self.cleanup)
        self.file_server.start()
        storage_manager.storage_manager_module_name = \
            STORAGE_MANAGER_MODULE_NAME

        # workaround for setting the rest service log path, since it's
        # needed when 'server' module is imported.
        # right after the import the log path is set normally like the rest
        # of the variables (used in the reset_state)
        tmp_conf_file = tempfile.mkstemp()[1]
        # a context manager ensures the temp config file is flushed and
        # closed before the server module reads it (the original leaked
        # the handle returned by a bare open())
        with open(tmp_conf_file, 'w') as f:
            json.dump({'rest_service_log_path': self.rest_service_log,
                       'rest_service_log_file_size_MB': 1,
                       'rest_service_log_files_backup_count': 1,
                       'rest_service_log_level': 'DEBUG'},
                      f)
        os.environ['MANAGER_REST_CONFIG_PATH'] = tmp_conf_file
        try:
            from manager_rest import server
        finally:
            del os.environ['MANAGER_REST_CONFIG_PATH']
        server.reset_state(self.create_configuration())
        utils.copy_resources(config.instance().file_server_root)
        server.setup_app()
        server.app.config['Testing'] = True
        self.app = server.app.test_client()
        self.client = self.create_client()
        self.initialize_provider_context()

    def cleanup(self):
        """Remove per-test log files and stop the file server."""
        self.quiet_delete(self.rest_service_log)
        self.quiet_delete(self.securest_log_file)
        if self.file_server:
            self.file_server.stop()

    def initialize_provider_context(self):
        # creating an empty bootstrap context
        self.client.manager.create_context(self.id(), {'cloudify': {}})

    def create_configuration(self):
        """Build the manager-rest Config consumed by server.reset_state()."""
        from manager_rest.config import Config
        test_config = Config()
        test_config.test_mode = True
        test_config.file_server_root = self.tmpdir
        test_config.file_server_base_uri = 'http://localhost:{0}'.format(
            FILE_SERVER_PORT)
        test_config.file_server_blueprints_folder = \
            FILE_SERVER_BLUEPRINTS_FOLDER
        test_config.file_server_uploaded_blueprints_folder = \
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
        test_config.file_server_resources_uri = FILE_SERVER_RESOURCES_URI
        test_config.rest_service_log_level = 'DEBUG'
        test_config.rest_service_log_path = self.rest_service_log
        # NOTE: the original read '= 100,' which silently assigned the
        # 1-tuple (100,); the size is a plain int like the other settings
        test_config.rest_service_log_file_size_MB = 100
        test_config.rest_service_log_files_backup_count = 20
        test_config.securest_log_level = 'DEBUG'
        test_config.securest_log_file = self.securest_log_file
        test_config.securest_log_file_size_MB = 100
        test_config.securest_log_files_backup_count = 20
        return test_config

    def post(self, resource_path, data, query_params=None):
        url = self.client._client.version_url(resource_path)
        result = self.app.post(urllib.quote(url),
                               content_type='application/json',
                               data=json.dumps(data),
                               query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def post_file(self, resource_path, file_path, query_params=None):
        url = self.client._client.version_url(resource_path)
        with open(file_path) as f:
            result = self.app.post(urllib.quote(url),
                                   data=f.read(),
                                   query_string=build_query_string(
                                       query_params))
        result.json = json.loads(result.data)
        return result

    def put_file(self, resource_path, file_path, query_params=None):
        url = self.client._client.version_url(resource_path)
        with open(file_path) as f:
            result = self.app.put(urllib.quote(url),
                                  data=f.read(),
                                  query_string=build_query_string(
                                      query_params))
        result.json = json.loads(result.data)
        return result

    def put(self, resource_path, data=None, query_params=None):
        url = self.client._client.version_url(resource_path)
        result = self.app.put(urllib.quote(url),
                              content_type='application/json',
                              data=json.dumps(data) if data else None,
                              query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def patch(self, resource_path, data):
        url = self.client._client.version_url(resource_path)
        result = self.app.patch(urllib.quote(url),
                                content_type='application/json',
                                data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def get(self, resource_path, query_params=None, headers=None):
        url = self.client._client.version_url(resource_path)
        result = self.app.get(urllib.quote(url),
                              headers=headers,
                              query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def head(self, resource_path):
        url = self.client._client.version_url(resource_path)
        result = self.app.head(urllib.quote(url))
        return result

    def delete(self, resource_path, query_params=None):
        url = self.client._client.version_url(resource_path)
        result = self.app.delete(urllib.quote(url),
                                 query_string=build_query_string(
                                     query_params))
        result.json = json.loads(result.data)
        return result

    def check_if_resource_on_fileserver(self, blueprint_id, resource_path):
        """Return True if the resource is reachable on the file server."""
        url = 'http://localhost:{0}/{1}/{2}/{3}'.format(
            FILE_SERVER_PORT, FILE_SERVER_BLUEPRINTS_FOLDER,
            blueprint_id, resource_path)
        try:
            urllib2.urlopen(url)
            return True
        except urllib2.HTTPError:
            return False

    def archive_mock_blueprint(self, archive_func=archiving.make_targzfile,
                               blueprint_dir='mock_blueprint'):
        """Archive *blueprint_dir* with *archive_func*; return archive path."""
        archive_path = tempfile.mkstemp()[1]
        source_dir = os.path.join(os.path.dirname(
            os.path.abspath(__file__)), blueprint_dir)
        archive_func(archive_path, source_dir)
        return archive_path

    def get_mock_blueprint_path(self):
        return os.path.join(os.path.dirname(
            os.path.abspath(__file__)), 'mock_blueprint', 'blueprint.yaml')

    def put_blueprint_args(self, blueprint_file_name=None,
                           blueprint_id='blueprint',
                           archive_func=archiving.make_targzfile,
                           blueprint_dir='mock_blueprint',
                           api_version=DEFAULT_API_VERSION):
        """Build (resource_path, archive_path, data) args for put_file()."""
        resource_path = '/{0}/blueprints/{1}'.format(api_version,
                                                     blueprint_id)
        result = [
            resource_path,
            self.archive_mock_blueprint(archive_func, blueprint_dir),
        ]
        if blueprint_file_name:
            data = {'application_file_name': blueprint_file_name}
        else:
            data = {}
        result.append(data)
        return result

    def put_deployment(self,
                       deployment_id='deployment',
                       blueprint_file_name=None,
                       blueprint_id='blueprint',
                       inputs=None):
        """Upload a mock blueprint and create a deployment from it.

        Raises RuntimeError if the blueprint upload returned an error body.
        """
        blueprint_response = self.put_file(
            *self.put_blueprint_args(blueprint_file_name,
                                     blueprint_id)).json
        if 'error_code' in blueprint_response:
            raise RuntimeError(
                '{}: {}'.format(blueprint_response['error_code'],
                                blueprint_response['message']))
        blueprint_id = blueprint_response['id']
        deployment = self.client.deployments.create(blueprint_id,
                                                    deployment_id,
                                                    inputs=inputs)
        return blueprint_id, deployment.id, blueprint_response, deployment

    def wait_for_url(self, url, timeout=5):
        """Poll *url* until it returns HTTP 200 or *timeout* seconds pass."""
        end = time.time() + timeout
        while end >= time.time():
            try:
                status = urllib.urlopen(url).getcode()
                if status == 200:
                    return
            except IOError:
                time.sleep(1)
        raise RuntimeError('Url {0} is not available (waited {1} '
                           'seconds)'.format(url, timeout))

    @staticmethod
    def quiet_delete(file_path):
        """Best-effort file removal; a missing file is not an error."""
        try:
            os.remove(file_path)
        except OSError:
            # narrowed from a bare except: only swallow filesystem errors
            pass
class TestEnvironment(object):
    """
    Creates the cosmo test environment:
        - Riemann server.
        - Elasticsearch server.
        - Celery worker.
        - Ruote service.
        - Prepares celery app dir with plugins from cosmo module and official
          riemann configurer and plugin installer.
    """
    # singleton instance managed by create()/destroy()
    _instance = None
    _celery_operations_worker_process = None
    _celery_workflows_worker_process = None
    _riemann_process = None
    _elasticsearch_process = None
    _manager_rest_process = None
    _tempdir = None
    _plugins_tempdir = None
    _scope = None
    _file_server_process = None

    def __init__(self, scope, use_mock_deployment_environment_workflows=True):
        # Starts every backing process in dependency order; on ANY failure
        # (including KeyboardInterrupt, hence BaseException) everything
        # started so far is torn down before re-raising.
        try:
            TestEnvironmentScope.validate(scope)

            logger.info("Setting up test environment... [scope={0}]".format(
                scope))
            self._scope = scope

            # temp directory
            self._tempdir = tempfile.mkdtemp(suffix="test", prefix="cloudify")
            self._plugins_tempdir = path.join(self._tempdir, "cosmo-work")
            self._riemann_tempdir = path.join(self._tempdir, "riemann")
            logger.info("Test environment will be stored in: %s",
                        self._tempdir)
            if not path.exists(self._plugins_tempdir):
                os.makedirs(self._plugins_tempdir)
            if not path.exists(self._riemann_tempdir):
                os.makedirs(self._riemann_tempdir)

            # events/logs polling
            start_events_and_logs_polling()

            # riemann
            riemann_config_path = self._get_riemann_config()
            libs_path = self._get_libs_path()
            self._riemann_process = RiemannProcess(riemann_config_path,
                                                   libs_path)
            self._riemann_process.start()

            # elasticsearch
            self._elasticsearch_process = ElasticSearchProcess()
            self._elasticsearch_process.start()

            # copy all plugins to app path
            # prefer an installed 'workflows' package; fall back to walking
            # up from the tests package to the repo's workflows directory
            try:
                import workflows
                # workflows/__init__.py(c)
                workflow_plugin_path = path.abspath(workflows.__file__)
                # workflows/
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # package / egg folder
                workflow_plugin_path = path.dirname(workflow_plugin_path)
            except ImportError:
                # cloudify-manager/tests/plugins/__init__.py(c)
                workflow_plugin_path = path.abspath(plugins.__file__)
                # cloudify-manager/tests/plugins
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/tests
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/workflows
                workflow_plugin_path = path.join(workflow_plugin_path,
                                                 'workflows')

            plugins_path = path.dirname(path.realpath(plugins.__file__))
            mock_workflow_plugins = path.dirname(path.realpath(
                mock_workflows.__file__))

            app_path = path.join(self._tempdir, "plugins")
            # copying plugins
            if not use_mock_deployment_environment_workflows:
                for plugin_path in [plugins_path, workflow_plugin_path]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)
            else:
                # copying plugins and mock workflows
                for plugin_path in [plugins_path, mock_workflow_plugins]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)
                # copying the actual default install/uninstall workflow
                # plugin manually
                workflow_plugin_workflows_path = path.join(
                    workflow_plugin_path, 'workflows')
                app_workflows_path = path.join(app_path, 'workflows')
                logger.info("Copying %s to %s",
                            workflow_plugin_workflows_path,
                            app_workflows_path)
                distutils.dir_util.copy_tree(
                    workflow_plugin_workflows_path, app_workflows_path)

            # celery operations worker
            # if using real deployment environment workflows then 2 workers
            # are needed on the management queue
            num_of_management_workers = \
                1 if use_mock_deployment_environment_workflows else 2
            self._celery_operations_worker_process = \
                CeleryOperationsWorkerProcess(
                    self._tempdir,
                    self._plugins_tempdir,
                    MANAGER_REST_PORT,
                    num_of_management_workers)
            self._celery_operations_worker_process.start()

            # celery workflows worker
            self._celery_workflows_worker_process = \
                CeleryWorkflowsWorkerProcess(
                    self._tempdir,
                    self._plugins_tempdir,
                    MANAGER_REST_PORT,
                    use_mock_deployment_environment_workflows)
            self._celery_workflows_worker_process.start()

            # workaround to update path so manager_rest (which lives in the
            # sibling 'rest-service' directory) becomes importable
            manager_rest_path = \
                path.dirname(path.dirname(path.dirname(__file__)))
            manager_rest_path = path.join(manager_rest_path, 'rest-service')
            sys.path.append(manager_rest_path)

            # file server
            fileserver_dir = path.join(self._tempdir, 'fileserver')
            os.mkdir(fileserver_dir)
            from manager_rest.file_server import FileServer
            from manager_rest.file_server import PORT as FS_PORT
            from manager_rest.util import copy_resources
            self._file_server_process = FileServer(fileserver_dir)
            self._file_server_process.start()

            # copy resources (base yaml/radials etc)
            resources_path = path.abspath(__file__)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.join(resources_path, 'resources')
            copy_resources(fileserver_dir, resources_path)

            self.patch_source_urls(fileserver_dir)

            # manager rest
            file_server_base_uri = 'http://localhost:{0}'.format(FS_PORT)
            self._manager_rest_process = ManagerRestProcess(
                MANAGER_REST_PORT,
                fileserver_dir,
                file_server_base_uri,
                FILE_SERVER_BLUEPRINTS_FOLDER,
                FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER,
                FILE_SERVER_RESOURCES_URI,
                self._tempdir)
            self._manager_rest_process.start()

        except BaseException as error:
            logger.error("Error in test environment setup: %s", error)
            self._destroy()
            raise

    def _destroy(self):
        # Tears down every process that was successfully started; each
        # guard allows _destroy to run after a partial __init__.
        logger.info("Destroying test environment... [scope={0}]".format(
            self._scope))
        if self._riemann_process:
            self._riemann_process.close()
        if self._elasticsearch_process:
            self._elasticsearch_process.close()
        if self._celery_operations_worker_process:
            self._celery_operations_worker_process.close()
        if self._celery_workflows_worker_process:
            self._celery_workflows_worker_process.close()
        if self._manager_rest_process:
            self._manager_rest_process.close()
        if self._file_server_process:
            self._file_server_process.stop()
        if self._tempdir:
            logger.info("Deleting test environment from: %s", self._tempdir)
            # NOTE(review): tempdir removal is deliberately left disabled,
            # presumably to allow post-mortem inspection — confirm before
            # re-enabling.
            # shutil.rmtree(self._tempdir, ignore_errors=True)

    def _create_celery_worker(self, queue):
        # Spawn an additional celery worker bound to *queue*.
        return CeleryTestWorkerProcess(
            self._tempdir,
            self._plugins_tempdir,
            MANAGER_REST_PORT,
            queue)

    @staticmethod
    def create(scope=TestEnvironmentScope.PACKAGE,
               use_mock_deployment_environment_workflows=True):
        """
        Creates the test environment if not already created.
        :param scope: The scope the test environment is created at.
        """
        if not TestEnvironment._instance:
            TestEnvironment._instance = TestEnvironment(
                scope,
                use_mock_deployment_environment_workflows)
        return TestEnvironment._instance

    @staticmethod
    def destroy(scope=TestEnvironmentScope.PACKAGE):
        """
        Destroys the test environment if the provided scope matches the
        scope the environment was created with.
        :param scope: The scope this method is invoked from.
        """
        if TestEnvironment._instance and \
                (TestEnvironment._instance._scope == scope):
            TestEnvironment._instance._destroy()

    @staticmethod
    def clean_plugins_tempdir():
        """
        Removes and creates a new plugins temporary directory.
        """
        TestEnvironment._clean_tempdir('_plugins_tempdir')

    @staticmethod
    def clean_riemann_tempdir():
        """
        Removes and creates a new riemann temporary directory.
        """
        TestEnvironment._clean_tempdir('_riemann_tempdir')

    @staticmethod
    def _clean_tempdir(prop):
        # Recreate the directory named by instance attribute *prop*.
        if TestEnvironment._instance:
            tmpdir = getattr(TestEnvironment._instance, prop)
            if path.exists(tmpdir):
                shutil.rmtree(tmpdir)
            os.makedirs(tmpdir)

    @staticmethod
    def create_celery_worker(queue):
        if TestEnvironment._instance:
            return TestEnvironment._instance._create_celery_worker(queue)

    @staticmethod
    def restart_celery_operations_worker():
        if TestEnvironment._instance and \
                (TestEnvironment._instance._celery_operations_worker_process):
            TestEnvironment._instance._celery_operations_worker_process \
                .restart()

    @staticmethod
    def restart_celery_workflows_worker():
        if TestEnvironment._instance and \
                (TestEnvironment._instance._celery_workflows_worker_process):
            TestEnvironment._instance._celery_workflows_worker_process \
                .restart()

    @staticmethod
    def restart_riemann():
        if TestEnvironment._instance and \
                (TestEnvironment._instance._riemann_process):
            TestEnvironment._instance._riemann_process.restart()

    @staticmethod
    def reset_elasticsearch_data():
        if TestEnvironment._instance and \
                TestEnvironment._instance._elasticsearch_process:
            TestEnvironment._instance._elasticsearch_process.reset_data()

    @staticmethod
    def _get_manager_root():
        # <repo>/tests/testenv/__init__.py -> <repo>
        init_file = __file__
        testenv_dir = dirname(init_file)
        tests_dir = dirname(testenv_dir)
        manager_dir = dirname(tests_dir)
        return manager_dir

    @classmethod
    def _get_riemann_config(cls):
        # Riemann manager.config shipped inside the riemann-controller plugin.
        manager_dir = cls._get_manager_root()
        plugins_dir = os.path.join(manager_dir, 'plugins')
        riemann_dir = os.path.join(plugins_dir, 'riemann-controller')
        package_dir = os.path.join(riemann_dir, 'riemann_controller')
        resources_dir = os.path.join(package_dir, 'resources')
        manager_config = os.path.join(resources_dir, 'manager.config')
        return manager_config

    @classmethod
    def _get_libs_path(cls):
        return path.join(cls._get_manager_root(), '.libs')

    @staticmethod
    def riemann_workdir():
        if TestEnvironment._instance:
            return TestEnvironment._instance._riemann_tempdir
        return None

    def patch_source_urls(self, resources):
        # Rewrites absolute '/cloudify/policies/' and '/cloudify/triggers/'
        # source URLs in types.yaml into fileserver-relative paths
        # (the leading '/' is dropped via index(in_path) + 1).
        with open(path.join(resources,
                            'cloudify', 'types', 'types.yaml')) as f:
            types_yaml = yaml.safe_load(f.read())
        for policy_type in types_yaml.get('policy_types', {}).values():
            in_path = '/cloudify/policies/'
            source = policy_type['source']
            if in_path in source:
                source = source[source.index(in_path) + 1:]
            policy_type['source'] = source
        for policy_trigger in types_yaml.get('policy_triggers', {}).values():
            in_path = '/cloudify/triggers/'
            source = policy_trigger['source']
            if in_path in source:
                source = source[source.index(in_path) + 1:]
            policy_trigger['source'] = source
        with open(path.join(resources,
                            'cloudify', 'types', 'types.yaml'), 'w') as f:
            f.write(yaml.safe_dump(types_yaml))
class TestEnvironment(object): manager_rest_process = None elasticsearch_process = None riemann_process = None file_server_process = None celery_management_worker_process = None def __init__(self, test_working_dir): super(TestEnvironment, self).__init__() self.test_working_dir = test_working_dir self.plugins_storage_dir = os.path.join(self.test_working_dir, 'plugins-storage') os.makedirs(self.plugins_storage_dir) self.fileserver_dir = path.join(self.test_working_dir, 'fileserver') self.rest_service_log_level = 'DEBUG' self.rest_service_log_path = path.join(self.test_working_dir, 'cloudify-rest-service.log') self.rest_service_log_file_size_MB = 100 self.rest_service_log_files_backup_count = 20 self.securest_log_level = 'DEBUG' self.securest_log_file = path.join(self.test_working_dir, 'rest-security-audit.log') self.securest_log_file_size_MB = 100 self.securest_log_files_backup_count = 20 self.amqp_username = '******' self.amqp_password = '******' self.events_and_logs_dir = \ path.join(self.test_working_dir, 'tests-events-and-logs') os.mkdir(self.events_and_logs_dir) def create(self): try: logger.info('Setting up test environment... workdir=[{0}]'.format( self.test_working_dir)) # events/logs polling start_events_and_logs_polling( logs_handler_retriever=self._logs_handler_retriever) self.start_elasticsearch() self.start_riemann() self.start_fileserver() self.start_manager_rest() self.create_management_worker() except BaseException as error: s_traceback = StringIO.StringIO() traceback.print_exc(file=s_traceback) logger.error("Error in test environment setup: %s", error) logger.error(s_traceback.getvalue()) self.destroy() raise def create_management_worker(self): mock_plugins_path = os.path.dirname(mock_plugins.__file__) os.environ['MOCK_PLUGINS_PATH'] = mock_plugins_path self.celery_management_worker_process = CeleryWorkerProcess( queues=['cloudify.management'], test_working_dir=self.test_working_dir, # these plugins are already installed. 
# so we just need to append to the includes. # note that these are not mocks, but the actual production # code plugins. additional_includes=[ 'riemann_controller.tasks', 'cloudify_system_workflows.deployment_environment', 'cloudify.plugins.workflows', 'diamond_agent.tasks', 'script_runner.tasks', # modules in the agent intended for backwards compatibility 'worker_installer.tasks', 'windows_agent_installer.tasks', 'plugin_installer.tasks', 'windows_plugin_installer.tasks', ], # we need higher concurrency since # 'deployment_environment.create' calls # 'plugin_installer.install' as a sub-task # and they are both executed inside # this worker concurrency=2) # copy plugins to worker env mock_plugins_path = os.path.dirname(mock_plugins.__file__) shutil.copytree(src=mock_plugins_path, dst=self.celery_management_worker_process.envdir, ignore=shutil.ignore_patterns('*.pyc')) def start_riemann(self): riemann_config_path = self._get_riemann_config() libs_path = self._get_libs_path() self.riemann_process = RiemannProcess(riemann_config_path, libs_path) self.riemann_process.start() def start_manager_rest(self): from manager_rest.file_server import PORT as FS_PORT file_server_base_uri = 'http://*****:*****@timestamp'] = timestamp es_client = Elasticsearch() doc_type = event['type'] # simulate log index res = es_client.index(index=index, doc_type=doc_type, body=event) if not res['created']: raise Exception('failed to write to elasticsearch') self.handle_logs = es_log_handler def _logs_handler_retriever(self): return self.handle_logs @classmethod def _get_riemann_config(cls): manager_dir = cls._get_manager_root() plugins_dir = os.path.join(manager_dir, 'plugins') riemann_dir = os.path.join(plugins_dir, 'riemann-controller') package_dir = os.path.join(riemann_dir, 'riemann_controller') resources_dir = os.path.join(package_dir, 'resources') manager_config = os.path.join(resources_dir, 'manager.config') return manager_config @classmethod def _get_libs_path(cls): return 
path.join(cls._get_manager_root(), '.libs') @staticmethod def reset_elasticsearch_data(): global testenv_instance testenv_instance.elasticsearch_process.reset_data() @staticmethod def stop_celery_management_worker(): global testenv_instance testenv_instance.celery_management_worker_process.stop() @staticmethod def read_celery_management_logs(): global testenv_instance process = testenv_instance.celery_management_worker_process return process.try_read_logfile() @classmethod def stop_all_celery_processes(cls): logger.info('Shutting down all celery processes') os.system("pkill -9 -f 'celery worker'") @staticmethod def start_celery_management_worker(): global testenv_instance testenv_instance.celery_management_worker_process.start() @staticmethod def riemann_cleanup(): global testenv_instance shutil.rmtree(TestEnvironment.riemann_workdir()) os.mkdir(TestEnvironment.riemann_workdir()) testenv_instance.riemann_process.restart() @staticmethod def riemann_workdir(): global testenv_instance return testenv_instance.\ celery_management_worker_process.\ riemann_config_dir @staticmethod def _get_manager_root(): init_file = __file__ testenv_dir = dirname(init_file) tests_dir = dirname(testenv_dir) manager_dir = dirname(tests_dir) return manager_dir @staticmethod def patch_source_urls(resources): with open(path.join(resources, 'cloudify', 'types', 'types.yaml')) as f: types_yaml = yaml.safe_load(f.read()) for policy_type in types_yaml.get('policy_types', {}).values(): in_path = '/cloudify/policies/' source = policy_type['source'] if in_path in source: source = source[source.index(in_path) + 1:] policy_type['source'] = source for policy_trigger in types_yaml.get('policy_triggers', {}).values(): in_path = '/cloudify/triggers/' source = policy_trigger['source'] if in_path in source: source = source[source.index(in_path) + 1:] policy_trigger['source'] = source with open(path.join(resources, 'cloudify', 'types', 'types.yaml'), 'w') as f: f.write(yaml.safe_dump(types_yaml))
class BaseServerTestCase(unittest.TestCase):
    """Minimal REST-service test base.

    Boots an in-process file server and the manager_rest Flask app, and
    exposes raw HTTP helpers (post/put/get/...) built on the Flask test
    client. URLs are not API-versioned in this variant.
    """

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        self.file_server = FileServer(self.tmpdir)
        self.file_server.start()
        storage_manager.storage_manager_module_name = \
            STORAGE_MANAGER_MODULE_NAME
        server.reset_state(self.create_configuration())
        util.copy_resources(config.instance().file_server_root)
        server.setup_app()
        server.app.config['Testing'] = True
        self.app = server.app.test_client()

    def tearDown(self):
        self.file_server.stop()

    def create_configuration(self):
        """Build the manager-rest Config consumed by server.reset_state()."""
        from manager_rest.config import Config
        test_config = Config()
        test_config.test_mode = True
        test_config.file_server_root = self.tmpdir
        test_config.file_server_base_uri = 'http://localhost:{0}'.format(
            FILE_SERVER_PORT)
        test_config.file_server_blueprints_folder = \
            FILE_SERVER_BLUEPRINTS_FOLDER
        return test_config

    def post(self, resource_path, data, query_params=None):
        url = self._build_url(resource_path, query_params)
        result = self.app.post(url,
                               content_type='application/json',
                               data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def post_file(self, resource_path, file_path, query_params=None):
        with open(file_path) as f:
            result = self.app.post(
                self._build_url(resource_path, query_params),
                data=f.read())
        result.json = json.loads(result.data)
        return result

    def put_file(self, resource_path, file_path, query_params=None):
        with open(file_path) as f:
            result = self.app.put(
                self._build_url(resource_path, query_params),
                data=f.read())
        result.json = json.loads(result.data)
        return result

    def put(self, resource_path, data):
        result = self.app.put(urllib.quote(resource_path),
                              content_type='application/json',
                              data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def patch(self, resource_path, data):
        result = self.app.patch(urllib.quote(resource_path),
                                content_type='application/json',
                                data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def get(self, resource_path, query_params=None):
        result = self.app.get(self._build_url(resource_path, query_params))
        result.json = json.loads(result.data)
        return result

    def head(self, resource_path):
        result = self.app.head(urllib.quote(resource_path))
        return result

    def delete(self, resource_path, query_params=None):
        # query_params added (default None) for parity with get();
        # existing single-argument callers are unaffected
        result = self.app.delete(self._build_url(resource_path,
                                                 query_params))
        result.json = json.loads(result.data)
        return result

    def check_if_resource_on_fileserver(self, blueprint_id, resource_path):
        """Return True if the resource is reachable on the file server."""
        url = 'http://localhost:{0}/{1}/{2}/{3}'.format(
            FILE_SERVER_PORT, FILE_SERVER_BLUEPRINTS_FOLDER,
            blueprint_id, resource_path)
        try:
            urllib2.urlopen(url)
            return True
        except urllib2.HTTPError:
            return False

    def _build_url(self, resource_path, query_params):
        """Quote *resource_path* and append an urlencoded query string.

        Fixes the original, which prefixed the first parameter with a
        stray '&' and returned 'path?' (dangling '?') when there were no
        query parameters.
        """
        quoted_path = urllib.quote(resource_path)
        if query_params:
            return '{0}?{1}'.format(quoted_path,
                                    urllib.urlencode(query_params))
        return quoted_path
class BaseServerTestCase(unittest.TestCase):
    """Base class for REST-service unit tests (client-version-URL variant).

    Boots an in-process file server, loads the manager_rest Flask app with a
    temporary configuration, and exposes a mocked REST client plus raw HTTP
    helpers whose URLs are prefixed with CLIENT_API_VERSION.
    """

    def __init__(self, *args, **kwargs):
        super(BaseServerTestCase, self).__init__(*args, **kwargs)

    def create_client(self, headers=None):
        """Return a CloudifyClient whose transport is the Flask test app."""
        client = CloudifyClient(host="localhost", headers=headers)
        mock_http_client = MockHTTPClient(self.app, headers=headers)
        client._client = mock_http_client
        client.blueprints.api = mock_http_client
        client.deployments.api = mock_http_client
        client.deployments.outputs.api = mock_http_client
        client.deployment_modifications.api = mock_http_client
        client.executions.api = mock_http_client
        client.nodes.api = mock_http_client
        client.node_instances.api = mock_http_client
        client.manager.api = mock_http_client
        client.evaluate.api = mock_http_client
        client.tokens.api = mock_http_client
        return client

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        self.rest_service_log = tempfile.mkstemp()[1]
        self.securest_log_file = tempfile.mkstemp()[1]
        self.file_server = FileServer(self.tmpdir)
        self.addCleanup(self.cleanup)
        self.file_server.start()
        storage_manager.storage_manager_module_name = STORAGE_MANAGER_MODULE_NAME

        # workaround for setting the rest service log path, since it's
        # needed when 'server' module is imported.
        # right after the import the log path is set normally like the rest
        # of the variables (used in the reset_state)
        tmp_conf_file = tempfile.mkstemp()[1]
        # a context manager guarantees the temp config file is flushed and
        # closed before the server module reads it (the original leaked the
        # handle returned by a bare open())
        with open(tmp_conf_file, "w") as f:
            json.dump(
                {
                    "rest_service_log_path": self.rest_service_log,
                    "rest_service_log_file_size_MB": 1,
                    "rest_service_log_files_backup_count": 1,
                    "rest_service_log_level": "DEBUG",
                },
                f,
            )
        os.environ["MANAGER_REST_CONFIG_PATH"] = tmp_conf_file
        try:
            from manager_rest import server
        finally:
            del os.environ["MANAGER_REST_CONFIG_PATH"]
        server.reset_state(self.create_configuration())
        utils.copy_resources(config.instance().file_server_root)
        server.setup_app()
        server.app.config["Testing"] = True
        self.app = server.app.test_client()
        self.client = self.create_client()
        self.initialize_provider_context()

    def cleanup(self):
        """Remove per-test log files and stop the file server."""
        self.quiet_delete(self.rest_service_log)
        self.quiet_delete(self.securest_log_file)
        if self.file_server:
            self.file_server.stop()

    def initialize_provider_context(self, client=None):
        if not client:
            client = self.client
        # creating an empty bootstrap context
        client.manager.create_context(self.id(), {"cloudify": {}})

    def create_configuration(self):
        """Build the manager-rest Config consumed by server.reset_state()."""
        from manager_rest.config import Config

        test_config = Config()
        test_config.test_mode = True
        test_config.file_server_root = self.tmpdir
        test_config.file_server_base_uri = "http://localhost:{0}".format(FILE_SERVER_PORT)
        test_config.file_server_blueprints_folder = FILE_SERVER_BLUEPRINTS_FOLDER
        test_config.file_server_uploaded_blueprints_folder = FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
        test_config.file_server_resources_uri = FILE_SERVER_RESOURCES_URI
        test_config.rest_service_log_level = "DEBUG"
        test_config.rest_service_log_path = self.rest_service_log
        # the original assigned (100,) — a 1-tuple — by mistake; the size
        # is a plain int like every other log-size setting
        test_config.rest_service_log_file_size_MB = 100
        test_config.rest_service_log_files_backup_count = 20
        test_config.security_audit_log_level = "DEBUG"
        test_config.security_audit_log_file = self.securest_log_file
        test_config.security_audit_log_file_size_MB = 100
        test_config.security_audit_log_files_backup_count = 20
        return test_config

    def _version_url(self, url):
        # method for versionifying URLs for requests which don't go through
        # the REST client; the version is taken from the REST client regardless
        if CLIENT_API_VERSION not in url:
            url = "/api/{0}{1}".format(CLIENT_API_VERSION, url)
        return url

    def post(self, resource_path, data, query_params=None):
        url = self._version_url(resource_path)
        result = self.app.post(
            urllib.quote(url),
            content_type="application/json",
            data=json.dumps(data),
            query_string=build_query_string(query_params),
        )
        result.json = json.loads(result.data)
        return result

    def post_file(self, resource_path, file_path, query_params=None):
        url = self._version_url(resource_path)
        with open(file_path) as f:
            result = self.app.post(
                urllib.quote(url), data=f.read(), query_string=build_query_string(query_params)
            )
        result.json = json.loads(result.data)
        return result

    def put_file(self, resource_path, file_path, query_params=None):
        url = self._version_url(resource_path)
        with open(file_path) as f:
            result = self.app.put(
                urllib.quote(url), data=f.read(), query_string=build_query_string(query_params)
            )
        result.json = json.loads(result.data)
        return result

    def put(self, resource_path, data=None, query_params=None):
        url = self._version_url(resource_path)
        result = self.app.put(
            urllib.quote(url),
            content_type="application/json",
            data=json.dumps(data) if data else None,
            query_string=build_query_string(query_params),
        )
        result.json = json.loads(result.data)
        return result

    def patch(self, resource_path, data):
        url = self._version_url(resource_path)
        result = self.app.patch(urllib.quote(url), content_type="application/json", data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def get(self, resource_path, query_params=None, headers=None):
        url = self._version_url(resource_path)
        result = self.app.get(urllib.quote(url), headers=headers, query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def head(self, resource_path):
        url = self._version_url(resource_path)
        result = self.app.head(urllib.quote(url))
        return result

    def delete(self, resource_path, query_params=None):
        url = self._version_url(resource_path)
        result = self.app.delete(urllib.quote(url), query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def check_if_resource_on_fileserver(self, blueprint_id, resource_path):
        """Return True if the resource is reachable on the file server."""
        url = "http://localhost:{0}/{1}/{2}/{3}".format(
            FILE_SERVER_PORT, FILE_SERVER_BLUEPRINTS_FOLDER, blueprint_id, resource_path
        )
        try:
            urllib2.urlopen(url)
            return True
        except urllib2.HTTPError:
            return False

    def get_blueprint_path(self, blueprint_dir_name):
        return os.path.join(os.path.dirname(os.path.abspath(__file__)), blueprint_dir_name)

    def archive_mock_blueprint(self, archive_func=archiving.make_targzfile, blueprint_dir="mock_blueprint"):
        """Archive *blueprint_dir* with *archive_func*; return archive path."""
        archive_path = tempfile.mkstemp()[1]
        source_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), blueprint_dir)
        archive_func(archive_path, source_dir)
        return archive_path

    def get_mock_blueprint_path(self):
        return os.path.join(os.path.dirname(os.path.abspath(__file__)), "mock_blueprint", "blueprint.yaml")

    def put_blueprint_args(
        self,
        blueprint_file_name=None,
        blueprint_id="blueprint",
        archive_func=archiving.make_targzfile,
        blueprint_dir="mock_blueprint",
    ):
        """Build (resource_path, archive_path, data) args for put_file()."""
        # the original formatted "/blueprints/{1}" with two arguments,
        # silently discarding CLIENT_API_VERSION; _version_url already adds
        # the version prefix, so only the blueprint id belongs here
        resource_path = self._version_url("/blueprints/{0}".format(blueprint_id))
        result = [resource_path, self.archive_mock_blueprint(archive_func, blueprint_dir)]
        if blueprint_file_name:
            data = {"application_file_name": blueprint_file_name}
        else:
            data = {}
        result.append(data)
        return result

    def put_deployment(
        self, deployment_id="deployment", blueprint_file_name=None, blueprint_id="blueprint", inputs=None
    ):
        """Upload a mock blueprint and create a deployment from it.

        Raises RuntimeError if the blueprint upload returned an error body.
        """
        blueprint_response = self.put_file(*self.put_blueprint_args(blueprint_file_name, blueprint_id)).json
        if "error_code" in blueprint_response:
            raise RuntimeError(
                "{}: {}".format(blueprint_response["error_code"], blueprint_response["message"])
            )
        blueprint_id = blueprint_response["id"]
        deployment = self.client.deployments.create(blueprint_id, deployment_id, inputs=inputs)
        return blueprint_id, deployment.id, blueprint_response, deployment

    def wait_for_url(self, url, timeout=5):
        """Poll *url* until it returns HTTP 200 or *timeout* seconds pass."""
        end = time.time() + timeout
        while end >= time.time():
            try:
                status = urllib.urlopen(url).getcode()
                if status == 200:
                    return
            except IOError:
                time.sleep(1)
        raise RuntimeError("Url {0} is not available (waited {1} " "seconds)".format(url, timeout))

    @staticmethod
    def quiet_delete(file_path):
        """Best-effort file removal; a missing file is not an error."""
        try:
            os.remove(file_path)
        except OSError:
            # narrowed from a bare except: only swallow filesystem errors
            pass
    def __init__(self, scope, use_mock_deployment_environment_workflows=True):
        """Boot the full test environment for the given scope.

        Starts (in order): events/logs polling, Riemann, Elasticsearch,
        Celery operations + workflows workers, a file server and the
        manager REST process.  Any failure tears everything down via
        _destroy() and re-raises.

        :param scope: a TestEnvironmentScope value; validated first.
        :param use_mock_deployment_environment_workflows: when True, mock
            workflow plugins are copied in (plus the real default
            install/uninstall workflows) and a single management worker is
            used; when False, real workflow plugins are used with two
            management workers.
        """
        try:
            TestEnvironmentScope.validate(scope)

            logger.info("Setting up test environment... [scope={0}]".format(
                scope))
            self._scope = scope

            # temp directory
            self._tempdir = tempfile.mkdtemp(suffix="test", prefix="cloudify")
            self._plugins_tempdir = path.join(self._tempdir, "cosmo-work")
            self._riemann_tempdir = path.join(self._tempdir, "riemann")
            logger.info("Test environment will be stored in: %s",
                        self._tempdir)
            if not path.exists(self._plugins_tempdir):
                os.makedirs(self._plugins_tempdir)
            if not path.exists(self._riemann_tempdir):
                os.makedirs(self._riemann_tempdir)

            # events/logs polling
            start_events_and_logs_polling()

            # riemann
            riemann_config_path = self._get_riemann_config()
            libs_path = self._get_libs_path()
            self._riemann_process = RiemannProcess(riemann_config_path,
                                                   libs_path)
            self._riemann_process.start()

            # elasticsearch
            self._elasticsearch_process = ElasticSearchProcess()
            self._elasticsearch_process.start()

            # copy all plugins to app path
            # locate the 'workflows' plugin dir; the fallback derives it from
            # the tests package location when 'workflows' is not importable
            try:
                import workflows
                # workflows/__init__.py(c)
                workflow_plugin_path = path.abspath(workflows.__file__)
                # workflows/
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # package / egg folder
                workflow_plugin_path = path.dirname(workflow_plugin_path)
            except ImportError:
                # cloudify-manager/tests/plugins/__init__.py(c)
                workflow_plugin_path = path.abspath(plugins.__file__)
                # cloudify-manager/tests/plugins
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/tests
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/workflows
                workflow_plugin_path = path.join(workflow_plugin_path,
                                                 'workflows')

            plugins_path = path.dirname(path.realpath(plugins.__file__))
            mock_workflow_plugins = path.dirname(path.realpath(
                mock_workflows.__file__))

            app_path = path.join(self._tempdir, "plugins")
            # copying plugins
            if not use_mock_deployment_environment_workflows:
                for plugin_path in [plugins_path, workflow_plugin_path]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)
            else:
                # copying plugins and mock workflows
                for plugin_path in [plugins_path, mock_workflow_plugins]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)

                # copying the actual default install/uninstall workflow
                # plugin manually
                workflow_plugin_workflows_path = path.join(
                    workflow_plugin_path, 'workflows')
                app_workflows_path = path.join(app_path, 'workflows')
                logger.info("Copying %s to %s",
                            workflow_plugin_workflows_path,
                            app_workflows_path)
                distutils.dir_util.copy_tree(
                    workflow_plugin_workflows_path, app_workflows_path)

            # celery operations worker
            # if using real deployment environment workflows then 2 workers
            # are needed on the management queue
            num_of_management_workers = \
                1 if use_mock_deployment_environment_workflows else 2
            self._celery_operations_worker_process = \
                CeleryOperationsWorkerProcess(
                    self._tempdir, self._plugins_tempdir, MANAGER_REST_PORT,
                    num_of_management_workers)
            self._celery_operations_worker_process.start()

            # celery workflows worker
            self._celery_workflows_worker_process = \
                CeleryWorkflowsWorkerProcess(
                    self._tempdir, self._plugins_tempdir, MANAGER_REST_PORT,
                    use_mock_deployment_environment_workflows)
            self._celery_workflows_worker_process.start()

            # workaround to update path so 'manager_rest' can be imported
            manager_rest_path = \
                path.dirname(path.dirname(path.dirname(__file__)))
            manager_rest_path = path.join(manager_rest_path, 'rest-service')
            sys.path.append(manager_rest_path)

            # file server
            fileserver_dir = path.join(self._tempdir, 'fileserver')
            os.mkdir(fileserver_dir)
            from manager_rest.file_server import FileServer
            from manager_rest.file_server import PORT as FS_PORT
            from manager_rest.util import copy_resources
            self._file_server_process = FileServer(fileserver_dir)
            self._file_server_process.start()

            # copy resources (base yaml/radials etc)
            resources_path = path.abspath(__file__)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.join(resources_path, 'resources')
            copy_resources(fileserver_dir, resources_path)

            self.patch_source_urls(fileserver_dir)

            # manager rest
            file_server_base_uri = 'http://localhost:{0}'.format(FS_PORT)
            self._manager_rest_process = ManagerRestProcess(
                MANAGER_REST_PORT,
                fileserver_dir,
                file_server_base_uri,
                FILE_SERVER_BLUEPRINTS_FOLDER,
                FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER,
                FILE_SERVER_RESOURCES_URI,
                self._tempdir)
            self._manager_rest_process.start()
        except BaseException as error:
            # BaseException on purpose: KeyboardInterrupt during setup must
            # also trigger teardown of already-started processes
            logger.error("Error in test environment setup: %s", error)
            self._destroy()
            raise
class BaseServerTestCase(unittest.TestCase):
    """Base class for REST service tests (unversioned-URL variant).

    setUp starts a local file server and the Flask REST app with a
    temporary configuration, and wires a CloudifyClient to the Flask test
    client; the remaining methods are thin HTTP helpers for subclasses.
    """

    def setUp(self):
        # file server rooted at a fresh temp dir (becomes file_server_root)
        self.tmpdir = tempfile.mkdtemp()
        self.file_server = FileServer(self.tmpdir)
        self.file_server.start()
        storage_manager.storage_manager_module_name = \
            STORAGE_MANAGER_MODULE_NAME
        server.reset_state(self.create_configuration())
        util.copy_resources(config.instance().file_server_root)
        server.setup_app()
        # NOTE(review): Flask's flag is 'TESTING' (upper-case); 'Testing' is
        # a no-op key -- confirm intent before changing.
        server.app.config['Testing'] = True
        self.app = server.app.test_client()
        # route every client API through the Flask test client
        self.client = CloudifyClient('localhost')
        mock_http_client = MockHTTPClient(self.app)
        self.client.blueprints.api = mock_http_client
        self.client.deployments.api = mock_http_client
        self.client.deployments.outputs.api = mock_http_client
        self.client.executions.api = mock_http_client
        self.client.nodes.api = mock_http_client
        self.client.node_instances.api = mock_http_client
        self.client.manager.api = mock_http_client

    def tearDown(self):
        self.file_server.stop()

    def create_configuration(self):
        """Return a manager-rest Config pointing at this test's temp dirs."""
        from manager_rest.config import Config
        test_config = Config()
        test_config.test_mode = True
        test_config.file_server_root = self.tmpdir
        test_config.file_server_base_uri = 'http://localhost:{0}'.format(
            FILE_SERVER_PORT)
        test_config.file_server_blueprints_folder = \
            FILE_SERVER_BLUEPRINTS_FOLDER
        test_config.file_server_uploaded_blueprints_folder = \
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
        test_config.file_server_resources_uri = FILE_SERVER_RESOURCES_URI
        return test_config

    def post(self, resource_path, data, query_params=None):
        """POST JSON ``data``; parsed body is attached as ``result.json``."""
        url = self._build_url(resource_path, query_params)
        result = self.app.post(url,
                               content_type='application/json',
                               data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def post_file(self, resource_path, file_path, query_params=None):
        """POST the raw contents of ``file_path``."""
        with open(file_path) as f:
            result = self.app.post(self._build_url(resource_path,
                                                   query_params),
                                   data=f.read())
            result.json = json.loads(result.data)
            return result

    def put_file(self, resource_path, file_path, query_params=None):
        """PUT the raw contents of ``file_path``."""
        with open(file_path) as f:
            result = self.app.put(self._build_url(resource_path,
                                                  query_params),
                                  data=f.read())
            result.json = json.loads(result.data)
            return result

    def put(self, resource_path, data):
        result = self.app.put(urllib.quote(resource_path),
                              content_type='application/json',
                              data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def patch(self, resource_path, data):
        result = self.app.patch(urllib.quote(resource_path),
                                content_type='application/json',
                                data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def get(self, resource_path, query_params=None):
        result = self.app.get(self._build_url(resource_path, query_params))
        result.json = json.loads(result.data)
        return result

    def head(self, resource_path):
        result = self.app.head(urllib.quote(resource_path))
        return result

    def delete(self, resource_path, query_params=None):
        result = self.app.delete(self._build_url(resource_path,
                                                 query_params))
        result.json = json.loads(result.data)
        return result

    def check_if_resource_on_fileserver(self, blueprint_id, resource_path):
        """Return True iff the blueprint resource is served by the
        file server, False on an HTTP error."""
        url = 'http://localhost:{0}/{1}/{2}/{3}'.format(
            FILE_SERVER_PORT, FILE_SERVER_BLUEPRINTS_FOLDER, blueprint_id,
            resource_path)
        try:
            urllib2.urlopen(url)
            return True
        except urllib2.HTTPError:
            return False

    def put_blueprint_args(self, blueprint_file_name=None,
                           blueprint_id='blueprint'):
        """Build the ``[url, archive_path, data]`` argument list for a
        blueprint PUT upload (suitable for ``self.put_file(*args)``)."""

        def make_tarfile(output_filename, source_dir):
            with tarfile.open(output_filename, "w:gz") as tar:
                tar.add(source_dir, arcname=os.path.basename(source_dir))

        def tar_mock_blueprint():
            # fix: mkstemp instead of the deprecated, race-prone mktemp
            fd, tar_path = tempfile.mkstemp()
            os.close(fd)
            source_dir = os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                'mock_blueprint')
            make_tarfile(tar_path, source_dir)
            return tar_path

        resource_path = '/blueprints/{0}'.format(blueprint_id)
        result = [
            resource_path,
            tar_mock_blueprint(),
        ]
        if blueprint_file_name:
            data = {'application_file_name': blueprint_file_name}
        else:
            data = {}
        result.append(data)
        return result

    def put_deployment(self,
                       deployment_id='deployment',
                       blueprint_file_name=None,
                       blueprint_id='blueprint',
                       inputs=None):
        """Upload a blueprint and create a deployment from it.

        :return: (blueprint_id, deployment_id, blueprint_response, deployment)
        :raises RuntimeError: if the blueprint upload returned an error body.
        """
        blueprint_response = self.put_file(
            *self.put_blueprint_args(blueprint_file_name, blueprint_id)).json

        if 'error_code' in blueprint_response:
            raise RuntimeError('{}: {}'.format(
                blueprint_response['error_code'],
                blueprint_response['message']))

        blueprint_id = blueprint_response['id']
        deployment = self.client.deployments.create(blueprint_id,
                                                    deployment_id,
                                                    inputs=inputs)
        return blueprint_id, deployment.id, blueprint_response, deployment

    def _build_url(self, resource_path, query_params):
        """Quote ``resource_path`` and append an encoded query string.

        Fix: previously the query string was prefixed with a literal '&'
        right after the '?', producing URLs like 'path?&k=v'.
        """
        if query_params:
            return '{0}?{1}'.format(urllib.quote(resource_path),
                                    urllib.urlencode(query_params))
        return resource_path
class BaseServerTestCase(unittest.TestCase):
    """Base class for versioned REST service tests.

    setUp starts a file server and the Flask REST app with a temporary
    configuration, and builds a CloudifyClient whose transport is the
    Flask test client.  URL helpers prefix paths with
    ``/api/<CLIENT_API_VERSION>`` via :meth:`_version_url`.
    """

    def __init__(self, *args, **kwargs):
        super(BaseServerTestCase, self).__init__(*args, **kwargs)

    def create_client(self, headers=None):
        """Return a CloudifyClient routed through the Flask test client.

        :param headers: optional headers sent with every request.
        """
        client = CloudifyClient(host='localhost', headers=headers)
        mock_http_client = MockHTTPClient(self.app, headers=headers)
        client._client = mock_http_client
        client.blueprints.api = mock_http_client
        client.deployments.api = mock_http_client
        client.deployments.outputs.api = mock_http_client
        client.deployment_modifications.api = mock_http_client
        client.executions.api = mock_http_client
        client.nodes.api = mock_http_client
        client.node_instances.api = mock_http_client
        client.manager.api = mock_http_client
        client.evaluate.api = mock_http_client
        client.tokens.api = mock_http_client
        client.events.api = mock_http_client
        # only exists in v2 and above
        if CLIENT_API_VERSION != 'v1':
            client.plugins.api = mock_http_client
            client.snapshots.api = mock_http_client

            # only exists in v2.1 and above
            if CLIENT_API_VERSION != 'v2':
                client.maintenance_mode.api = mock_http_client
                client.deployment_updates.api = mock_http_client

        return client

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        self.rest_service_log = tempfile.mkstemp()[1]
        self.securest_log_file = tempfile.mkstemp()[1]
        self.file_server = FileServer(self.tmpdir)
        self.maintenance_mode_dir = tempfile.mkdtemp()
        self.addCleanup(self.cleanup)
        self.file_server.start()
        storage_manager.storage_manager_module_name = \
            STORAGE_MANAGER_MODULE_NAME

        # workaround for setting the rest service log path, since it's
        # needed when 'server' module is imported.
        # right after the import the log path is set normally like the rest
        # of the variables (used in the reset_state)
        tmp_conf_file = tempfile.mkstemp()[1]
        # fix: use a context manager instead of a leaked open() handle
        with open(tmp_conf_file, 'w') as f:
            json.dump({'rest_service_log_path': self.rest_service_log,
                       'rest_service_log_file_size_MB': 1,
                       'rest_service_log_files_backup_count': 1,
                       'rest_service_log_level': 'DEBUG'},
                      f)
        os.environ['MANAGER_REST_CONFIG_PATH'] = tmp_conf_file
        try:
            from manager_rest import server
        finally:
            del os.environ['MANAGER_REST_CONFIG_PATH']
        self.server_configuration = self.create_configuration()
        server.reset_state(self.server_configuration)
        utils.copy_resources(config.instance().file_server_root)
        server.setup_app()
        # NOTE(review): Flask's testing flag is 'TESTING' (upper-case);
        # 'Testing' is a no-op key -- confirm intent before changing.
        server.app.config['Testing'] = True
        self.app = server.app.test_client()
        self.client = self.create_client()
        self.initialize_provider_context()

    def cleanup(self):
        # registered via addCleanup in setUp
        self.quiet_delete(self.rest_service_log)
        self.quiet_delete(self.securest_log_file)
        self.quiet_delete_directory(self.maintenance_mode_dir)
        if self.file_server:
            self.file_server.stop()

    def initialize_provider_context(self, client=None):
        if not client:
            client = self.client
        # creating an empty bootstrap context
        client.manager.create_context(self.id(), {'cloudify': {}})

    def create_configuration(self):
        """Return a manager-rest Config for this test's temp resources."""
        from manager_rest.config import Config
        test_config = Config()
        test_config.test_mode = True
        test_config.file_server_root = self.tmpdir
        test_config.file_server_base_uri = 'http://localhost:{0}'.format(
            FILE_SERVER_PORT)
        test_config.file_server_blueprints_folder = \
            FILE_SERVER_BLUEPRINTS_FOLDER
        test_config.file_server_deployments_folder = \
            FILE_SERVER_DEPLOYMENTS_FOLDER
        test_config.file_server_uploaded_blueprints_folder = \
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER
        test_config.file_server_resources_uri = FILE_SERVER_RESOURCES_URI
        test_config.rest_service_log_level = 'DEBUG'
        test_config.rest_service_log_path = self.rest_service_log
        # fix: a trailing comma made this the tuple (100,) instead of 100
        test_config.rest_service_log_file_size_MB = 100
        test_config.rest_service_log_files_backup_count = 20
        test_config.security_audit_log_level = 'DEBUG'
        test_config.security_audit_log_file = self.securest_log_file
        test_config.security_audit_log_file_size_MB = 100
        test_config.security_audit_log_files_backup_count = 20
        test_config._maintenance_folder = self.maintenance_mode_dir
        return test_config

    def _version_url(self, url):
        # method for versionifying URLs for requests which don't go through
        # the REST client; the version is taken from the REST client
        # regardless
        if CLIENT_API_VERSION not in url:
            url = '/api/{0}{1}'.format(CLIENT_API_VERSION, url)
        return url

    def post(self, resource_path, data, query_params=None):
        """POST JSON ``data``; parsed body is attached as ``result.json``."""
        url = self._version_url(resource_path)
        result = self.app.post(urllib.quote(url),
                               content_type='application/json',
                               data=json.dumps(data),
                               query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def post_file(self, resource_path, file_path, query_params=None):
        """POST the raw contents of ``file_path``."""
        url = self._version_url(resource_path)
        with open(file_path) as f:
            result = self.app.post(
                urllib.quote(url),
                data=f.read(),
                query_string=build_query_string(query_params))
            result.json = json.loads(result.data)
            return result

    def put_file(self, resource_path, file_path, query_params=None):
        """PUT the raw contents of ``file_path``."""
        url = self._version_url(resource_path)
        with open(file_path) as f:
            result = self.app.put(
                urllib.quote(url),
                data=f.read(),
                query_string=build_query_string(query_params))
            result.json = json.loads(result.data)
            return result

    def put(self, resource_path, data=None, query_params=None):
        url = self._version_url(resource_path)
        result = self.app.put(urllib.quote(url),
                              content_type='application/json',
                              data=json.dumps(data) if data else None,
                              query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def patch(self, resource_path, data):
        url = self._version_url(resource_path)
        result = self.app.patch(urllib.quote(url),
                                content_type='application/json',
                                data=json.dumps(data))
        result.json = json.loads(result.data)
        return result

    def get(self, resource_path, query_params=None, headers=None):
        url = self._version_url(resource_path)
        result = self.app.get(urllib.quote(url),
                              headers=headers,
                              query_string=build_query_string(query_params))
        result.json = json.loads(result.data)
        return result

    def head(self, resource_path):
        url = self._version_url(resource_path)
        result = self.app.head(urllib.quote(url))
        return result

    def delete(self, resource_path, query_params=None):
        url = self._version_url(resource_path)
        result = self.app.delete(urllib.quote(url),
                                 query_string=build_query_string(
                                     query_params))
        result.json = json.loads(result.data)
        return result

    def _check_if_resource_on_fileserver(self, folder, container_id,
                                         resource_path):
        """True iff the resource is served by the file server (HTTP 200)."""
        url = 'http://localhost:{0}/{1}/{2}/{3}'.format(
            FILE_SERVER_PORT, folder, container_id, resource_path)
        try:
            urllib2.urlopen(url)
            return True
        except urllib2.HTTPError:
            return False

    def check_if_resource_on_fileserver(self, blueprint_id, resource_path):
        return self._check_if_resource_on_fileserver(
            FILE_SERVER_BLUEPRINTS_FOLDER, blueprint_id, resource_path)

    def check_if_deployment_resource_on_fileserver(self, deployment_id,
                                                   resource_path):
        return self._check_if_resource_on_fileserver(
            FILE_SERVER_DEPLOYMENTS_FOLDER, deployment_id, resource_path)

    def get_blueprint_path(self, blueprint_dir_name):
        # resolve a blueprint directory relative to this test module
        return os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            blueprint_dir_name)

    def archive_mock_blueprint(self, archive_func=archiving.make_targzfile,
                               blueprint_dir='mock_blueprint'):
        """Archive ``blueprint_dir`` with ``archive_func``; return path."""
        archive_path = tempfile.mkstemp()[1]
        source_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  blueprint_dir)
        archive_func(archive_path, source_dir)
        return archive_path

    def get_mock_blueprint_path(self):
        return os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'mock_blueprint', 'blueprint.yaml')

    def put_blueprint_args(self, blueprint_file_name=None,
                           blueprint_id='blueprint',
                           archive_func=archiving.make_targzfile,
                           blueprint_dir='mock_blueprint'):
        """Build the ``[url, archive_path, data]`` argument list for a
        blueprint PUT upload (suitable for ``self.put_file(*args)``)."""
        # NOTE(review): only {1} is used; CLIENT_API_VERSION is unused here
        # because _version_url already applies the version prefix.
        resource_path = self._version_url('/blueprints/{1}'.format(
            CLIENT_API_VERSION, blueprint_id))
        result = [
            resource_path,
            self.archive_mock_blueprint(archive_func, blueprint_dir),
        ]
        if blueprint_file_name:
            data = {'application_file_name': blueprint_file_name}
        else:
            data = {}
        result.append(data)
        return result

    def put_deployment(self,
                       deployment_id='deployment',
                       blueprint_file_name=None,
                       blueprint_id='blueprint',
                       inputs=None):
        """Upload a blueprint and create a deployment from it.

        :return: (blueprint_id, deployment_id, blueprint_response, deployment)
        :raises RuntimeError: if the blueprint upload returned an error body.
        """
        blueprint_response = self.put_file(
            *self.put_blueprint_args(blueprint_file_name, blueprint_id)).json

        if 'error_code' in blueprint_response:
            raise RuntimeError('{}: {}'.format(
                blueprint_response['error_code'],
                blueprint_response['message']))

        blueprint_id = blueprint_response['id']
        deployment = self.client.deployments.create(blueprint_id,
                                                    deployment_id,
                                                    inputs=inputs)
        return blueprint_id, deployment.id, blueprint_response, deployment

    def upload_plugin(self, package_name, package_version):
        """Build a wheel for the package and POST it to /plugins."""
        temp_file_path = self.create_wheel(package_name, package_version)
        response = self.post_file('/plugins', temp_file_path)
        os.remove(temp_file_path)
        return response

    def create_wheel(self, package_name, package_version):
        module_src = '{0}=={1}'.format(package_name, package_version)
        wagon_client = Wagon(module_src)
        return wagon_client.create(
            archive_destination_dir=tempfile.gettempdir(), force=True)

    def wait_for_url(self, url, timeout=5):
        """Poll ``url`` until it answers HTTP 200; raise after ``timeout``
        seconds. Sleeps only when the connection fails (IOError)."""
        end = time.time() + timeout
        while end >= time.time():
            try:
                status = urllib.urlopen(url).getcode()
                if status == 200:
                    return
            except IOError:
                time.sleep(1)
        raise RuntimeError('Url {0} is not available (waited {1} '
                           'seconds)'.format(url, timeout))

    @staticmethod
    def quiet_delete(file_path):
        # best-effort removal; fix: swallow only filesystem errors instead
        # of a bare except that would also hide programming errors
        try:
            os.remove(file_path)
        except OSError:
            pass

    @staticmethod
    def quiet_delete_directory(file_path):
        shutil.rmtree(file_path, ignore_errors=True)

    def wait_for_deployment_creation(self, client, deployment_id):
        """Block until the deployment's environment-creation workflow ends."""
        env_creation_execution = None
        deployment_executions = client.executions.list(deployment_id)
        for execution in deployment_executions:
            if execution.workflow_id == 'create_deployment_environment':
                env_creation_execution = execution
                break
        if env_creation_execution:
            self.wait_for_execution(client, env_creation_execution)

    @staticmethod
    def wait_for_execution(client, execution, timeout=900):
        # Poll for execution status until execution ends
        deadline = time.time() + timeout
        while True:
            if time.time() > deadline:
                raise Exception(
                    'execution of operation {0} for deployment {1} timed out'.
                    format(execution.workflow_id, execution.deployment_id))
            execution = client.executions.get(execution.id)
            if execution.status in Execution.END_STATES:
                break
            time.sleep(3)
class TestEnvironment(object):
    """
    Creates the cosmo test environment:
        - Riemann server.
        - Elasticsearch server.
        - Celery worker.
        - Ruote service.
        - Prepares celery app dir with plugins from cosmo module and official
          riemann configurer and plugin installer.
    """
    # process-wide singleton, managed by create()/destroy()
    _instance = None
    _celery_operations_worker_process = None
    _celery_workflows_worker_process = None
    _riemann_process = None
    _elasticsearch_process = None
    _manager_rest_process = None
    _tempdir = None
    _plugins_tempdir = None
    _scope = None
    _file_server_process = None

    def __init__(self, scope, use_mock_workers_installation=True):
        """Boot the full test environment for the given scope.

        Starts (in order): events/logs polling, Riemann, Elasticsearch,
        Celery operations + workflows workers, a file server and the
        manager REST process.  Any failure tears everything down via
        _destroy() and re-raises.
        """
        try:
            TestEnvironmentScope.validate(scope)

            logger.info("Setting up test environment... [scope={0}]".format(
                scope))
            self._scope = scope

            # temp directory
            self._tempdir = tempfile.mkdtemp(suffix="test", prefix="cloudify")
            self._plugins_tempdir = path.join(self._tempdir, "cosmo-work")
            self._riemann_tempdir = path.join(self._tempdir, "riemann")
            logger.info("Test environment will be stored in: %s",
                        self._tempdir)
            if not path.exists(self._plugins_tempdir):
                os.makedirs(self._plugins_tempdir)
            if not path.exists(self._riemann_tempdir):
                os.makedirs(self._riemann_tempdir)

            # events/logs polling
            start_events_and_logs_polling()

            # riemann
            riemann_config_path = self._get_riemann_config()
            libs_path = self._get_libs_path()
            self._riemann_process = RiemannProcess(riemann_config_path,
                                                   libs_path)
            self._riemann_process.start()

            # elasticsearch
            self._elasticsearch_process = ElasticSearchProcess()
            self._elasticsearch_process.start()

            # copy all plugins to app path
            # locate the 'workflows' plugin dir; the fallback derives it from
            # the tests package location when 'workflows' is not importable
            try:
                import workflows
                # workflows/__init__.py(c)
                workflow_plugin_path = path.abspath(workflows.__file__)
                # workflows/
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # package / egg folder
                workflow_plugin_path = path.dirname(workflow_plugin_path)
            except ImportError:
                # cloudify-manager/tests/plugins/__init__.py(c)
                workflow_plugin_path = path.abspath(plugins.__file__)
                # cloudify-manager/tests/plugins
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/tests
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/workflows
                workflow_plugin_path = path.join(workflow_plugin_path,
                                                 'workflows')

            plugins_path = path.dirname(path.realpath(plugins.__file__))
            mock_workflow_plugins = path.dirname(path.realpath(
                mock_workflows.__file__))

            app_path = path.join(self._tempdir, "plugins")
            # copying plugins
            if not use_mock_workers_installation:
                for plugin_path in [plugins_path, workflow_plugin_path]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)
            else:
                # copying plugins and mock workflows
                for plugin_path in [plugins_path, mock_workflow_plugins]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)

                # copying the actual default install/uninstall workflow
                # plugin manually
                workflow_plugin_workflows_path = path.join(
                    workflow_plugin_path, 'workflows')
                app_workflows_path = path.join(app_path, 'workflows')
                logger.info("Copying %s to %s",
                            workflow_plugin_workflows_path,
                            app_workflows_path)
                distutils.dir_util.copy_tree(
                    workflow_plugin_workflows_path, app_workflows_path)

            # celery operations worker
            # if using real worker installation workflow then 2 workers are
            # needed on the management queue
            num_of_management_workers = \
                1 if use_mock_workers_installation else 2
            self._celery_operations_worker_process = \
                CeleryOperationsWorkerProcess(
                    self._tempdir, self._plugins_tempdir, MANAGER_REST_PORT,
                    num_of_management_workers)
            self._celery_operations_worker_process.start()

            # celery workflows worker
            self._celery_workflows_worker_process = \
                CeleryWorkflowsWorkerProcess(
                    self._tempdir, self._plugins_tempdir, MANAGER_REST_PORT,
                    use_mock_workers_installation)
            self._celery_workflows_worker_process.start()

            # workaround to update path so 'manager_rest' can be imported
            manager_rest_path = \
                path.dirname(path.dirname(path.dirname(__file__)))
            manager_rest_path = path.join(manager_rest_path, 'rest-service')
            sys.path.append(manager_rest_path)

            # file server
            fileserver_dir = path.join(self._tempdir, 'fileserver')
            os.mkdir(fileserver_dir)
            from manager_rest.file_server import FileServer
            from manager_rest.file_server import PORT as FS_PORT
            from manager_rest.util import copy_resources
            self._file_server_process = FileServer(fileserver_dir)
            self._file_server_process.start()

            # copy resources (base yaml/radials etc)
            resources_path = path.abspath(__file__)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.join(resources_path, 'resources')
            copy_resources(fileserver_dir, resources_path)

            # manager rest
            file_server_base_uri = 'http://localhost:{0}'.format(FS_PORT)
            self._manager_rest_process = ManagerRestProcess(
                MANAGER_REST_PORT,
                fileserver_dir,
                file_server_base_uri,
                FILE_SERVER_BLUEPRINTS_FOLDER,
                FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER,
                FILE_SERVER_RESOURCES_URI,
                self._tempdir)
            self._manager_rest_process.start()
        except BaseException as error:
            # BaseException on purpose: KeyboardInterrupt during setup must
            # also trigger teardown of already-started processes
            logger.error("Error in test environment setup: %s", error)
            self._destroy()
            raise

    def _destroy(self):
        """Stop every process that was started; safe to call mid-setup
        (each attribute is checked for None)."""
        logger.info("Destroying test environment... [scope={0}]".format(
            self._scope))
        if self._riemann_process:
            self._riemann_process.close()
        if self._elasticsearch_process:
            self._elasticsearch_process.close()
        if self._celery_operations_worker_process:
            self._celery_operations_worker_process.close()
        if self._celery_workflows_worker_process:
            self._celery_workflows_worker_process.close()
        if self._manager_rest_process:
            self._manager_rest_process.close()
        if self._file_server_process:
            self._file_server_process.stop()
        if self._tempdir:
            logger.info("Deleting test environment from: %s", self._tempdir)
            # NOTE(review): the actual deletion is commented out, so the
            # temp dir is logged as deleted but left on disk -- presumably
            # kept for post-mortem debugging; confirm before re-enabling.
            # shutil.rmtree(self._tempdir, ignore_errors=True)

    def _create_celery_worker(self, queue):
        # a standalone worker bound to a specific queue
        return CeleryTestWorkerProcess(
            self._tempdir,
            self._plugins_tempdir,
            MANAGER_REST_PORT,
            queue)

    @staticmethod
    def create(scope=TestEnvironmentScope.PACKAGE,
               use_mock_workers_installation=True):
        """
        Creates the test environment if not already created.
        :param scope: The scope the test environment is created at.
        """
        if not TestEnvironment._instance:
            TestEnvironment._instance = TestEnvironment(
                scope,
                use_mock_workers_installation)
        return TestEnvironment._instance

    @staticmethod
    def destroy(scope=TestEnvironmentScope.PACKAGE):
        """
        Destroys the test environment if the provided scope matches the
        scope the environment was created with.
        :param scope: The scope this method is invoked from.
        """
        if TestEnvironment._instance and \
                (TestEnvironment._instance._scope == scope):
            TestEnvironment._instance._destroy()

    @staticmethod
    def clean_plugins_tempdir():
        """
        Removes and creates a new plugins temporary directory.
        """
        TestEnvironment._clean_tempdir('_plugins_tempdir')

    @staticmethod
    def clean_riemann_tempdir():
        """
        Removes and creates a new plugins temporary directory.
        """
        TestEnvironment._clean_tempdir('_riemann_tempdir')

    @staticmethod
    def _clean_tempdir(prop):
        # recreate the directory named by instance attribute `prop`
        if TestEnvironment._instance:
            tmpdir = getattr(TestEnvironment._instance, prop)
            if path.exists(tmpdir):
                shutil.rmtree(tmpdir)
            os.makedirs(tmpdir)

    @staticmethod
    def create_celery_worker(queue):
        if TestEnvironment._instance:
            return TestEnvironment._instance._create_celery_worker(queue)

    @staticmethod
    def restart_celery_operations_worker():
        if TestEnvironment._instance and \
                (TestEnvironment._instance._celery_operations_worker_process):
            TestEnvironment._instance._celery_operations_worker_process \
                .restart()

    @staticmethod
    def restart_celery_workflows_worker():
        if TestEnvironment._instance and \
                (TestEnvironment._instance._celery_workflows_worker_process):
            TestEnvironment._instance._celery_workflows_worker_process \
                .restart()

    @staticmethod
    def restart_riemann():
        if TestEnvironment._instance and \
                (TestEnvironment._instance._riemann_process):
            TestEnvironment._instance._riemann_process.restart()

    @staticmethod
    def reset_elasticsearch_data():
        if TestEnvironment._instance and \
                TestEnvironment._instance._elasticsearch_process:
            TestEnvironment._instance._elasticsearch_process.reset_data()

    @staticmethod
    def _get_manager_root():
        # <manager root>/tests/testenv/__init__.py -> <manager root>
        init_file = __file__
        testenv_dir = dirname(init_file)
        tests_dir = dirname(testenv_dir)
        manager_dir = dirname(tests_dir)
        return manager_dir

    @classmethod
    def _get_riemann_config(cls):
        # riemann-controller plugin's bundled manager.config
        manager_dir = cls._get_manager_root()
        plugins_dir = os.path.join(manager_dir, 'plugins')
        riemann_dir = os.path.join(plugins_dir, 'riemann-controller')
        package_dir = os.path.join(riemann_dir, 'riemann_controller')
        resources_dir = os.path.join(package_dir, 'resources')
        manager_config = os.path.join(resources_dir, 'manager.config')
        return manager_config

    @classmethod
    def _get_libs_path(cls):
        return path.join(cls._get_manager_root(), '.libs')

    @staticmethod
    def riemann_workdir():
        # None when no environment has been created yet
        if TestEnvironment._instance:
            return TestEnvironment._instance._riemann_tempdir
        return None
class TestEnvironment(object):
    """Integration-test environment: spins up elasticsearch, riemann, a file
    server, the manager REST service and a celery management worker inside a
    private working directory, and tears them all down on destroy().

    NOTE(review): this chunk contains several near-duplicate revisions of
    this class; this is the black-formatted (double-quoted) variant.
    """

    # Child-process handles; populated by the start_* methods, checked for
    # None during destroy() so partial setups tear down cleanly.
    manager_rest_process = None
    elasticsearch_process = None
    riemann_process = None
    file_server_process = None
    celery_management_worker_process = None

    def __init__(self, test_working_dir):
        """Prepare directory layout and logging config under
        ``test_working_dir``; no processes are started here (see create())."""
        super(TestEnvironment, self).__init__()
        self.test_working_dir = test_working_dir
        self.plugins_storage_dir = os.path.join(self.test_working_dir,
                                                "plugins-storage")
        os.makedirs(self.plugins_storage_dir)
        self.fileserver_dir = path.join(self.test_working_dir, "fileserver")
        self.rest_service_log_level = "DEBUG"
        self.rest_service_log_path = path.join(self.test_working_dir,
                                               "cloudify-rest-service.log")
        self.rest_service_log_file_size_MB = 100
        self.rest_service_log_files_backup_count = 20
        self.securest_log_level = "DEBUG"
        self.securest_log_file = path.join(self.test_working_dir,
                                           "rest-security-audit.log")
        self.securest_log_file_size_MB = 100
        self.securest_log_files_backup_count = 20
        # NOTE(review): credentials appear scrubbed in this source snapshot;
        # the literal asterisks are what the code now ships with.
        self.amqp_username = "******"
        self.amqp_password = "******"
        self.events_and_logs_dir = path.join(self.test_working_dir,
                                             "tests-events-and-logs")
        os.mkdir(self.events_and_logs_dir)

    def create(self):
        """Start all environment services in dependency order; on any failure
        log the traceback, tear everything down, and re-raise."""
        try:
            logger.info("Setting up test environment... workdir=[{0}]"
                        .format(self.test_working_dir))
            # events/logs polling
            start_events_and_logs_polling(
                logs_handler_retriever=self._logs_handler_retriever)
            self.start_elasticsearch()
            self.start_riemann()
            self.start_fileserver()
            self.start_manager_rest()
            self.create_management_worker()
        # BaseException on purpose: even KeyboardInterrupt must trigger
        # teardown of the spawned processes.
        except BaseException as error:
            s_traceback = StringIO.StringIO()
            traceback.print_exc(file=s_traceback)
            logger.error("Error in test environment setup: %s", error)
            logger.error(s_traceback.getvalue())
            self.destroy()
            raise

    def create_management_worker(self):
        """Start the celery worker on the management queue and copy the mock
        plugins into its virtualenv."""
        mock_plugins_path = os.path.dirname(mock_plugins.__file__)
        os.environ["MOCK_PLUGINS_PATH"] = mock_plugins_path
        self.celery_management_worker_process = CeleryWorkerProcess(
            queues=["cloudify.management"],
            test_working_dir=self.test_working_dir,
            # these plugins are already installed.
            # so we just need to append to the includes.
            # note that these are not mocks, but the actual production
            # code plugins.
            additional_includes=[
                "riemann_controller.tasks",
                "cloudify_system_workflows.deployment_environment",
                "cloudify.plugins.workflows",
                "diamond_agent.tasks",
                "script_runner.tasks",
                # modules in the agent intended for backwards compatibility
                "worker_installer.tasks",
                "windows_agent_installer.tasks",
                "plugin_installer.tasks",
                "windows_plugin_installer.tasks",
            ],
            # we need higher concurrency since
            # 'deployment_environment.create' calls
            # 'plugin_installer.install' as a sub-task
            # and they are both executed inside
            # this worker
            concurrency=2,
        )
        # copy plugins to worker env
        mock_plugins_path = os.path.dirname(mock_plugins.__file__)
        shutil.copytree(
            src=mock_plugins_path,
            dst=self.celery_management_worker_process.envdir,
            ignore=shutil.ignore_patterns("*.pyc"),
        )

    def start_riemann(self):
        riemann_config_path = self._get_riemann_config()
        libs_path = self._get_libs_path()
        self.riemann_process = RiemannProcess(riemann_config_path, libs_path)
        self.riemann_process.start()

    def start_manager_rest(self):
        """Launch the manager REST service pointed at the local file server."""
        from manager_rest.file_server import PORT as FS_PORT

        file_server_base_uri = "http://localhost:{0}".format(FS_PORT)
        self.manager_rest_process = ManagerRestProcess(
            MANAGER_REST_PORT,
            self.fileserver_dir,
            file_server_base_uri,
            FILE_SERVER_BLUEPRINTS_FOLDER,
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER,
            FILE_SERVER_RESOURCES_URI,
            self.rest_service_log_level,
            self.rest_service_log_path,
            self.rest_service_log_file_size_MB,
            self.rest_service_log_files_backup_count,
            self.securest_log_level,
            self.securest_log_file,
            self.securest_log_file_size_MB,
            self.securest_log_files_backup_count,
            self.test_working_dir,
            self.amqp_username,
            self.amqp_password,
        )
        self.manager_rest_process.start()

    def start_elasticsearch(self):
        # elasticsearch
        self.elasticsearch_process = ElasticSearchProcess()
        self.elasticsearch_process.start()

    def start_fileserver(self):
        """Start the blueprint/resource file server and seed it with the
        repository's resources (types.yaml etc.)."""
        # workaround to update path
        manager_rest_path = path.dirname(
            path.dirname(path.dirname(__file__)))
        manager_rest_path = path.join(manager_rest_path, "rest-service")
        sys.path.append(manager_rest_path)
        os.mkdir(self.fileserver_dir)
        from manager_rest.file_server import FileServer
        from manager_rest.utils import copy_resources

        self.file_server_process = FileServer(self.fileserver_dir)
        self.file_server_process.start()

        # copy resources (base yaml etc)
        resources_path = path.abspath(__file__)
        resources_path = path.dirname(resources_path)
        resources_path = path.dirname(resources_path)
        resources_path = path.dirname(resources_path)
        resources_path = path.join(resources_path, "resources")
        copy_resources(self.fileserver_dir, resources_path)

        self.patch_source_urls(self.fileserver_dir)

    def destroy(self):
        """Stop every started service (None-safe) and remove the workdir."""
        logger.info("Destroying test environment...")
        if self.riemann_process:
            self.riemann_process.close()
        if self.elasticsearch_process:
            self.elasticsearch_process.close()
        if self.manager_rest_process:
            self.manager_rest_process.close()
        if self.file_server_process:
            self.file_server_process.stop()
        self.delete_working_directory()

    def delete_working_directory(self):
        if os.path.exists(self.test_working_dir):
            logger.info("Deleting test environment from: %s",
                        self.test_working_dir)
            # NOTE(review): deletion disabled here (the log above still fires)
            # -- confirm whether leaving the workdir behind is intentional.
            # shutil.rmtree(self.test_working_dir, ignore_errors=True)

    def handle_logs(self, output, event):
        # Default no-op log handler; tests may replace it via the retriever.
        pass

    def _logs_handler_retriever(self):
        # Indirection so the polling thread always sees the current handler.
        return self.handle_logs

    @classmethod
    def _get_riemann_config(cls):
        # Path to the riemann-controller plugin's bundled manager.config.
        manager_dir = cls._get_manager_root()
        plugins_dir = os.path.join(manager_dir, "plugins")
        riemann_dir = os.path.join(plugins_dir, "riemann-controller")
        package_dir = os.path.join(riemann_dir, "riemann_controller")
        resources_dir = os.path.join(package_dir, "resources")
        manager_config = os.path.join(resources_dir, "manager.config")
        return manager_config

    @classmethod
    def _get_libs_path(cls):
        return path.join(cls._get_manager_root(), ".libs")

    @staticmethod
    def reset_elasticsearch_data():
        global testenv_instance
        testenv_instance.elasticsearch_process.reset_data()

    @staticmethod
    def stop_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.stop()

    @staticmethod
    def read_celery_management_logs():
        global testenv_instance
        process = testenv_instance.celery_management_worker_process
        return process.try_read_logfile()

    @classmethod
    def stop_all_celery_processes(cls):
        # Hard kill -- SIGKILLs any stray celery workers on the machine.
        logger.info("Shutting down all celery processes")
        os.system("pkill -9 -f 'celery worker'")

    @staticmethod
    def start_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.start()

    @staticmethod
    def riemann_cleanup():
        # Wipe riemann's working dir and restart it with a clean slate.
        global testenv_instance
        shutil.rmtree(TestEnvironment.riemann_workdir())
        os.mkdir(TestEnvironment.riemann_workdir())
        testenv_instance.riemann_process.restart()

    @staticmethod
    def riemann_workdir():
        global testenv_instance
        return testenv_instance.celery_management_worker_process \
            .riemann_config_dir

    @staticmethod
    def _get_manager_root():
        # Walk up from this file: <root>/tests/testenv/__init__.py -> <root>.
        init_file = __file__
        testenv_dir = dirname(init_file)
        tests_dir = dirname(testenv_dir)
        manager_dir = dirname(tests_dir)
        return manager_dir

    @staticmethod
    def patch_source_urls(resources):
        """Rewrite absolute policy/trigger source URLs in types.yaml to
        fileserver-relative paths so they resolve against the local server."""
        with open(path.join(resources, "cloudify", "types",
                            "types.yaml")) as f:
            types_yaml = yaml.safe_load(f.read())
        for policy_type in types_yaml.get("policy_types", {}).values():
            in_path = "/cloudify/policies/"
            source = policy_type["source"]
            if in_path in source:
                # keep everything from "cloudify/..." onward (drop the
                # leading slash of the matched path)
                source = source[source.index(in_path) + 1:]
            policy_type["source"] = source
        for policy_trigger in types_yaml.get("policy_triggers", {}).values():
            in_path = "/cloudify/triggers/"
            source = policy_trigger["source"]
            if in_path in source:
                source = source[source.index(in_path) + 1:]
            policy_trigger["source"] = source
        with open(path.join(resources, "cloudify", "types", "types.yaml"),
                  "w") as f:
            f.write(yaml.safe_dump(types_yaml))
class TestEnvironment(object):
    """Integration-test environment (single-quoted revision): starts
    elasticsearch, riemann, a file server, the manager REST service and a
    celery management worker; destroy() tears everything down."""

    # Child-process handles; populated by the start_* methods, checked for
    # None during destroy() so partial setups tear down cleanly.
    manager_rest_process = None
    elasticsearch_process = None
    riemann_process = None
    file_server_process = None
    celery_management_worker_process = None

    def __init__(self, test_working_dir):
        """Record the working dir; no processes are started here."""
        super(TestEnvironment, self).__init__()
        self.test_working_dir = test_working_dir
        self.fileserver_dir = path.join(self.test_working_dir, 'fileserver')

    def create(self):
        """Start all environment services in dependency order; on any failure
        log the traceback, tear everything down, and re-raise."""
        try:
            logger.info('Setting up test environment... workdir=[{0}]'
                        .format(self.test_working_dir))
            # events/logs polling
            start_events_and_logs_polling()
            self.start_elasticsearch()
            self.start_riemann()
            self.start_fileserver()
            self.start_manager_rest()
            self.create_management_worker()
        # BaseException on purpose: even KeyboardInterrupt must trigger
        # teardown of the spawned processes.
        except BaseException as error:
            s_traceback = StringIO.StringIO()
            traceback.print_exc(file=s_traceback)
            logger.error("Error in test environment setup: %s", error)
            logger.error(s_traceback.getvalue())
            self.destroy()
            raise

    def create_management_worker(self):
        """Start the celery worker on the management queue and copy the mock
        plugins into its virtualenv."""
        self.celery_management_worker_process = CeleryWorkerProcess(
            queues=['cloudify.management'],
            test_working_dir=self.test_working_dir,
            # these plugins are already installed.
            # so we just need to append to the includes.
            # note that these are not mocks, but the actual production
            # code plugins.
            additional_includes=[
                'riemann_controller.tasks',
                'cloudify_system_workflows.deployment_environment',
                'cloudify.plugins.workflows',
                'diamond_agent.tasks',
                'script_runner.tasks'
            ],
            # we need higher concurrency since
            # 'deployment_environment.create' calls
            # 'plugin_installer.install' as a sub-task
            # and they are both executed inside
            # this worker
            concurrency=2
        )
        # copy plugins to worker env
        import mock_plugins
        mock_plugins_path = os.path.dirname(mock_plugins.__file__)
        shutil.copytree(
            src=mock_plugins_path,
            dst=self.celery_management_worker_process.envdir,
            ignore=shutil.ignore_patterns('*.pyc')
        )

    def start_riemann(self):
        riemann_config_path = self._get_riemann_config()
        libs_path = self._get_libs_path()
        self.riemann_process = RiemannProcess(riemann_config_path,
                                              libs_path)
        self.riemann_process.start()

    def start_manager_rest(self):
        """Launch the manager REST service pointed at the local file server."""
        from manager_rest.file_server import PORT as FS_PORT
        file_server_base_uri = 'http://localhost:{0}'.format(FS_PORT)
        self.manager_rest_process = ManagerRestProcess(
            MANAGER_REST_PORT,
            self.fileserver_dir,
            file_server_base_uri,
            FILE_SERVER_BLUEPRINTS_FOLDER,
            FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER,
            FILE_SERVER_RESOURCES_URI,
            self.test_working_dir)
        self.manager_rest_process.start()

    def start_elasticsearch(self):
        # elasticsearch
        self.elasticsearch_process = ElasticSearchProcess()
        self.elasticsearch_process.start()

    def start_fileserver(self):
        """Start the blueprint/resource file server and seed it with the
        repository's resources (types.yaml etc.)."""
        # workaround to update path
        manager_rest_path = \
            path.dirname(path.dirname(path.dirname(__file__)))
        manager_rest_path = path.join(manager_rest_path, 'rest-service')
        sys.path.append(manager_rest_path)
        os.mkdir(self.fileserver_dir)
        from manager_rest.file_server import FileServer
        # NOTE(review): this revision imports from manager_rest.util (the
        # L63-66 revision uses manager_rest.utils) -- verify module name.
        from manager_rest.util import copy_resources
        self.file_server_process = FileServer(self.fileserver_dir)
        self.file_server_process.start()

        # copy resources (base yaml etc)
        resources_path = path.abspath(__file__)
        resources_path = path.dirname(resources_path)
        resources_path = path.dirname(resources_path)
        resources_path = path.dirname(resources_path)
        resources_path = path.join(resources_path, 'resources')
        copy_resources(self.fileserver_dir, resources_path)

        self.patch_source_urls(self.fileserver_dir)

    def destroy(self):
        """Stop every started service (None-safe) and remove the workdir."""
        logger.info('Destroying test environment...')
        if self.riemann_process:
            self.riemann_process.close()
        if self.elasticsearch_process:
            self.elasticsearch_process.close()
        if self.manager_rest_process:
            self.manager_rest_process.close()
        if self.file_server_process:
            self.file_server_process.stop()
        self.delete_working_directory()

    def delete_working_directory(self):
        if os.path.exists(self.test_working_dir):
            logger.info('Deleting test environment from: %s',
                        self.test_working_dir)
            shutil.rmtree(self.test_working_dir, ignore_errors=True)

    @classmethod
    def _get_riemann_config(cls):
        # Path to the riemann-controller plugin's bundled manager.config.
        manager_dir = cls._get_manager_root()
        plugins_dir = os.path.join(manager_dir, 'plugins')
        riemann_dir = os.path.join(plugins_dir, 'riemann-controller')
        package_dir = os.path.join(riemann_dir, 'riemann_controller')
        resources_dir = os.path.join(package_dir, 'resources')
        manager_config = os.path.join(resources_dir, 'manager.config')
        return manager_config

    @classmethod
    def _get_libs_path(cls):
        return path.join(cls._get_manager_root(), '.libs')

    @staticmethod
    def reset_elasticsearch_data():
        global testenv_instance
        testenv_instance.elasticsearch_process.reset_data()

    @staticmethod
    def stop_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.stop()

    @staticmethod
    def start_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.start()

    @staticmethod
    def riemann_workdir():
        global testenv_instance
        return testenv_instance.\
            celery_management_worker_process.\
            riemann_config_dir

    @staticmethod
    def _get_manager_root():
        # Walk up from this file: <root>/tests/testenv/__init__.py -> <root>.
        init_file = __file__
        testenv_dir = dirname(init_file)
        tests_dir = dirname(testenv_dir)
        manager_dir = dirname(tests_dir)
        return manager_dir

    @staticmethod
    def patch_source_urls(resources):
        """Rewrite absolute policy/trigger source URLs in types.yaml to
        fileserver-relative paths so they resolve against the local server."""
        with open(path.join(resources,
                            'cloudify', 'types', 'types.yaml')) as f:
            types_yaml = yaml.safe_load(f.read())
        for policy_type in types_yaml.get('policy_types', {}).values():
            in_path = '/cloudify/policies/'
            source = policy_type['source']
            if in_path in source:
                # keep everything from "cloudify/..." onward (drop the
                # leading slash of the matched path)
                source = source[source.index(in_path) + 1:]
            policy_type['source'] = source
        for policy_trigger in types_yaml.get('policy_triggers', {}).values():
            in_path = '/cloudify/triggers/'
            source = policy_trigger['source']
            if in_path in source:
                source = source[source.index(in_path) + 1:]
            policy_trigger['source'] = source
        with open(path.join(resources,
                            'cloudify', 'types', 'types.yaml'), 'w') as f:
            f.write(yaml.safe_dump(types_yaml))
class TestEnvironment(object):
    """Integration-test environment (later revision, with maintenance folder
    and dispatch-process handling).

    NOTE(review): this revision is visibly corrupted in start_manager_rest()
    -- see the note there. Tokens are preserved verbatim.
    """

    # Child-process handles; populated by the start_* methods, checked for
    # None during destroy() so partial setups tear down cleanly.
    manager_rest_process = None
    elasticsearch_process = None
    riemann_process = None
    file_server_process = None
    celery_management_worker_process = None

    def __init__(self, test_working_dir):
        """Prepare directory layout and logging config under
        ``test_working_dir``; no processes are started here (see create())."""
        super(TestEnvironment, self).__init__()
        self.test_working_dir = test_working_dir
        self.plugins_storage_dir = os.path.join(
            self.test_working_dir,
            'plugins-storage'
        )
        os.makedirs(self.plugins_storage_dir)
        self.fileserver_dir = path.join(self.test_working_dir, 'fileserver')
        self.maintenance_folder = path.join(self.test_working_dir,
                                            'maintenance')
        self.rest_service_log_level = 'DEBUG'
        self.rest_service_log_path = path.join(
            self.test_working_dir, 'cloudify-rest-service.log')
        self.rest_service_log_file_size_MB = 100
        self.rest_service_log_files_backup_count = 20
        self.securest_log_level = 'DEBUG'
        self.securest_log_file = path.join(
            self.test_working_dir, 'rest-security-audit.log')
        self.securest_log_file_size_MB = 100
        self.securest_log_files_backup_count = 20
        # NOTE(review): credentials appear scrubbed in this source snapshot;
        # the literal asterisks are what the code now ships with.
        self.amqp_username = '******'
        self.amqp_password = '******'
        self.events_and_logs_dir = \
            path.join(self.test_working_dir, 'tests-events-and-logs')
        os.mkdir(self.events_and_logs_dir)

    def create(self):
        """Start all environment services in dependency order; on any failure
        log the traceback, tear everything down, and re-raise."""
        try:
            logger.info('Setting up test environment... workdir=[{0}]'
                        .format(self.test_working_dir))
            # events/logs polling
            start_events_and_logs_polling(
                logs_handler_retriever=self._logs_handler_retriever)
            self.start_elasticsearch()
            self.start_riemann()
            self.start_fileserver()
            self.start_manager_rest()
            self.create_management_worker()
        # BaseException on purpose: even KeyboardInterrupt must trigger
        # teardown of the spawned processes.
        except BaseException as error:
            s_traceback = StringIO.StringIO()
            traceback.print_exc(file=s_traceback)
            logger.error("Error in test environment setup: %s", error)
            logger.error(s_traceback.getvalue())
            self.destroy()
            raise

    def create_management_worker(self):
        """Start the celery worker on the management queue and copy the mock
        plugins into its virtualenv."""
        mock_plugins_path = os.path.dirname(mock_plugins.__file__)
        os.environ['MOCK_PLUGINS_PATH'] = mock_plugins_path
        self.celery_management_worker_process = CeleryWorkerProcess(
            queues=['cloudify.management'],
            test_working_dir=self.test_working_dir,
            # we need high concurrency since all management and
            # central deployment operations/workflow will be executed
            # by this worker
            concurrency=10
        )
        # copy plugins to worker env
        mock_plugins_path = os.path.dirname(mock_plugins.__file__)
        shutil.copytree(
            src=mock_plugins_path,
            dst=self.celery_management_worker_process.envdir,
            ignore=shutil.ignore_patterns('*.pyc')
        )

    def start_riemann(self):
        riemann_config_path = self._get_riemann_config()
        libs_path = self._get_libs_path()
        self.riemann_process = RiemannProcess(riemann_config_path,
                                              libs_path)
        self.riemann_process.start()

    def start_manager_rest(self):
        from manager_rest.file_server import PORT as FS_PORT
        # NOTE(review): the source is corrupted/redacted from here on --
        # everything between the 'http://' URI and the "@timestamp'"
        # assignment has been lost (apparently by credential scrubbing).
        # The remainder of this method, several sibling methods (elasticsearch
        # / fileserver startup, destroy, etc.) and the opening of an
        # elasticsearch `es_log_handler(output, event)` closure are missing.
        # Tokens kept verbatim; recover from version control before editing.
        file_server_base_uri = 'http://*****:*****@timestamp'] = timestamp
        es_client = Elasticsearch()
        doc_type = event['type']
        # simulate log index
        res = es_client.index(index=index, doc_type=doc_type, body=event)
        if not res['created']:
            raise Exception('failed to write to elasticsearch')
        self.handle_logs = es_log_handler

    def _logs_handler_retriever(self):
        # Indirection so the polling thread always sees the current handler.
        return self.handle_logs

    @classmethod
    def _get_riemann_config(cls):
        # Path to the riemann-controller plugin's bundled manager.config.
        manager_dir = cls._get_manager_root()
        plugins_dir = os.path.join(manager_dir, 'plugins')
        riemann_dir = os.path.join(plugins_dir, 'riemann-controller')
        package_dir = os.path.join(riemann_dir, 'riemann_controller')
        resources_dir = os.path.join(package_dir, 'resources')
        manager_config = os.path.join(resources_dir, 'manager.config')
        return manager_config

    @classmethod
    def _get_libs_path(cls):
        return path.join(cls._get_manager_root(), '.libs')

    @staticmethod
    def reset_elasticsearch_data():
        global testenv_instance
        testenv_instance.elasticsearch_process.reset_data()

    @staticmethod
    def stop_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.stop()

    @staticmethod
    def read_celery_management_logs():
        global testenv_instance
        process = testenv_instance.celery_management_worker_process
        return process.try_read_logfile()

    @classmethod
    def stop_all_celery_processes(cls):
        # Hard kill -- SIGKILLs any stray celery workers on the machine.
        logger.info('Shutting down all celery processes')
        os.system("pkill -9 -f 'celery worker'")

    @classmethod
    def stop_dispatch_processes(cls):
        # Hard kill for task dispatch subprocesses spawned by the worker.
        logger.info('Shutting down all dispatch processes')
        os.system("pkill -9 -f 'cloudify/dispatch.py'")

    @staticmethod
    def start_celery_management_worker():
        global testenv_instance
        testenv_instance.celery_management_worker_process.start()

    @staticmethod
    def riemann_cleanup():
        # Wipe riemann's working dir and restart it with a clean slate.
        global testenv_instance
        shutil.rmtree(TestEnvironment.riemann_workdir())
        os.mkdir(TestEnvironment.riemann_workdir())
        testenv_instance.riemann_process.restart()

    @staticmethod
    def riemann_workdir():
        global testenv_instance
        return testenv_instance.\
            celery_management_worker_process.\
            riemann_config_dir

    @staticmethod
    def _get_manager_root():
        # Walk up from this file: <root>/tests/testenv/__init__.py -> <root>.
        init_file = __file__
        testenv_dir = dirname(init_file)
        tests_dir = dirname(testenv_dir)
        manager_dir = dirname(tests_dir)
        return manager_dir

    @staticmethod
    def patch_source_urls(resources):
        """Rewrite absolute policy/trigger source URLs in types.yaml to
        fileserver-relative paths so they resolve against the local server."""
        with open(path.join(resources,
                            'cloudify', 'types', 'types.yaml')) as f:
            types_yaml = yaml.safe_load(f.read())
        for policy_type in types_yaml.get('policy_types', {}).values():
            in_path = '/cloudify/policies/'
            source = policy_type['source']
            if in_path in source:
                # keep everything from "cloudify/..." onward (drop the
                # leading slash of the matched path)
                source = source[source.index(in_path) + 1:]
            policy_type['source'] = source
        for policy_trigger in types_yaml.get('policy_triggers', {}).values():
            in_path = '/cloudify/triggers/'
            source = policy_trigger['source']
            if in_path in source:
                source = source[source.index(in_path) + 1:]
            policy_trigger['source'] = source
        with open(path.join(resources,
                            'cloudify', 'types', 'types.yaml'), 'w') as f:
            f.write(yaml.safe_dump(types_yaml))
    def __init__(self, scope, use_mock_workers_installation=True):
        """Build the whole test environment: temp dirs, riemann,
        elasticsearch, plugin copies, celery workers, file server and the
        manager REST service. On any failure everything started so far is
        torn down via _destroy() and the exception re-raised.

        :param scope: a TestEnvironmentScope value controlling the
            environment's lifetime (validated up front).
        :param use_mock_workers_installation: when True (default), mock
            workflow plugins are installed and a single management worker is
            used; when False, the real plugins are copied and two management
            workers are started.
        """
        try:
            TestEnvironmentScope.validate(scope)

            logger.info("Setting up test environment... [scope={0}]".format(
                scope))
            self._scope = scope

            # temp directory
            self._tempdir = tempfile.mkdtemp(suffix="test", prefix="cloudify")
            self._plugins_tempdir = path.join(self._tempdir, "cosmo-work")
            self._riemann_tempdir = path.join(self._tempdir, "riemann")
            logger.info("Test environment will be stored in: %s",
                        self._tempdir)
            if not path.exists(self._plugins_tempdir):
                os.makedirs(self._plugins_tempdir)
            if not path.exists(self._riemann_tempdir):
                os.makedirs(self._riemann_tempdir)

            # events/logs polling
            start_events_and_logs_polling()

            # riemann
            riemann_config_path = self._get_riemann_config()
            libs_path = self._get_libs_path()
            self._riemann_process = RiemannProcess(riemann_config_path,
                                                   libs_path)
            self._riemann_process.start()

            # elasticsearch
            self._elasticsearch_process = ElasticSearchProcess()
            self._elasticsearch_process.start()

            # copy all plugins to app path
            # Prefer an installed `workflows` package; fall back to locating
            # the workflows directory relative to the tests tree.
            try:
                import workflows
                # workflows/__init__.py(c)
                workflow_plugin_path = path.abspath(workflows.__file__)
                # workflows/
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # package / egg folder
                workflow_plugin_path = path.dirname(workflow_plugin_path)
            except ImportError:
                # cloudify-manager/tests/plugins/__init__.py(c)
                workflow_plugin_path = path.abspath(plugins.__file__)
                # cloudify-manager/tests/plugins
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/tests
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager
                workflow_plugin_path = path.dirname(workflow_plugin_path)
                # cloudify-manager/workflows
                workflow_plugin_path = path.join(workflow_plugin_path,
                                                 'workflows')

            plugins_path = path.dirname(path.realpath(plugins.__file__))
            mock_workflow_plugins = path.dirname(path.realpath(
                mock_workflows.__file__))

            app_path = path.join(self._tempdir, "plugins")
            # copying plugins
            if not use_mock_workers_installation:
                for plugin_path in [plugins_path, workflow_plugin_path]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)
            else:
                # copying plugins and mock workflows
                for plugin_path in [plugins_path, mock_workflow_plugins]:
                    logger.info("Copying %s to %s", plugin_path, app_path)
                    distutils.dir_util.copy_tree(plugin_path, app_path)

                # copying the actual default install/uninstall workflow
                # plugin manually
                workflow_plugin_workflows_path = path.join(
                    workflow_plugin_path, 'workflows')
                app_workflows_path = path.join(app_path, 'workflows')
                logger.info("Copying %s to %s",
                            workflow_plugin_workflows_path,
                            app_workflows_path)
                distutils.dir_util.copy_tree(
                    workflow_plugin_workflows_path, app_workflows_path)

            # celery operations worker
            # if using real worker installation workflow then 2 workers are
            # needed on the management queue
            num_of_management_workers = \
                1 if use_mock_workers_installation else 2
            self._celery_operations_worker_process = \
                CeleryOperationsWorkerProcess(
                    self._tempdir,
                    self._plugins_tempdir,
                    MANAGER_REST_PORT,
                    num_of_management_workers)
            self._celery_operations_worker_process.start()

            # celery workflows worker
            self._celery_workflows_worker_process = \
                CeleryWorkflowsWorkerProcess(
                    self._tempdir,
                    self._plugins_tempdir,
                    MANAGER_REST_PORT,
                    use_mock_workers_installation)
            self._celery_workflows_worker_process.start()

            # workaround to update path
            manager_rest_path = \
                path.dirname(path.dirname(path.dirname(__file__)))
            manager_rest_path = path.join(manager_rest_path, 'rest-service')
            sys.path.append(manager_rest_path)

            # file server
            fileserver_dir = path.join(self._tempdir, 'fileserver')
            os.mkdir(fileserver_dir)
            from manager_rest.file_server import FileServer
            from manager_rest.file_server import PORT as FS_PORT
            from manager_rest.util import copy_resources
            self._file_server_process = FileServer(fileserver_dir)
            self._file_server_process.start()

            # copy resources (base yaml/radials etc)
            resources_path = path.abspath(__file__)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.dirname(resources_path)
            resources_path = path.join(resources_path, 'resources')
            copy_resources(fileserver_dir, resources_path)

            # manager rest
            file_server_base_uri = 'http://localhost:{0}'.format(FS_PORT)
            self._manager_rest_process = ManagerRestProcess(
                MANAGER_REST_PORT,
                fileserver_dir,
                file_server_base_uri,
                FILE_SERVER_BLUEPRINTS_FOLDER,
                FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER,
                FILE_SERVER_RESOURCES_URI,
                self._tempdir)
            self._manager_rest_process.start()
        # BaseException on purpose: even KeyboardInterrupt must trigger
        # teardown of the partially-started environment.
        except BaseException as error:
            logger.error("Error in test environment setup: %s", error)
            self._destroy()
            raise