def start(self):
    process = CeleryWorkerProcess(
        queues=[self.agent_queue],
        name=self.agent_name,
        test_working_dir=os.environ['TEST_WORKING_DIR'],
        additional_includes=self._build_additional_includes())
    process.start()
def start(self):
    process = CeleryWorkerProcess(
        queues=[self.agent_name],
        test_working_dir=os.environ['TEST_WORKING_DIR'],
        additional_includes=self._build_additional_includes())
    process.start()
def create(self):
    process = CeleryWorkerProcess(
        queues=[self.agent_queue],
        name=self.agent_name,
        test_working_dir=os.environ['TEST_WORKING_DIR'])
    process.create_dirs()
    self._install_plugin('cloudify_agent', process.envdir)
def stop(self):
    process = CeleryWorkerProcess(
        queues=[self.agent_queue],
        name=self.agent_name,
        test_working_dir=os.environ['TEST_WORKING_DIR'])
    process.stop()
def test_plugin_workdir(self):
    filename = 'test_plugin_workdir.txt'
    host_content = 'HOST_CONTENT'
    central_content = 'CENTRAL_CONTENT'
    dsl_path = resource("dsl/plugin_workdir.yaml")
    deployment, _ = deploy(dsl_path, inputs={
        'filename': filename,
        'host_content': host_content,
        'central_content': central_content
    })
    host_id = self.client.node_instances.list(node_id='host').items[0].id
    from testenv import testenv_instance
    test_workdir = testenv_instance.test_working_dir
    central_agent = CeleryWorkerProcess(['cloudify.management'], test_workdir)
    host_agent = CeleryWorkerProcess([host_id], test_workdir)
    # the central worker writes the file under the deployment's workdir,
    # while the host worker writes it directly under its own workdir
    central_file = os.path.join(
        central_agent.workdir, 'deployments', deployment.id,
        'plugins', 'testmockoperations', filename)
    host_file = os.path.join(
        host_agent.workdir, 'plugins', 'testmockoperations', filename)
    with open(central_file) as f:
        self.assertEqual(central_content, f.read())
    with open(host_file) as f:
        self.assertEqual(host_content, f.read())
def create(self):
    process = CeleryWorkerProcess(
        queues=[self.agent_queue],
        name=self.agent_name,
        test_working_dir=os.environ['TEST_WORKING_DIR'])
    process.create_dirs()
    self._install_plugin('cloudify_agent', process.envdir)
def get_plugin_data(self, plugin_name, deployment_id=None):
    """
    Retrieve the plugin state for a certain deployment.

    :param deployment_id: the deployment id in question.
    :param plugin_name: the plugin in question.
    :return: plugin data relevant for the deployment.
    :rtype: dict
    """
    global testenv_instance
    # create a worker instance to get the workdir
    worker = CeleryWorkerProcess(
        queues=['cloudify.management'],
        test_working_dir=testenv_instance.test_working_dir)
    return self._get_plugin_data(
        plugin_name=plugin_name,
        deployment_id=deployment_id,
        worker_work_dir=worker.workdir)
def install(self):
    ctx.logger.info('Installing worker {0}'.format(self.agent_name))
    # process based agents start with an empty virtualenv,
    # so we need to copy the plugin installer
    # into the worker env folder.
    process = CeleryWorkerProcess(
        queues=[self.agent_name],
        test_working_dir=os.environ['TEST_WORKING_DIR'])
    process.create_dirs()
    self._install_plugin('plugin_installer', process.envdir)
    self._install_plugin('worker_installer', process.envdir)
    ctx.logger.info('Installed worker {0}'.format(self.agent_name))
def create_management_worker(self):
    mock_plugins_path = os.path.dirname(mock_plugins.__file__)
    os.environ['MOCK_PLUGINS_PATH'] = mock_plugins_path
    self.celery_management_worker_process = CeleryWorkerProcess(
        queues=['cloudify.management'],
        test_working_dir=self.test_working_dir,
        # we need high concurrency since all management and
        # central deployment operations/workflows will be executed
        # by this worker
        concurrency=10)

    # copy plugins to the worker env
    mock_plugins_path = os.path.dirname(mock_plugins.__file__)
    shutil.copytree(src=mock_plugins_path,
                    dst=self.celery_management_worker_process.envdir,
                    ignore=shutil.ignore_patterns('*.pyc'))
def create_management_worker(self):
    mock_plugins_path = os.path.dirname(mock_plugins.__file__)
    os.environ['MOCK_PLUGINS_PATH'] = mock_plugins_path
    self.celery_management_worker_process = CeleryWorkerProcess(
        queues=['cloudify.management'],
        test_working_dir=self.test_working_dir,
        # these plugins are already installed,
        # so we just need to append them to the includes.
        # note that these are not mocks, but the actual
        # production code plugins.
        additional_includes=[
            'riemann_controller.tasks',
            'cloudify_system_workflows.deployment_environment',
            'cloudify.plugins.workflows',
            'diamond_agent.tasks',
            'script_runner.tasks',
            # modules in the agent intended for backwards compatibility
            'worker_installer.tasks',
            'windows_agent_installer.tasks',
            'plugin_installer.tasks',
            'windows_plugin_installer.tasks',
        ],
        # we need higher concurrency since
        # 'deployment_environment.create' calls
        # 'plugin_installer.install' as a sub-task,
        # and they are both executed inside this worker
        concurrency=2)

    # copy plugins to the worker env
    mock_plugins_path = os.path.dirname(mock_plugins.__file__)
    shutil.copytree(src=mock_plugins_path,
                    dst=self.celery_management_worker_process.envdir,
                    ignore=shutil.ignore_patterns('*.pyc'))
def stop(self):
    process = CeleryWorkerProcess(
        queues=[self.agent_queue],
        name=self.agent_name,
        test_working_dir=os.environ['TEST_WORKING_DIR'])
    process.stop()