def test_update_execution_status(self):
    """Verify execution status updates via the REST client.

    Checks that a status can be updated (after forcing a DB state the
    client's transition validation would otherwise reject) and that
    updating only the status resets the error field to ''.
    """
    dsl_path = resource("dsl/basic.yaml")
    _, execution_id = self.deploy_application(dsl_path,
                                              wait_for_execution=True)
    execution = self.client.executions.get(execution_id)
    # assertEquals is a deprecated alias of assertEqual (removed in
    # Python 3.12); use assertEqual throughout.
    self.assertEqual(Execution.TERMINATED, execution.status)
    # Manually updating the status, because the client checks for
    # correct transitions
    postgresql.run_query(
        "UPDATE executions SET status='started' "
        "WHERE id='{0}'".format(execution_id)
    )
    execution = self.client.executions.get(execution_id)
    self.assertEqual(Execution.STARTED, execution.status)
    execution = self.client.executions.update(execution_id,
                                              'pending',
                                              'some-error')
    self.assertEqual(Execution.PENDING, execution.status)
    self.assertEqual('some-error', execution.error)
    # verifying that updating only the status field also resets the
    # error field to an empty string
    execution = self.client.executions.update(execution_id,
                                              Execution.TERMINATED)
    self.assertEqual(Execution.TERMINATED, execution.status)
    self.assertEqual('', execution.error)
def test_update_execution_status(self):
    """Verify execution status updates via the REST client.

    Checks that a status can be updated (after forcing a DB state the
    client's transition validation would otherwise reject) and that
    updating only the status resets the error field to ''.
    """
    dsl_path = resource("dsl/basic.yaml")
    _, execution_id = self.deploy_application(dsl_path,
                                              wait_for_execution=True)
    execution = self.client.executions.get(execution_id)
    # assertEquals is a deprecated alias of assertEqual (removed in
    # Python 3.12); use assertEqual throughout.
    self.assertEqual(Execution.TERMINATED, execution.status)
    # Manually updating the status, because the client checks for
    # correct transitions
    postgresql.run_query(
        "UPDATE executions SET status='started' "
        "WHERE id='{0}'".format(execution_id)
    )
    execution = self.client.executions.get(execution_id)
    self.assertEqual(Execution.STARTED, execution.status)
    execution = self.client.executions.update(execution_id,
                                              'pending',
                                              'some-error')
    self.assertEqual(Execution.PENDING, execution.status)
    self.assertEqual('some-error', execution.error)
    # verifying that updating only the status field also resets the
    # error field to an empty string
    execution = self.client.executions.update(execution_id,
                                              Execution.TERMINATED)
    self.assertEqual(Execution.TERMINATED, execution.status)
    self.assertEqual('', execution.error)
def test_v_5_0_5_restore_snapshot_with_executions(self):
    """
    Validate the restore of new DB fields added in 5.0 or 5.0.5
    which relate to deployments and executions
    """
    snapshot_path = self._get_snapshot('snap_5.0.5_with_executions.zip')
    self._upload_and_restore_snapshot(snapshot_path)

    deployments = postgresql.run_query(
        "SELECT id, runtime_only_evaluation FROM deployments;")['all']
    self.assertEqual(set(deployments[0]), {'hello-world', False})

    executions = postgresql.run_query(
        "SELECT workflow_id, blueprint_id FROM executions;")['all']
    # executions of `create_deployment_environment` and `install` have
    # blueprint ids
    expected_rows = {
        0: ('install', 'hello-world'),
        2: ('create_deployment_environment', 'hello-world'),
    }
    for row, (workflow_id, blueprint_id) in expected_rows.items():
        self.assertEqual(executions[row][0], workflow_id)
        self.assertEqual(executions[row][1], blueprint_id)

    # index added in 5.0.5. with only one instance per node, all indexes=1
    instances = postgresql.run_query(
        "SELECT index FROM node_instances;")['all']
    for row in instances:
        self.assertEqual(row[0], 1)
def setUp(self):
    """Update postgres timezone and create a deployment."""
    super(TimezoneTest, self).setUp()
    # Apply the test timezone to the DB user so new sessions use it.
    conf = get_postgres_conf()
    query = "ALTER USER {} SET TIME ZONE '{}'".format(
        conf.username, self.TIMEZONE)
    run_query(self.env.container_id, query)
def setUpClass(cls):
    """Configure database timezone."""
    super(EventsAlternativeTimezoneTest, cls).setUpClass()
    # The container is launched once per unittest.TestCase class, so
    # the timezone only needs to be set at the class level.  Tables
    # are re-created between test cases, but this setting persists.
    conf = get_postgres_conf()
    query = "ALTER USER {} SET TIME ZONE '{}'".format(
        conf.username, cls.TIMEZONE)
    run_query(query)
def setUpClass(cls):
    """Configure database timezone."""
    super(EventsAlternativeTimezoneTest, cls).setUpClass()
    # One container per TestCase class: set the timezone once here.
    # Tables are re-created between test cases, but the user-level
    # timezone configuration is preserved.
    username = get_postgres_conf().username
    run_query(
        "ALTER USER {} SET TIME ZONE '{}'".format(username, cls.TIMEZONE))
def test_v_5_0_5_restore_snapshot(self):
    """
    Validate the restore of new DB fields added in 5.0 or 5.0.5
    """
    snapshot_path = self._get_snapshot(
        'snap_5.0.5_with_updated_deployment.zip')
    self._upload_and_restore_snapshot(snapshot_path)

    managers = postgresql.run_query(
        "SELECT node_id FROM managers;")['all']
    self.assertGreater(len(managers[0][0]), 10)

    brokers = postgresql.run_query(
        "SELECT is_external, node_id FROM rabbitmq_brokers;")['all']
    is_external, node_id = brokers[0]
    self.assertFalse(is_external)
    self.assertGreater(len(node_id), 10)
def setUpClass(cls):
    """Configure database timezone."""
    super(EventsAlternativeTimezoneTest, cls).setUpClass()
    # One container per TestCase class: configure the timezone once.
    # Tables are re-created between test cases, but this setting stays.
    conf = get_postgres_conf()
    query = "ALTER USER {} SET TIME ZONE '{}'".format(
        conf.username, cls.TIMEZONE)
    run_query(query)
    # restart all users of the db so that they get a new session which
    # uses the just-set timezone
    docl.execute(
        "systemctl restart cloudify-amqp-postgres cloudify-restservice")
def setUp(self):
    """Update postgres timezone and create a deployment."""
    # Sanity check: the database timezone must already be configured.
    current_timezone = run_query('SHOW TIME ZONE')['all'][0][0]
    self.assertEqual(current_timezone, self.TIMEZONE)
    super(TimezoneTest, self).setUp()
def setUp(self):
    """Update postgres timezone and create a deployment."""
    # Guard against misconfiguration: the DB timezone has to match the
    # one this test class expects before any events are generated.
    result = run_query('SHOW TIME ZONE')
    self.assertEqual(result['all'][0][0], self.TIMEZONE)
    super(TimezoneTest, self).setUp()
def setUp(self):
    """Update postgres timezone and create a deployment.

    Records start/stop timestamps bracketing the deployment creation,
    so tests can query for events inside that window.
    """
    # Make sure that database timezone is correctly set
    query_result = run_query('SHOW TIME ZONE')
    self.assertEqual(query_result['all'][0][0], self.TIMEZONE)
    self.start_timestamp = datetime.utcnow().isoformat()
    super(EventsAlternativeTimezoneTest, self).setUp()
    # log storing is async, add a few seconds to allow for that;
    # otherwise events stored just after setUp fall outside the window
    # and the test becomes flaky.
    self.stop_timestamp = \
        (datetime.utcnow() + timedelta(seconds=3)).isoformat()
def setUp(self):
    """Update postgres timezone and create a deployment."""
    # Make sure that database timezone is correctly set
    current_timezone = run_query('SHOW TIME ZONE')['all'][0][0]
    self.assertEqual(current_timezone, self.TIMEZONE)
    self.start_timestamp = datetime.utcnow().isoformat()
    super(EventsAlternativeTimezoneTest, self).setUp()
    # log storing is async, add a few seconds to allow for that
    grace = timedelta(seconds=3)
    self.stop_timestamp = (datetime.utcnow() + grace).isoformat()
def test_v_5_0_5_restore_snapshot_and_inter_deployment_dependencies(self):
    """Validate restore of the inter-deployment dependencies table."""
    snapshot_path = self._get_snapshot(
        'snap_5.0.5_with_component_openstack.zip')
    self._upload_and_restore_snapshot(snapshot_path)
    rows = postgresql.run_query(
        "SELECT _source_deployment, _target_deployment, "
        "dependency_creator FROM inter_deployment_dependencies;")['all']
    # Every expected openstack dependency must appear among the
    # restored rows, and the component dependency must be listed too.
    expected = self._openstack_inter_deployment_dependencies()
    assert expected.issubset(set(rows))
    self._assert_component_listed(rows)
def test_v_4_5_5_restore_snapshot_with_executions(self):
    """
    Validate the restore of executions
    """
    snapshot_path = self._get_snapshot('snap_4.5.5_with_executions.zip')
    self._upload_and_restore_snapshot(snapshot_path)
    rows = postgresql.run_query(
        "SELECT workflow_id, token FROM executions;")['all']
    # The executions from the snapshot don't have a token
    for _workflow_id, token in rows[:3]:
        self.assertIsNone(token)
    # The execution of the restore snapshot has a token
    restore_workflow, restore_token = rows[5]
    self.assertIsNotNone(restore_token)
    self.assertGreater(len(restore_token), 10)
    self.assertEqual(restore_workflow, 'restore_snapshot')
def test_v_4_5_5_restore_snapshot_with_executions(self):
    """
    Validate the restore of executions
    """
    snapshot_path = self._get_snapshot('snap_4.5.5_with_executions.zip')
    self._upload_and_restore_snapshot(snapshot_path)
    executions = postgresql.run_query(
        "SELECT workflow_id, token FROM executions;")['all']
    # The executions from the snapshot don't have a token
    for row in executions[:3]:
        self.assertIsNone(row[1])
    # The execution of the restore snapshot has a token
    restore_row = executions[5]
    self.assertIsNotNone(restore_row[1])
    self.assertGreater(len(restore_row[1]), 10)
    self.assertEqual(restore_row[0], 'restore_snapshot')
def test_v_5_0_5_restore_snapshot_with_updated_deployment(self):
    """
    Validate the restore of an updated deployment, with DB fields
    added in 5.0 or 5.0.5
    """
    snapshot_path = self._get_snapshot(
        'snap_5.0.5_with_updated_deployment.zip')
    self._upload_and_restore_snapshot(snapshot_path)
    dep_updates = postgresql.run_query(
        "SELECT central_plugins_to_install, central_plugins_to_uninstall,"
        "runtime_only_evaluation FROM deployment_updates;")['all']
    install_blob, uninstall_blob, runtime_only = dep_updates[0]
    # NOTE(review): pickle is only safe here because the data comes
    # from our own snapshot; never use it on untrusted input.
    plugins_to_install = pickle.loads(install_blob)
    plugins_to_uninstall = pickle.loads(uninstall_blob)
    for plugin in plugins_to_install:
        self.assertEqual(plugin['package_name'],
                         'cloudify-utilities-plugin')
        self.assertEqual(plugin['package_version'], '1.14.0')
    self.assertListEqual(plugins_to_uninstall, [])
    self.assertFalse(runtime_only)
def _test_secrets_restored(self, snapshot_name):
    """Restore *snapshot_name* and verify its secrets survived intact."""
    snapshot_path = self._get_snapshot(snapshot_name)
    self._upload_and_restore_snapshot(snapshot_path)

    # The secrets values as in the snapshot
    assert len(self.client.secrets.list(_include=['key'])) == 3
    secret_string = self.client.secrets.get('sec1')
    secret_file = self.client.secrets.get('sec3')
    assert secret_string.value == 'top_secret'
    assert 'test_mail' in secret_file.value

    # Validate the value is encrypted in the DB
    result = postgresql.run_query("SELECT value "
                                  "FROM secrets "
                                  "WHERE id='sec1';")
    stored_value = result['all'][0][0]
    assert stored_value != 'top_secret'

    # The secrets values are not hidden
    assert not secret_string.is_hidden_value
    assert not secret_file.is_hidden_value
def _test_secrets_restored(self, snapshot_name):
    """Restore *snapshot_name* and verify its secrets survived intact."""
    self._upload_and_restore_snapshot(self._get_snapshot(snapshot_name))

    # The secrets values as in the snapshot
    secrets = self.client.secrets.list(_include=['key'])
    assert len(secrets) == 3
    string_secret = self.client.secrets.get('sec1')
    file_secret = self.client.secrets.get('sec3')
    assert string_secret.value == 'top_secret'
    assert 'test_mail' in file_secret.value

    # Validate the value is encrypted in the DB
    db_rows = postgresql.run_query("SELECT value "
                                   "FROM secrets "
                                   "WHERE id='sec1';")['all']
    assert db_rows[0][0] != 'top_secret'

    # The secrets values are not hidden
    assert (not string_secret.is_hidden_value and
            not file_secret.is_hidden_value)
def setUp(self):
    """Start each test from a clean licenses table."""
    super(TestLicense, self).setUp()
    # Drop any license left over from a previous test case.
    cleanup_query = "DELETE FROM licenses"
    postgresql.run_query(cleanup_query)
def delete_provider_context():
    """Remove all rows from the provider_context table."""
    query = 'DELETE from provider_context'
    postgresql.run_query(query)
def _manually_update_execution_status(new_status, id):
    """Force an execution's status directly in the database.

    Bypasses the REST client's status-transition validation.
    NOTE: the ``id`` parameter shadows the builtin; the name is kept
    for compatibility with existing callers.
    """
    query = ("UPDATE executions SET status = '{0}' WHERE id = '{1}'"
             .format(new_status, id))
    run_query(query)
def _manually_update_execution_status(new_status, id):
    """Force an execution's status directly in the database.

    Bypasses the REST client's status-transition validation.
    NOTE: the ``id`` parameter shadows the builtin; the name is kept
    for compatibility with existing callers.
    """
    update_query = "UPDATE executions SET status = '{0}' WHERE id = '{1}'"
    run_query(update_query.format(new_status, id))
def setUp(self):
    """Start each test from a clean licenses table."""
    super(TestLicense, self).setUp()
    # Ensure no license from a previous test leaks into this one.
    postgresql.run_query("DELETE FROM licenses")