def plugin_already_uploaded(wagon_path):
    """
    Check if a plugin is already loaded on the manager.

    :param wagon_path: Path to a wagon (local file path or URL).
    :return: Bool.
    """
    # A '://' means the wagon is referenced by URL, not a local path.
    if '://' in wagon_path:
        wagon_metadata = show(wagon_path)
    else:
        wagon_metadata = show(find_wagon_local_path(wagon_path))
    plugin_name = wagon_metadata["package_name"]
    plugin_version = wagon_metadata["package_version"]
    plugin_distribution = \
        wagon_metadata["build_server_os_properties"]["distribution"]
    for plugin in cloudify_exec('cfy plugins list'):
        logger.info('CHECKING if {0} {1} {2} in {3}'.format(
            plugin_name,
            plugin_version,
            plugin_distribution,
            plugin))
        compare_name = plugin['package_name']
        compare_version = plugin['package_version']
        # Fall back to the YAML URL path when the plugin entry reports
        # no distribution.
        compare_distro = plugin.get('distribution', '').lower() or plugin.get(
            'yaml_url_path', '')
        if plugin_name.replace('_', '-') in compare_name and \
                plugin_version == compare_version and \
                plugin_distribution.lower() in compare_distro:
            return True
    # Bug fix: return an explicit False (the docstring promises a Bool)
    # instead of implicitly returning None.
    return False
def find_executions_to_cancel(deployment_id):
    """
    Find all the executions to cancel.

    :param deployment_id: The deployment whose executions are inspected.
    :return: List of IDs of pending/started install or update executions.
    """
    executions = executions_list(deployment_id)
    try:
        # Candidates are install/update workflows that have not finished.
        candidates = [
            execution['id'] for execution in executions
            if execution['workflow_id'] in ['install', 'update'] and
            execution['status'].lower() in ['pending', 'started']
        ]
        # For debugging
        logger.info(
            "these are potential executions to cancel: {executions}".format(
                executions=candidates))
    except (IndexError, KeyError):
        logger.info(
            'Workflows to cancel for deployment {dep_id} was not '
            'found.'.format(
                dep_id=deployment_id))
        candidates = []
    return candidates
def find_install_execution_to_resume(deployment_id):
    """
    Find the last install execution to resume.

    :param deployment_id: The deployment to search.
    :return: The ID of the most recent resumable install execution.
    :raises EcosystemTestException: If none is found or it is not in a
        resumable state.
    """
    executions = executions_list(deployment_id)
    try:
        installs = [e for e in executions if 'install' == e['workflow_id']]
        last_install = installs[-1]
        # For debugging
        logger.info("these are potential executions to resume")
        logger.info(installs)
    except (IndexError, KeyError):
        raise EcosystemTestException(
            'Workflow install to resume for deployment {dep_id} was not '
            'found.'.format(
                dep_id=deployment_id))
    # Only failed/cancelled executions can be resumed.
    if last_install['status'].lower() not in ['failed', 'cancelled']:
        raise EcosystemTestException(
            'Found install execution with id: {id} but with status {status},'
            'can`t resume this execution'.format(
                id=last_install['id'], status=last_install['status']))
    return last_install['id']
def secrets_create(name, is_file=False):
    """
    Create a secret on the manager.

    :param name: The secret key; also the env var holding the b64 value.
    :param is_file: Whether to create the secret from a file.
    :return: Command output.
    :raises EcosystemTestException: When the env var is not set.
    """
    logger.info('Creating secret: {0}.'.format(name))
    try:
        encoded = os.environ[name].encode('utf-8')
    except KeyError:
        raise EcosystemTestException(
            'Secret env var not set {0}.'.format(name))
    value = base64.b64decode(encoded).decode('ascii')
    if not is_file:
        return cloudify_exec('cfy secrets create -u {0} -s {1}'.format(
            name, value), get_json=False, log=False)
    # File-based secrets are written to a temp file, then copied into
    # the manager container before the CLI call.
    with NamedTemporaryFile(mode='w+', delete=True) as outfile:
        outfile.write(value)
        outfile.flush()
        cmd = 'cfy secrets create -u {0} -f {1}'.format(
            name, copy_file_to_docker(outfile.name))
        return cloudify_exec(cmd, get_json=False, log=False)
def upload_test_plugins(plugins,
                        plugin_test,
                        execute_bundle_upload=True,
                        workspace_path=None):
    """
    Upload all plugins that we need to execute the test.

    :param plugins: A list of additional plugins to upload. (Like ones
        that are not in the bundle (Openstack 3, Host Pool).
    :param plugin_test: Whether to install plugins from workspace.
    :param execute_bundle_upload: Whether to install a bundle.
    :param workspace_path: Workspace to search for a local bundle archive.
    :return:
    """
    # Bug fix: copy so appending workspace plugins below does not mutate
    # the caller's list.
    plugins = list(plugins or [])
    if plugin_test:
        for plugin_pair in get_test_plugins():
            plugins.append(plugin_pair)
    if execute_bundle_upload:
        bundle_path = get_bundle_from_workspace(workspace_path=workspace_path)
        if bundle_path:
            cloudify_exec(
                'cfy plugins bundle-upload --path {bundle_path}'.format(
                    bundle_path=copy_file_to_docker(bundle_path)),
                get_json=False)
        else:
            cloudify_exec('cfy plugins bundle-upload', get_json=False)
    for plugin in plugins:
        # Brief pause between uploads to avoid hammering the manager.
        sleep(3)
        output = plugins_upload(plugin[0], plugin[1])
        logger.info('Uploaded plugin: {0}'.format(output))
    logger.info('Plugins list: {0}'.format(cloudify_exec('cfy plugins list')))
def verify_endpoint(endpoint, endpoint_value):
    """
    Verify that an HTTP endpoint responds with the expected status code.

    :param endpoint: The URL to check.
    :param endpoint_value: The expected HTTP status code.
    :raises EcosystemTestException: When the code does not match.
    """
    logger.info('Checking Endpoint.')
    conn = urlopen(endpoint)
    try:
        code = conn.getcode()
    finally:
        # Bug fix: close the connection instead of leaking the socket.
        conn.close()
    if code == endpoint_value:
        logger.info('Endpoint up!')
        return
    raise EcosystemTestException('Endpoint {e} not up {result}.'.format(
        e=endpoint, result=endpoint_value))
def run_user_defined_check(user_defined_check, user_defined_check_params):
    """
    Execute an optional user-supplied check callable.

    :param user_defined_check: A callable to run, or falsy to skip.
    :param user_defined_check_params: Keyword args for the callable.
    :raises EcosystemTestException: If the check is truthy but not callable.
    """
    if not user_defined_check:
        return
    if not callable(user_defined_check):
        raise EcosystemTestException('User defined check is not callable!')
    logger.info('Run user defined check...')
    user_defined_check(**(user_defined_check_params or {}))
def log_events(execution_id):
    """
    Log task error causes from an execution's event stream.

    :param execution_id: The execution whose events are scanned.
    :return:
    """
    for event in events_list(execution_id):
        # Robustness fix: events without error causes (or without a
        # 'context' key at all) are skipped instead of raising KeyError.
        causes = event.get('context', {}).get('task_error_causes')
        if causes:
            logger.info(causes)
def start_install_workflow(test_name, timeout):
    """
    Kick off the install workflow and wait for it to complete.

    :param test_name: The deployment ID.
    :param timeout: Seconds to wait for the install workflow.
    """
    logger.info(GREEN + 'Installing...' + RESET)
    wait_timeout = timeout
    try:
        executions_list(test_name)
        executions_start('install', test_name, timeout)
    except EcosystemTimeout:
        # Give 5 seconds grace.
        executions_list(test_name)
        wait_timeout = 10
    wait_for_execution(test_name, 'install', wait_timeout)
def create_test_secrets(secrets=None):
    """
    Create secrets on the manager.

    :param secrets: Mapping of secret name -> is_file flag.
    :return:
    """
    for secret_name, from_file in (secrets or {}).items():
        secrets_create(secret_name, from_file)
    logger.info('Secrets list: {0}'.format(cloudify_exec('cfy secrets list')))
def dump_command_output():
    # NOTE(review): this top-level function relies on the free names
    # ``log``, ``stdout_file``, ``stdout_file_read``, ``stdout_color``,
    # ``stderr_file`` and ``stderr_file_read`` being bound somewhere in
    # an enclosing/module scope. An identical closure exists inside
    # ``handle_process`` where those names ARE bound — confirm whether
    # this copy is dead code left over from an extraction.
    if log:
        # Flush and relay anything the process wrote since the last call.
        stdout_file.flush()
        for stdout_line in stdout_file_read.readlines():
            logger.info(
                stdout_color +
                'Execution output: {0}'.format(stdout_line) + RESET)
        stderr_file.flush()
        for stderr_line in stderr_file_read.readlines():
            logger.error(
                RED + 'Execution error: {0}'.format(stderr_line) + RESET)
def resume_install_workflow(test_name, timeout):
    """
    Resume the last resumable install execution of a deployment.

    :param test_name: The deployment ID.
    :param timeout: Seconds to wait for the resumed workflow.
    """
    execution_id = find_install_execution_to_resume(test_name)
    logger.debug('execution to resume: {id}'.format(id=execution_id))
    wait_timeout = timeout
    try:
        logger.info('resuming...')
        executions_resume(execution_id, timeout)
    except EcosystemTimeout:
        # Give 5 seconds grace.
        executions_list(test_name)
        wait_timeout = 10
    wait_for_execution(test_name, 'install', wait_timeout)
def _basic_blueprint_test(blueprint_file_name,
                          test_name,
                          inputs=None,
                          timeout=None,
                          endpoint_name=None,
                          endpoint_value=None):
    """
    Simple blueprint install/uninstall test.

    :param blueprint_file_name: Path to the blueprint to test.
    :param test_name: Blueprint/deployment ID for the test.
    :param inputs: Inputs path, '' for explicitly no inputs, or None to
        default to inputs/test-inputs.yaml next to the blueprint.
    :param timeout: Seconds to wait for workflows (defaults to TIMEOUT).
    :param endpoint_name: Optional deployment output to verify after install.
    :param endpoint_value: Expected HTTP status for the endpoint check.
    :return:
    """
    timeout = timeout or TIMEOUT
    # Empty string means "explicitly no inputs"; None means "use default".
    if inputs != '':
        inputs = inputs or os.path.join(
            os.path.dirname(blueprint_file_name), 'inputs/test-inputs.yaml')
    logger.info('Blueprints list: {0}'.format(
        cloudify_exec('cfy blueprints list')))
    blueprints_upload(blueprint_file_name, test_name)
    logger.info('Deployments list: {0}'.format(
        cloudify_exec('cfy deployments list')))
    deployments_create(test_name, inputs)
    sleep(5)
    # Consistency fix: reuse the shared helper instead of duplicating its
    # EcosystemTimeout/grace handling inline.
    start_install_workflow(test_name, timeout)
    if endpoint_name and endpoint_value:
        verify_endpoint(
            get_deployment_output_by_name(test_name, endpoint_name),
            endpoint_value)
    logger.info(BLUE + 'Uninstalling...' + RESET)
    executions_start('uninstall', test_name, timeout)
    wait_for_execution(test_name, 'uninstall', timeout)
    try:
        deployment_delete(test_name)
        blueprints_delete(test_name)
    except Exception as e:
        logger.info(
            RED + 'Failed to delete blueprint, {0}'.format(str(e)) + RESET)
def wait_for_execution(deployment_id, workflow_id, timeout):
    """
    Wait for execution to end.

    :param deployment_id: The deployment whose execution is polled.
    :param workflow_id: The workflow to wait for.
    :param timeout: Seconds before raising EcosystemTimeout.
    :return:
    :raises EcosystemTimeout: When the execution does not finish in time.
    :raises EcosystemTestException: When the workflow is missing or failed.
    """
    logger.info('Waiting for execution deployment ID '
                '{0} workflow ID {1}'.format(deployment_id, workflow_id))
    start = datetime.now()
    while True:
        if datetime.now() - start > timedelta(seconds=timeout):
            raise EcosystemTimeout('Test timed out.')
        executions = executions_list(deployment_id)
        try:
            # Most recent execution of the requested workflow.
            ex = [e for e in executions
                  if workflow_id == e['workflow_id']][-1]
        except (IndexError, KeyError):
            raise EcosystemTestException(
                'Workflow {0} for deployment {1} was not found.'.format(
                    workflow_id, deployment_id))
        status = ex['status'].lower()
        if status == 'completed':
            logger.info('{0}:{1} finished!'.format(deployment_id,
                                                   workflow_id))
            break
        elif status in ('pending', 'started'):
            # Bug fix: 'started' was previously compared against the raw
            # (unlowered) status, so 'Started' fell into the generic branch.
            logger.info('{0}:{1} is pending/started.'.format(
                deployment_id, workflow_id))
        elif status == 'failed':
            raise EcosystemTestException('Execution failed {0}:{1}'.format(
                deployment_id, workflow_id))
        else:
            logger.info('Execution still running. Status: {0}'.format(
                ex['status']))
        sleep(5)
def export_secret_to_environment(name):
    """
    Add secret to envvar.

    :param name: The secret key.
    :return:
    :raises EcosystemTestException: When the env var is not set.
    """
    logger.info('Adding envvar: {0}.'.format(name))
    try:
        raw = os.environ[name]
    except KeyError:
        raise EcosystemTestException(
            'Secret env var not set {0}.'.format(name))
    decoded = base64.b64decode(raw)
    # b64decode returns bytes; export a str.
    if isinstance(decoded, bytes):
        decoded = decoded.decode(encoding='UTF-8')
    os.environ[name.upper()] = decoded
def plugins_upload(wagon_path, yaml_path):
    """
    Upload a wagon and plugin YAML to the manager.

    :param wagon_path: Path to the wagon on the manager.
    :param yaml_path: Path to the YAML on the manager container.
    :return: Command output, or None when the plugin is already uploaded.
    """
    logger.info('Uploading plugin: {0} {1}'.format(wagon_path, yaml_path))
    if plugin_already_uploaded(wagon_path):
        return
    # Local files must first be copied into the manager container.
    if os.path.exists(wagon_path):
        wagon_path = copy_file_to_docker(wagon_path)
    if os.path.exists(yaml_path):
        yaml_path = copy_file_to_docker(yaml_path)
    return cloudify_exec(
        'cfy plugins upload {0} -y {1}'.format(wagon_path, yaml_path),
        get_json=False)
def license_upload():
    """
    Upload the license to the manager.

    :return: Command output.
    :raises EcosystemTestException: When the license env var is not set.
    """
    logger.info('Uploading manager license.')
    try:
        # Renamed from ``license`` to avoid shadowing the builtin.
        license_content = base64.b64decode(os.environ[LICENSE_ENVAR_NAME])
    except KeyError:
        # Bug fix: the '{0}' placeholder was never formatted.
        raise EcosystemTestException(
            'License env var not set {0}.'.format(LICENSE_ENVAR_NAME))
    file_temp = NamedTemporaryFile(delete=False)
    with open(file_temp.name, 'wb') as outfile:
        outfile.write(license_content)
    return cloudify_exec('cfy license upload {0}'.format(
        copy_file_to_docker(file_temp.name)), get_json=False)
def first_invocation_test_path(blueprint_file_name,
                               test_name,
                               inputs=None,
                               timeout=None,
                               uninstall_on_success=True,
                               user_defined_check=None,
                               user_defined_check_params=None
                               ):
    """
    Run the full blueprint test flow for a first invocation.

    :param blueprint_file_name: Path to the blueprint.
    :param test_name: Blueprint/deployment ID.
    :param inputs: Deployment inputs.
    :param timeout: Workflow timeout in seconds.
    :param uninstall_on_success: Whether to uninstall after a good install.
    :param user_defined_check: Optional callable run after install.
    :param user_defined_check_params: Kwargs for the user defined check.
    """
    def _log_listing(label, command):
        # Log the manager's current listing for the given resource type.
        logger.info('{0} list: {1}'.format(label, cloudify_exec(command)))

    _log_listing('Blueprints', 'cfy blueprints list')
    blueprints_upload(blueprint_file_name, test_name)
    _log_listing('Deployments', 'cfy deployments list')
    deployments_create(test_name, inputs)
    sleep(5)
    start_install_workflow(test_name, timeout)
    run_user_defined_check(user_defined_check, user_defined_check_params)
    if uninstall_on_success:
        handle_uninstall_on_success(test_name, timeout)
def handle_deployment_update(blueprint_file_name,
                             update_bp_name,
                             test_name,
                             inputs,
                             timeout):
    """
    Upload an updated blueprint and run a deployment update.

    :param blueprint_file_name: Path to the updated blueprint.
    :param update_bp_name: ID for the updated blueprint.
    :param test_name: The deployment to update.
    :param inputs: Inputs for the update.
    :param timeout: Seconds to wait for the update workflow.
    """
    logger.info('updating deployment...')
    wait_timeout = timeout
    try:
        logger.info('Blueprints list: {0}'.format(
            cloudify_exec('cfy blueprints list')))
        blueprints_upload(blueprint_file_name, update_bp_name)
        deployment_update(test_name, update_bp_name, inputs, timeout)
    except EcosystemTimeout:
        # Give 5 seconds grace.
        executions_list(test_name)
        wait_timeout = 10
    wait_for_execution(test_name, 'update', wait_timeout)
def replace_plugin_package_on_manager(package_name,
                                      plugin_version,
                                      directory,
                                      python_version='python3.6'):
    """Replace plugin code in the manager's path.

    Example usage: https://github.com/cloudify-cosmo/
    cloudify-vcloud-plugin/blob/75a9ab891edc249a7a7f82b0f855bd79fcd22d9e/
    cicd/update_test_manager.py#L8
    Then call the code like this: python .cicd/update_test_manager.py

    :param package_name: Name of a package.
    :param plugin_version: The plugin's version.
    :param directory: The plugin's directory.
    :param python_version: The python version name.
    :return:
    """
    plugin_dir_name = directory.split('/')[-1]
    manager_package_path = (
        '/opt/mgmtworker/env/plugins/default_tenant/'
        '{plugin}/{version}/lib/{python}/'
        'site-packages/{package}'.format(
            package=package_name,
            version=plugin_version,
            python=python_version,
            plugin=plugin_dir_name))
    source_path = os.path.join(directory, package_name)
    if not os.path.exists(source_path):
        raise Exception('No such file or directory {}'.format(source_path))
    if not os.path.isdir(source_path):
        raise Exception(
            'The directory provided {} is not a directory.'.format(
                source_path))
    logger.info('Replacing {s} on manager {d}'.format(
        s=source_path, d=manager_package_path))
    replace_file_on_manager(source_path, manager_package_path)
    # The mgmtworker runs as cfyuser; restore ownership after the copy.
    docker_exec(
        'chown -R cfyuser:cfyuser {path}'.format(path=manager_package_path))
def upload_test_plugins_dev(plugins,
                            execute_bundle_upload=True,
                            bundle_path=None):
    """
    Upload all plugins that we need to execute the test.

    :param plugins: A list of additional plugins to upload. (Like ones
        that are not in the bundle (Openstack 3, Host Pool).
    :param execute_bundle_upload: Whether to install a bundle.
    :param bundle_path: Path to plugins bundle.
    :return:
    """
    if execute_bundle_upload:
        if os.path.isfile(bundle_path or ''):
            logger.info("Using plugins bundle found at: {path}".format(
                path=bundle_path))
            cloudify_exec(
                'cfy plugins bundle-upload --path {bundle_path}'.format(
                    bundle_path=copy_file_to_docker(bundle_path)),
                get_json=False)
        else:
            # No local bundle: let the CLI fetch the default one.
            cloudify_exec('cfy plugins bundle-upload', get_json=False)
    for wagon_and_yaml in (plugins or []):
        # Brief pause between uploads to avoid hammering the manager.
        sleep(3)
        output = plugins_upload(wagon_and_yaml[0], wagon_and_yaml[1])
        logger.info('Uploaded plugin: {0}'.format(output))
    logger.info('Plugins list: {0}'.format(cloudify_exec('cfy plugins list')))
def prepare_inputs(inputs):
    """
    Yield a value usable as the inputs argument for cfy commands.

    Dicts are dumped to a temporary YAML file and copied into the
    container; local file paths are copied into the container; anything
    else (an inline inputs string, or a falsy value) is yielded
    unchanged.  Files copied into the container are removed when the
    caller is done with them.

    :param inputs: dict, local file path, inline inputs string, or None.
    """
    logger.info("Preparing inputs...")
    if not inputs:
        yield
    elif isinstance(inputs, dict):
        # Fix: isinstance instead of ``type(inputs) is dict`` so dict
        # subclasses (e.g. OrderedDict) take the same path.
        with NamedTemporaryFile(mode='w+', delete=True) as outfile:
            yaml.dump(inputs, outfile, allow_unicode=False)
            logger.debug(
                "temporary inputs file path {p}".format(p=outfile.name))
            inputs_on_docker = copy_file_to_docker(outfile.name)
            try:
                yield inputs_on_docker
            finally:
                delete_file_from_docker(inputs_on_docker)
    elif os.path.isfile(inputs):
        inputs_on_docker = copy_file_to_docker(inputs)
        try:
            yield inputs_on_docker
        finally:
            delete_file_from_docker(inputs_on_docker)
    else:
        # It's an inline input string, so yield it as is.
        yield inputs
def handle_test_failure(test_name, on_failure, timeout):
    """
    Clean up after a failed test according to the on_failure policy.

    Supported policies: do nothing, cancel running executions,
    rollback-full, rollback-partial, uninstall-force.

    :param test_name: The deployment ID.
    :param on_failure: One of DONOTHING, CANCEL, ROLLBACK_FULL,
        ROLLBACK_PARTIAL, UNINSTALL_FORCE.
    :param timeout: Seconds to wait for cancellations/workflows.
    :raises EcosystemTestException: On an unrecognized policy.
    """
    logger.info('Handling test failure...')
    executions_to_cancel = find_executions_to_cancel(test_name)
    # Consistency fix: compare every policy constant with == — the
    # original mixed identity (is) and equality checks, which is fragile
    # if the constants are strings.
    if on_failure == DONOTHING:
        return
    elif on_failure == CANCEL:
        cancel_multiple_executions(executions_to_cancel, timeout, force=False)
    elif on_failure == ROLLBACK_FULL:
        cancel_multiple_executions(executions_to_cancel, timeout, force=False)
        executions_start('rollback', test_name, timeout,
                         params='full_rollback=true')
    elif on_failure == ROLLBACK_PARTIAL:
        cancel_multiple_executions(executions_to_cancel, timeout, force=False)
        executions_start('rollback', test_name, timeout)
    elif on_failure == UNINSTALL_FORCE:
        cancel_multiple_executions(executions_to_cancel, timeout, force=False)
        cleanup_on_failure(test_name)
    else:
        raise EcosystemTestException('Wrong on_failure param supplied,'
                                     ' Doing nothing please clean resources on'
                                     ' the manager manually.')
def vpn():
    """Run tests while VPN is executing.

    Does not actually work in circle ci :("""
    logger.info('Starting VPN...')
    proc = docker_exec(
        'openvpn {config_path}'.format(config_path=VPN_CONFIG_PATH),
        detach=True)
    # TODO: Find a way to poll the VPN without killing it. :(
    sleep(10)
    logger.info('VPN is supposed to be running...')
    try:
        yield proc
    except Exception as e:
        # TODO: Learn about potential Exceptions here.
        # Apparently CircleCI does not support VPNs. !!!!
        logger.info(RED + 'VPN error {0}'.format(str(e)) + RESET)
    finally:
        logger.info('Stopping VPN...')
        proc.terminate()
def handle_uninstall_on_success(test_name, timeout):
    """
    Uninstall the deployment, then delete it together with its blueprint.

    :param test_name: The deployment ID.
    :param timeout: Seconds to wait for the uninstall workflow.
    """
    logger.info(BLUE + 'Uninstalling...' + RESET)
    executions_start('uninstall', test_name, timeout)
    wait_for_execution(test_name, 'uninstall', timeout)
    blueprint_id = get_blueprint_id_of_deployment(test_name)
    logger.info(
        "Blueprint id of deployment {dep_id} is : {blueprint_id}".format(
            dep_id=test_name, blueprint_id=blueprint_id))
    try:
        deployment_delete(test_name)
        blueprints_delete(blueprint_id)
    except Exception as e:
        logger.info(
            RED + 'Failed to delete blueprint, {0}'.format(str(e)) + RESET)
def is_first_invocation(test_name):
    """
    Check if this is the first invocation of the test, by checking the
    existence of a deployment with the test_name id.

    :param test_name: The test name.
    :return: True when no such deployment exists yet.
    """
    logger.info(
        'Checking if {test_name} in deployments list '.format(
            test_name=test_name))
    existing_ids = {
        deployment["id"]
        for deployment in cloudify_exec('cfy deployments list')
    }
    if test_name in existing_ids:
        logger.info('Not first invocation!')
        return False
    logger.info('First invocation!')
    return True
def use_cfy(timeout=60):
    """
    Initialize the Cloudify CLI profile inside the container.

    :param timeout: Seconds to keep retrying ``cfy status``.
    :return: Command output.
    :raises EcosystemTestException: When the manager never becomes ready.
    """
    logger.info('Checking manager status.')
    start = datetime.now()
    # Give 10 sec of mercy for the container to boot
    sleep(10)
    while True:
        if datetime.now() - start > timedelta(seconds=timeout):
            raise EcosystemTestException('Fn use_cfy timed out.')
        try:
            output = cloudify_exec('cfy status', get_json=False)
            logger.info(output)
        except EcosystemTestException:
            # Bug fix: retry instead of falling through to the break
            # below and declaring the manager ready right after a failed
            # status call.
            sleep(10)
            continue
        logger.info('Manager is ready.')
        break
def handle_process(command,
                   timeout=TIMEOUT,
                   log=True,
                   detach=False,
                   stdout_color=DEFAULT_COLOR):
    """
    Run a shell command, streaming its output to the logger.

    :param command: The command line to run (split with shlex).
    :param timeout: Seconds before raising EcosystemTimeout.
    :param log: Whether to log the command's output while it runs.
    :param detach: Return the Popen object immediately instead of waiting.
    :param stdout_color: Color prefix for logged stdout lines.
    :return: The process's stdout as a string, or the Popen when detached.
    :raises EcosystemTimeout: When the command exceeds ``timeout``.
    :raises EcosystemTestException: When the command exits non-zero.
    """
    # NOTE(review): the temp files and their four handles are never
    # closed or removed (the detach path needs them to stay open);
    # acceptable for short-lived test runs but a leak in theory.
    file_obj_stdout = NamedTemporaryFile(delete=False)
    file_obj_stderr = NamedTemporaryFile(delete=False)
    stdout_file = open(file_obj_stdout.name, 'w')
    stdout_file_read = open(file_obj_stdout.name, 'r')
    stderr_file = open(file_obj_stderr.name, 'w')
    stderr_file_read = open(file_obj_stderr.name, 'r')

    popen_args = {
        'args': split(command),
        'stdout': stdout_file,
        'stderr': stderr_file,
    }

    def dump_command_output():
        # Flush and relay anything the process wrote since the last call.
        if log:
            stdout_file.flush()
            for stdout_line in stdout_file_read.readlines():
                logger.info(
                    stdout_color +
                    'Execution output: {0}'.format(stdout_line) + RESET)
            stderr_file.flush()
            for stderr_line in stderr_file_read.readlines():
                logger.error(
                    RED + 'Execution error: {0}'.format(stderr_line) + RESET)

    def return_parsable_output():
        # Re-read the whole stdout file so the caller gets everything,
        # not just what was left unread by dump_command_output.
        stdout_file.flush()
        with open(file_obj_stdout.name, 'r') as fout:
            return '\n'.join(fout.readlines())

    if log:
        logger.info('Executing command {0}...'.format(command))
    time_started = datetime.now()
    p = subprocess.Popen(**popen_args)

    if detach:
        return p

    while p.poll() is None:
        if log:
            logger.info('Command {0} still executing...'.format(command))
        dump_command_output()
        if datetime.now() - time_started > timedelta(seconds=timeout):
            raise EcosystemTimeout('The timeout was reached.')
        sleep(2)

    dump_command_output()
    if log:
        logger.info('Command finished {0}...'.format(command))

    if p.returncode:
        dump_command_output()
        # Bug fix: the return code was passed to format() but the message
        # had no placeholder for it.
        raise EcosystemTestException(
            'Command failed with exit code {0}.'.format(p.returncode))

    if log:
        logger.info('Command succeeded {0}...'.format(command))

    return return_parsable_output()
def get_deployment_outputs(deployment_id):
    """
    Fetch all outputs of a deployment as JSON.

    :param deployment_id: The deployment to query.
    :return: Parsed outputs mapping.
    """
    logger.info('Getting deployment outputs {0}'.format(deployment_id))
    command = 'cfy deployments outputs --json {0}'.format(deployment_id)
    return cloudify_exec(command)
def get_deployment_output_by_name(deployment_id, output_id):
    """
    Fetch a single deployment output's value.

    :param deployment_id: The deployment to query.
    :param output_id: The output name.
    :return: The output's value, or None when absent.
    """
    logger.info(
        'Getting deployment output: {output_id}'.format(output_id=output_id))
    all_outputs = get_deployment_outputs(deployment_id)
    entry = all_outputs.get(output_id, {})
    return entry.get('value')