def _test_no_warning_on_missing_host_environment_var_on_silent_commands(self, cmd):
    """Silent commands must not log env-var warnings via project_from_options."""
    # `project_from_options` performs both the env-variable check and the
    # warning silencing, so a minimal options map with just `COMMAND` and
    # `--file` is sufficient here.
    options = {
        'COMMAND': cmd,
        '--file': [EnvironmentTest.compose_file.name],
    }
    with mock.patch('compose.config.environment.log') as fake_log:
        project_from_options('.', options)
    assert fake_log.warn.call_count == 0
def __init__(self, path, options, norecreate, nodeps, running, filelog=None):
    """Create a monitor unit bound to a docker-compose project.

    :param path: project directory handed to ``project_from_options``
    :param options: compose CLI options mapping
    :param norecreate: flag stored on the instance for later use
    :param nodeps: flag stored on the instance for later use
    :param running: flag stored on the instance for later use
    :param filelog: optional path; when given, logging goes to a file
        handler instead of a stream handler
    :raises SystemExit: when the compose configuration cannot be loaded
    """
    # Lazily create a single module-level logger the first time any
    # instance is constructed; later instances reuse it unchanged.
    global log
    if "log" not in globals():
        if filelog is not None:
            log = logging.getLogger(__name__)
            # NOTE(review): handlers come from `logger`, not `logging` —
            # presumably a module alias exposing FileHandler/StreamHandler;
            # confirm against the file's imports.
            log.addHandler(logger.FileHandler(filelog))
            log.setLevel(logging.DEBUG)
        else:
            log = logging.getLogger(__name__)
            log.addHandler(logger.StreamHandler())
            log.setLevel(logging.DEBUG)
    self.path = path
    self.options = options
    self.norecreate = norecreate
    self.nodeps = nodeps
    self.running = running
    try:
        # Build the compose project up front; failure is fatal.
        self.project = command.project_from_options(
            self.path, self.options)
    except ConfigurationError:
        log.error("Can't create a monitor unit\n{}".format(
            traceback.format_exc()))
        raise SystemExit
def launch_docker_compose(base_path, temp_dir, verb, services=None,
                          no_color=False, extra_command_options=None,
                          **context):
    """Render the compose template for *verb* and run ``docker-compose up``.

    :param base_path: project root containing ``ansible/container.yml``
    :param temp_dir: workspace directory for the rendered compose file
    :param verb: operation name used to select the jinja template
    :param services: optional list of services to limit the run to
    :param no_color: disable coloured compose output
    :param extra_command_options: extra options merged into the ``up`` command
    :param context: extra values forwarded to the template renderer
    """
    # Fix: mutable default arguments ([] / dict()) are shared across calls;
    # normalise None to fresh containers instead.
    services = [] if services is None else services
    extra_command_options = {} if extra_command_options is None else extra_command_options
    version = compose_format_version(base_path)
    # v1 compose files use a different template.
    jinja_render_to_temp(('%s-docker-compose.j2.yml' if version == 2
                          else '%s-docker-compose-v1.j2.yml') % (verb, ),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=extract_hosts_from_docker_compose(base_path),
                         **context)
    options = DEFAULT_COMPOSE_OPTIONS.copy()
    options.update({
        u'--file': [
            os.path.normpath(
                os.path.join(base_path, 'ansible', 'container.yml')),
            os.path.join(temp_dir, 'docker-compose.yml')
        ],
        u'COMMAND': 'up',
        u'ARGS': ['--no-build'] + services
    })
    command_options = DEFAULT_COMPOSE_UP_OPTIONS.copy()
    command_options[u'--no-build'] = True
    command_options[u'--no-color'] = no_color
    command_options[u'SERVICE'] = services
    command_options.update(extra_command_options)
    os.environ['ANSIBLE_CONTAINER_BASE'] = os.path.realpath(base_path)
    project = project_from_options('.', options)
    command = main.TopLevelCommand(project)
    command.up(command_options)
def launch_docker_compose(base_path, project_name, temp_dir, verb, services=None,
                          no_color=False, extra_command_options=None, **context):
    """Render the compose template for *verb* and run ``docker-compose up``.

    :param base_path: project root containing ``ansible/container.yml``
    :param project_name: compose project name passed to the template
    :param temp_dir: workspace directory for the rendered compose file
    :param verb: operation name used to select the jinja template
    :param services: optional list of services to limit the run to
    :param no_color: disable coloured compose output
    :param extra_command_options: extra options merged into the ``up`` command
    :param context: extra values forwarded to the template renderer
    """
    # Fix: mutable default arguments ([] / dict()) are shared across calls;
    # normalise None to fresh containers instead.
    services = [] if services is None else services
    extra_command_options = {} if extra_command_options is None else extra_command_options
    version = compose_format_version(base_path)
    # v1 compose files use a different template.
    jinja_render_to_temp(('%s-docker-compose.j2.yml' if version == 2
                          else '%s-docker-compose-v1.j2.yml') % (verb,),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=extract_hosts_from_docker_compose(
                             base_path),
                         project_name=project_name,
                         base_path=os.path.realpath(base_path),
                         **context)
    options = DEFAULT_COMPOSE_OPTIONS.copy()
    options.update({
        u'--file': [
            os.path.normpath(
                os.path.join(base_path, 'ansible', 'container.yml')
            ),
            os.path.join(temp_dir, 'docker-compose.yml')],
        u'COMMAND': 'up',
        u'ARGS': ['--no-build'] + services
    })
    command_options = DEFAULT_COMPOSE_UP_OPTIONS.copy()
    command_options[u'--no-build'] = True
    command_options[u'--no-color'] = no_color
    command_options[u'SERVICE'] = services
    command_options.update(extra_command_options)
    project = project_from_options(base_path, options)
    command = main.TopLevelCommand(project)
    command.up(command_options)
def docker_project() -> Project:
    """
    Builds the Docker project if necessary, once per session.

    Returns the project instance, which can be used to start and stop
    the Docker containers.
    """
    compose_path = Path(_base_path())
    # A directory argument implies the conventional compose file name.
    if compose_path.is_dir():
        compose_path = compose_path / "docker-compose.yml"
    if not compose_path.is_file():
        raise ValueError(
            "Unable to find `{0}` for integration tests.".format(
                compose_path),
        )
    project = project_from_options(
        project_dir=str(compose_path.parent),
        options={
            "--file": [compose_path.name],
            '--project-name': DOCKER_COMPOSE_PROJECT,
        },
    )
    project.build()
    return project
def terminate(self, operation, temp_dir, hosts=None):
    """Render a compose file for *operation* and stop the listed services.

    :param operation: operation name used to pick extra-args/config hooks
        and the jinja template
    :param temp_dir: workspace directory for the rendered compose file
    :param hosts: optional list of services to stop (all when empty)
    """
    # Fix: mutable default argument ([]) is shared across calls.
    hosts = [] if hosts is None else hosts
    self.temp_dir = temp_dir
    extra_options = getattr(self, 'terminate_%s_extra_args' % operation)()
    config = getattr(self, 'get_config_for_%s' % operation)()
    logger.debug('%s' % (config, ))
    config_yaml = yaml_dump(config)
    logger.debug('Config YAML is')
    logger.debug(config_yaml)
    jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation, ),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=self.all_hosts_in_orchestration(),
                         project_name=self.project_name,
                         base_path=self.base_path,
                         params=self.params,
                         api_version=self.api_version,
                         config=config_yaml,
                         env=os.environ)
    options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    options.update({
        u'--verbose': self.params['debug'],
        u'--file': [os.path.join(temp_dir, 'docker-compose.yml')],
        u'COMMAND': 'stop',
        u'--project-name': 'ansible'
    })
    command_options = self.DEFAULT_COMPOSE_STOP_OPTIONS.copy()
    command_options[u'SERVICE'] = hosts
    command_options.update(extra_options)
    project = project_from_options(self.base_path, options)
    command = main.TopLevelCommand(project)
    command.stop(command_options)
def docker_project(self, request):
    """
    Builds the Docker project if necessary, once per session.

    Returns the project instance, which can be used to start and stop
    the Docker containers.
    """
    compose_path = Path(request.config.getoption("docker_compose"))
    # A directory argument implies the conventional compose file name.
    if compose_path.is_dir():
        compose_path = compose_path / "docker-compose.yml"
    if not compose_path.is_file():
        message = (
            "Unable to find `{docker_compose}` "
            "for integration tests.".format(docker_compose=compose_path)
        )
        raise ValueError(message)
    project = project_from_options(
        project_dir=str(compose_path.parent),
        options={"--file": [compose_path.name]},
    )
    project.build()
    return project
def terminate(self, operation, temp_dir, hosts=None):
    """Render a compose file for *operation* and stop the listed services.

    :param operation: operation name used to pick extra-args/config hooks
        and the jinja template
    :param temp_dir: workspace directory for the rendered compose file
    :param hosts: optional list of services to stop (all when empty)
    """
    # Fix: mutable default argument ([]) is shared across calls.
    hosts = [] if hosts is None else hosts
    self.temp_dir = temp_dir
    extra_options = getattr(self, 'terminate_%s_extra_args' % operation)()
    config = getattr(self, 'get_config_for_%s' % operation)()
    logger.debug('%s' % (config,))
    config_yaml = yaml_dump(config)
    logger.debug('Config YAML is')
    logger.debug(config_yaml)
    jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=self.all_hosts_in_orchestration(),
                         project_name=self.project_name,
                         base_path=self.base_path,
                         params=self.params,
                         api_version=self.api_version,
                         config=config_yaml,
                         env=os.environ)
    options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    options.update({
        u'--verbose': self.params['debug'],
        u'--file': [
            os.path.join(temp_dir, 'docker-compose.yml')],
        u'COMMAND': 'stop',
        u'--project-name': 'ansible'
    })
    command_options = self.DEFAULT_COMPOSE_STOP_OPTIONS.copy()
    command_options[u'SERVICE'] = hosts
    command_options.update(extra_options)
    project = project_from_options(self.base_path, options)
    command = main.TopLevelCommand(project)
    command.stop(command_options)
def orchestrate(self, operation, temp_dir, hosts=None, context=None):
    """
    Execute the compose engine.

    :param operation: One of build, run, or listhosts
    :param temp_dir: A temporary directory usable as workspace
    :param hosts: (optional) A list of hosts to limit orchestration to
    :param context: (optional) extra context for the compose template
    :return: The exit status of the builder container (None if it wasn't run)
    """
    # Fix: mutable default arguments ([] / {}) are shared across calls.
    hosts = [] if hosts is None else hosts
    context = {} if context is None else context
    # Fix: explicit flag instead of the fragile `locals().get('is_detached')`
    # lookup the original relied on.
    is_detached = False
    if self.params.get('detached'):
        is_detached = True
        del self.params['detached']
    self.temp_dir = temp_dir
    try:
        builder_img_id = self.get_image_id_by_tag(
            self.builder_container_img_tag)
    except NameError:
        # Builder image not present locally; fall back to the released tag.
        image_version = '.'.join(release_version.split('.')[:2])
        builder_img_id = 'ansible/%s:%s' % (self.builder_container_img_tag,
                                            image_version)
    extra_options = getattr(self, 'orchestrate_%s_extra_args' % operation)()
    config = getattr(self, 'get_config_for_%s' % operation)()
    logger.debug('%s' % (config,))
    config_yaml = yaml_dump(config)
    logger.debug('Config YAML is')
    logger.debug(config_yaml)
    jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=self.all_hosts_in_orchestration(),
                         project_name=self.project_name,
                         base_path=self.base_path,
                         params=self.params,
                         api_version=self.api_version,
                         builder_img_id=builder_img_id,
                         config=config_yaml,
                         env=os.environ,
                         **context)
    options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    options.update({
        u'--verbose': self.params['debug'],
        u'--file': [
            os.path.join(temp_dir, 'docker-compose.yml')],
        u'COMMAND': 'up',
        u'ARGS': ['--no-build'] + hosts,
        u'--project-name': 'ansible'
    })
    command_options = self.DEFAULT_COMPOSE_UP_OPTIONS.copy()
    command_options[u'--no-build'] = True
    command_options[u'SERVICE'] = hosts
    if is_detached:
        logger.info('Deploying application in detached mode')
        command_options[u'-d'] = True
    command_options.update(extra_options)
    project = project_from_options(self.base_path, options)
    command = main.TopLevelCommand(project)
    command.up(command_options)
def bootstrap_env(self, temp_dir, behavior, operation, compose_option,
                  builder_img_id=None, context=None):
    """
    Build common Docker Compose elements required to execute orchestrate,
    terminate, restart, etc.

    :param temp_dir: A temporary directory usable as workspace
    :param behavior: x in x_operation_extra_args
    :param operation: Operation to perform, like, build, run, listhosts, etc
    :param compose_option: x in DEFAULT_COMPOSE_X_OPTIONS
    :param builder_img_id: Ansible Container Builder Image ID
    :param context: extra context to send to jinja_render_to_temp
    :return: options (options to pass to compose),
             command_options (operation options to pass to compose),
             command (compose's top level command)
    """
    if context is None:
        context = {}
    self.temp_dir = temp_dir
    # Look up the behavior/operation-specific hooks by name.
    extra_args_hook = getattr(
        self, '{}_{}_extra_args'.format(behavior, operation))
    operation_overrides = extra_args_hook()
    operation_config = getattr(self, 'get_config_for_%s' % operation)()
    logger.debug('%s' % (operation_config,))
    rendered_config = yaml_dump(operation_config)
    logger.debug('Config YAML is')
    logger.debug(rendered_config)
    jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=self.all_hosts_in_orchestration(),
                         project_name=self.project_name,
                         base_path=self.base_path,
                         params=self.params,
                         api_version=self.api_version,
                         builder_img_id=builder_img_id,
                         config=rendered_config,
                         env=os.environ,
                         **context)
    compose_options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    compose_options.update({
        u'--verbose': self.params['debug'],
        u'--file': [os.path.join(temp_dir, 'docker-compose.yml')],
        u'--project-name': 'ansible',
    })
    action_options = getattr(
        self,
        'DEFAULT_COMPOSE_{}_OPTIONS'.format(compose_option.upper())).copy()
    action_options.update(operation_overrides)
    compose_project = project_from_options(
        self.base_path + '/ansible', compose_options)
    top_level = main.TopLevelCommand(compose_project)
    return compose_options, action_options, top_level
def _get_project(self, host):
    """Yield a compose project built from the dumped compose file for *host*.

    Host environment variables are applied for the duration of the yield
    and always cleaned up afterwards.
    """
    # Fix: the previous `except Exception as exc: raise` clause only
    # re-raised and bound an unused name; try/finally expresses the
    # cleanup intent directly with identical behaviour.
    try:
        compose_file_path = self._dump_compose_file(host)
        self._apply_environ(host)
        yield project_from_options('./', {'--file': [compose_file_path]})
    finally:
        self._clean_environ()
def perform_command(options, handler, command_options):
    """Dispatch a top-level compose command.

    `help` and `version` run without a project; every other command gets a
    project built from *options* and runs inside the connection-error guard.
    """
    if options['COMMAND'] in ('help', 'version'):
        # Skip looking up the compose file.
        handler(command_options).run()
        return
    project = project_from_options('.', options)
    command = handler(project=project, options=command_options)
    with errors.handle_connection_errors(project.client):
        command.run()
def __init__(self):
    """Load the docker-compose project for the current directory.

    :raises SystemExit: when no docker-compose file is found
    """
    try:
        self.project = command.project_from_options('.', {})
    except ComposeFileNotFound:
        print("No docker-compose found, create one with :")
        print('$ docky init')
        # Fix: `exit()` is a site-module convenience not guaranteed to
        # exist in every interpreter; raise SystemExit directly instead.
        raise SystemExit(-1)
    self.name = self.project.name
    self.loaded_config = None
    self.service = self._get_main_service(self.project)
def orchestrate(self, operation, temp_dir, hosts=None, context=None):
    """
    Execute the compose engine.

    :param operation: One of build, run, or listhosts
    :param temp_dir: A temporary directory usable as workspace
    :param hosts: (optional) A list of hosts to limit orchestration to
    :param context: (optional) extra context for the compose template
    :return: The exit status of the builder container (None if it wasn't run)
    """
    # Fix: mutable default arguments ([] / {}) are shared across calls.
    hosts = [] if hosts is None else hosts
    context = {} if context is None else context
    self.temp_dir = temp_dir
    try:
        builder_img_id = self.get_image_id_by_tag(
            self.builder_container_img_tag)
    except NameError:
        # Builder image not present locally; fall back to the released tag.
        image_version = '.'.join(release_version.split('.')[:2])
        builder_img_id = 'ansible/%s:%s' % (self.builder_container_img_tag,
                                            image_version)
    extra_options = getattr(self, 'orchestrate_%s_extra_args' % operation)()
    config = getattr(self, 'get_config_for_%s' % operation)()
    logger.debug('%s' % (config, ))
    config_yaml = yaml_dump(config)
    logger.debug('Config YAML is')
    logger.debug(config_yaml)
    jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation, ),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=self.all_hosts_in_orchestration(),
                         project_name=self.project_name,
                         base_path=self.base_path,
                         params=self.params,
                         api_version=self.api_version,
                         builder_img_id=builder_img_id,
                         config=config_yaml,
                         env=os.environ,
                         **context)
    options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    options.update({
        u'--verbose': self.params['debug'],
        u'--file': [os.path.join(temp_dir, 'docker-compose.yml')],
        u'COMMAND': 'up',
        u'ARGS': ['--no-build'] + hosts,
        u'--project-name': 'ansible'
    })
    command_options = self.DEFAULT_COMPOSE_UP_OPTIONS.copy()
    command_options[u'--no-build'] = True
    command_options[u'SERVICE'] = hosts
    command_options.update(extra_options)
    project = project_from_options(self.base_path, options)
    command = main.TopLevelCommand(project)
    command.up(command_options)
def create(self, instance_id: str, content: str, c_type: str, **kwargs) -> None:
    """
    This creates a set of containers using docker compose.
    Note: the use of the docker compose python module is unsupported by
    docker inc.

    :param instance_id: stack instance identifier; becomes the project dir
    :param content: the docker compose file as a string
    :param c_type: must be 'docker-compose'
    :param kwargs: may carry 'parameters', a mapping injected into every
        service as environment variables
    :raises NotImplementedError: for any other cluster-manager type
    """
    if c_type != 'docker-compose':
        raise NotImplementedError(
            'The type ({type}) of cluster manager is unknown'.format(
                type=c_type))
    # when we get the manifest, we have to dump it to a temporary file
    # to allow for multiple stack instances we need to have multiple projects!
    # this means multiple directories
    mani_dir = self.manifest_cache + '/' + instance_id
    if not os.path.exists(mani_dir):
        os.makedirs(mani_dir)
    else:
        LOG.info(
            'The instance is already running with the following project: {mani_dir}'
            .format(mani_dir=mani_dir))
        LOG.warning('Content in this directory will be overwritten.')
        # XXX shouldn't this raise an exception?
    # parameters is the name set in the OSBA spec for additional parameters
    # supplied on provisioning; if none supplied, we use an empty dict
    parameters = kwargs.get('parameters', dict())
    # add optionally supplied parameters as environment variables
    env_list = list()
    for k, v in parameters.items():
        LOG.info('Including as environment variable: {k}={v}'.format(k=k, v=v))
        env_list.append(k + '=' + v)
    if env_list:
        # Fix: yaml.load on externally supplied content is unsafe (arbitrary
        # object construction); safe_load handles plain compose files.
        m = yaml.safe_load(content)
        for k, v in m['services'].items():
            v['environment'] = env_list
        content = yaml.dump(m)
    LOG.debug('writing to: {compo}'.format(compo=mani_dir + '/docker-compose.yml'))
    # Fix: use a context manager so the file handle is closed even on error.
    with open(mani_dir + '/docker-compose.yml', 'wt') as compose_file:
        compose_file.write(content)
    project = project_from_options(mani_dir, self.options)
    cmd = TopLevelCommand(project)
    cmd.up(self.options)
def read_compose_project(config):
    """Parse compose CLI arguments in *config* and return the resulting project."""
    from compose.cli.main import TopLevelCommand
    from compose.cli.command import project_from_options
    from compose.cli.docopt_command import DocoptDispatcher

    dispatcher = DocoptDispatcher(
        TopLevelCommand, {'options_first': True, 'version': '1.8.0'})
    # Append a harmless subcommand so docopt accepts the argument list.
    options, handler, command_options = dispatcher.parse(config + ['config'])
    return project_from_options('.', options)
def docker_project(self, request):
    """
    Builds the Docker project if necessary, once per session.

    Returns the project instance, which can be used to start and stop
    the Docker containers.
    """
    compose_path = Path(request.config.getoption("docker_compose"))
    if compose_path.is_dir():
        compose_path = compose_path / "docker-compose.yml"
    if not compose_path.is_file():
        raise ValueError(
            "Unable to find `{docker_compose}` "
            "for integration tests.".format(
                docker_compose=compose_path.absolute(),
            ),
        )
    project = project_from_options(
        project_dir=str(compose_path.parent),
        options={"--file": [compose_path.name]},
    )
    skip_build = request.config.getoption("--docker-compose-no-build")
    if not skip_build:
        project.build()
    if not request.config.getoption("--use-running-containers"):
        # Strict mode: refuse to run against pre-existing containers.
        if any(project.containers()):
            raise ContainersAlreadyExist(
                "There are already existing containers, please remove all "
                "containers by running 'docker-compose down' before using "
                "the pytest-docker-compose plugin. Alternatively, you "
                "can use the '--use-running-containers' flag to indicate "
                "you will use the currently running containers.")
        return project
    # --use-running-containers path: attach to what is already up.
    if not skip_build:
        warnings.warn(
            UserWarning(
                "You used the '--use-running-containers' without the "
                "'--docker-compose-no-build' flag, the newly build "
                "containers won't be used if there are already "
                "containers running!"))
    current_containers = project.containers()
    containers = project.up()
    if set(current_containers) != set(containers):
        warnings.warn(
            UserWarning(
                "You used the '--use-running-containers' but "
                "pytest-docker-compose could not find all containers "
                "running. The remaining containers have been started.")
        )
    return project
def project_from_options_for_each_dir(cls, dirs, *args, **kwargs):
    """
    Create the Docker project from options, trying each project_dirs.

    :param dirs: candidate project directories, tried in order
    :return: (project, project_dir) for the first directory that works
    :raises ComposeFileNotFound: when every directory fails
    :raises ValueError: when *dirs* is empty
    """
    exc = None
    for project_dir in dirs:
        try:
            # Fix: pass project_dir as a trailing keyword; the old
            # `f(project_dir=..., *args)` form conflicts with positional
            # args binding to the same parameter.
            project = project_from_options(
                *args, project_dir=project_dir, **kwargs)
            return project, project_dir
        except ComposeFileNotFound as local_exc:
            exc = local_exc
    if exc is None:
        # Fix: previously `raise exc` re-raised None (a TypeError) when
        # dirs was empty; fail with a clear message instead.
        raise ValueError("no project directories supplied")
    raise exc
def info(self, instance_id: str, **kwargs) -> Dict[str, str]:
    """Collect a flat attribute dictionary describing the stack's containers.

    :param instance_id: stack instance whose manifest directory is inspected
    :return: mapping of ``<container>_<attribute>`` keys to values; empty
        dict when the instance's manifest directory does not exist
    """
    mani_dir = self.manifest_cache + '/' + instance_id
    if not os.path.exists(mani_dir):
        LOG.warning(
            'requested directory does not exist: {mani_dir}'.format(
                mani_dir=mani_dir))
        return {}
    LOG.debug('info from: {compo}'.format(compo=mani_dir + '/docker-compose.yml'))
    project = project_from_options(mani_dir, self.options)
    # stopped=True so containers that have exited are still reported
    containers = project.containers(service_names=self.options['SERVICE'],
                                    stopped=True)
    # rg = docker_handler.convert_to_resource_group(container_ids, resource_group_name=package_name)
    # for id in container_ids:
    #     container = client.containers.get(id)
    info = dict()
    for c in containers:
        # basic info...
        name = c.name
        # strip compose's "_1" replica suffix to recover the service name
        if name.endswith('_1'):
            name = c.name[0:-2]
        info[name + '_image_name'] = c.image_config['RepoTags'][0]
        info[name + '_image_id'] = c.image
        info[name + '_net_name'] = c.dictionary["HostConfig"]["NetworkMode"]
        info[name + '_cmd'] = c.human_readable_command
        info[name + '_state'] = c.human_readable_state
        # merge in extra per-container attributes from the inspect payload
        info = {**info, **self.container_attrs(name, c.dictionary)}
        # environment info...
        for k, v in c.environment.items():
            info[name + '_environment_' + k] = v
        # ip address info...
        # add the IP address of the container, assumes there's only 1 IP
        # address assigned to container
        ip = [
            value.get('IPAddress')
            for value in c.dictionary['NetworkSettings']['Networks'].values()
        ]
        info[name + '_' + 'Ip'] = ip[0]
    reconcile_state(info)
    LOG.debug('Stack\'s attrs:')
    LOG.debug(info)
    return info
def orchestrate(self, operation, temp_dir, hosts=None, context=None):
    """
    Execute the compose engine.

    :param operation: One of build, run, or listhosts
    :param temp_dir: A temporary directory usable as workspace
    :param hosts: (optional) A list of hosts to limit orchestration to
    :param context: (optional) extra context for the compose template
    :return: The exit status of the builder container (None if it wasn't run)
    """
    # Fix: mutable default arguments ([] / {}) are shared across calls.
    hosts = [] if hosts is None else hosts
    context = {} if context is None else context
    self.temp_dir = temp_dir
    builder_img_id = self.get_image_id_by_tag(
        self.builder_container_img_tag)
    extra_options = getattr(self, 'orchestrate_%s_extra_args' % operation)()
    config = getattr(self, 'get_config_for_%s' % operation)()
    logger.debug('%s' % (config,))
    config_yaml = yaml_dump(config)
    logger.debug('Config YAML is')
    logger.debug(config_yaml)
    jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=self.config.get('services', {}).keys(),
                         project_name=self.project_name,
                         base_path=self.base_path,
                         params=self.params,
                         api_version=self.api_version,
                         builder_img_id=builder_img_id,
                         config=config_yaml,
                         env=os.environ,
                         **context)
    options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    options.update({
        u'--file': [
            os.path.join(temp_dir, 'docker-compose.yml')],
        u'COMMAND': 'up',
        u'ARGS': ['--no-build'] + hosts,
        u'--project-name': 'ansible'
    })
    command_options = self.DEFAULT_COMPOSE_UP_OPTIONS.copy()
    # always rebuild; the commented-out '--no-build' toggle was dead code
    command_options[u'--build'] = True
    # allow detaching via the environment
    if os.environ.get('DETACH'):
        command_options[u'-d'] = True
    command_options[u'SERVICE'] = hosts
    command_options.update(extra_options)
    project = project_from_options(self.base_path, options)
    command = main.TopLevelCommand(project)
    command.up(command_options)
def perform_command(options, handler, command_options, base_dir):
    """Dispatch a top-level compose command rooted at *base_dir*.

    `help`/`version` need no compose file; `config` gets a project-less
    TopLevelCommand; everything else builds a project first and runs
    inside the connection-error guard.
    """
    if options['COMMAND'] in ('help', 'version'):
        # Skip looking up the compose file.
        handler(command_options)
        return
    if options['COMMAND'] == 'config':
        command = TopLevelCommand(None)
        handler(command, options, command_options)
        return
    # Removed dead commented-out call that hard-coded '.'; the project is
    # always rooted at the caller-supplied base_dir.
    project = project_from_options(base_dir, options)
    command = TopLevelCommand(project)
    with errors.handle_connection_errors(project.client):
        return handler(command, command_options)
def orchestrate(self, operation, temp_dir, hosts=None, context=None):
    """
    Execute the compose engine.

    :param operation: One of build, run, or listhosts
    :param temp_dir: A temporary directory usable as workspace
    :param hosts: (optional) A list of hosts to limit orchestration to
    :param context: (optional) extra context for the compose template
    :return: The exit status of the builder container (None if it wasn't run)
    """
    # Fix: mutable default arguments ([] / {}) are shared across calls.
    hosts = [] if hosts is None else hosts
    context = {} if context is None else context
    self.temp_dir = temp_dir
    builder_img_id = self.get_image_id_by_tag(self.builder_container_img_tag)
    extra_options = getattr(self, "orchestrate_%s_extra_args" % operation)()
    config = getattr(self, "get_config_for_%s" % operation)()
    logger.debug("%s" % (config,))
    config_yaml = yaml_dump(config)
    logger.debug("Config YAML is")
    logger.debug(config_yaml)
    jinja_render_to_temp(
        "%s-docker-compose.j2.yml" % (operation,),
        temp_dir,
        "docker-compose.yml",
        hosts=self.config.get("services", {}).keys(),
        project_name=self.project_name,
        base_path=self.base_path,
        params=self.params,
        api_version=self.api_version,
        builder_img_id=builder_img_id,
        config=config_yaml,
        env=os.environ,
        **context
    )
    options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    options.update(
        {
            u"--file": [os.path.join(temp_dir, "docker-compose.yml")],
            u"COMMAND": "up",
            u"ARGS": ["--no-build"] + hosts,
            u"--project-name": "ansible",
        }
    )
    command_options = self.DEFAULT_COMPOSE_UP_OPTIONS.copy()
    command_options[u"--no-build"] = True
    command_options[u"SERVICE"] = hosts
    command_options.update(extra_options)
    project = project_from_options(self.base_path, options)
    command = main.TopLevelCommand(project)
    command.up(command_options)
def get_project(path, project_name=None):
    """Return the docker-compose project rooted at *path*.

    :param path: project directory containing ``docker-compose.json``
    :param project_name: optional explicit compose project name
    :return: the compose ``Project`` instance
    """
    # Removed the dead commented-out `compose_get_project` variant; the
    # options dict below is the single supported construction path.
    options = {
        '--file': ['docker-compose.json'],
        '--host': None,
        '--project-name': project_name,
        '--verbose': False,
        '--project-directory': None,  # override the path of the project
        '--compatibility': False
    }
    project = project_from_options(path, options)
    return project
def delete(self, instance_id: str, **kwargs) -> None:
    """Tear down the compose stack for *instance_id* and remove its workspace."""
    mani_dir = self.manifest_cache + '/' + instance_id
    if not os.path.exists(mani_dir):
        LOG.warning('requested directory does not exist: {mani_dir}'.format(mani_dir=mani_dir))
        return
    self.options["--force"] = True
    self.options["--rmi"] = "none"
    LOG.info('destroying: {compo}'.format(compo=mani_dir + '/docker-compose.yml'))
    compose_project = project_from_options(mani_dir, self.options)
    TopLevelCommand(compose_project).down(self.options)
    try:
        shutil.rmtree(mani_dir)
    except PermissionError:
        # Done to let travis pass
        LOG.warning('Could not delete the directory {dir}'.format(dir=mani_dir))
def prepare(self, compose_files, docker_options=None) -> (TopLevelCommand, dict):
    """Build the compose project and return its top-level command and options.

    Based on:
    https://github.com/docker/compose/issues/3573
    https://github.com/docker/compose/pull/4992
    """
    # keep compose from complaining about containers owned by other files
    os.environ['COMPOSE_IGNORE_ORPHANS'] = 'true'
    project_dir = commons.get_dt_path(self.docker_root)
    # defaults modelled on:
    # https://github.com/fruch/doorman/blob/master/tests/integration/conftest.py
    options = {
        'SERVICE': '',
        '--project-name': self.name,
        # important to set this to the current user dir
        '--project-directory': project_dir,
        '--file': compose_files,
        '--no-deps': False,
        '--abort-on-container-exit': False,
        '--remove-orphans': False,
        '--no-recreate': False,
        '--force-recreate': False,
        '--build': False,
        '--no-build': False,
        '--rmi': 'none',
        '--volumes': '',
        '--follow': False,
        '--timestamps': False,
        '--tail': 'all',
        '--scale': '',
        '--no-color': False,
        '-d': True,
        '--always-recreate-deps': True,
    }
    # caller-supplied options win over the defaults
    if docker_options:
        options.update(docker_options)
    # compose the project
    project = project_from_options(project_dir, options)
    # prepare the commands
    cmd = TopLevelCommand(project)
    # return the options
    return cmd, options
def docker_project(self, request):
    """
    Builds the Docker project if necessary, once per session.

    Returns the project instance, which can be used to start and stop
    the Docker containers.
    """
    compose_files = []
    raw_option = request.config.getoption("docker_compose")
    for candidate in (Path(part) for part in raw_option.split(',')):
        if candidate.is_dir():
            candidate = candidate / "docker-compose.yml"
        if not candidate.is_file():
            raise ValueError(
                "Unable to find `{docker_compose}` "
                "for integration tests.".format(
                    docker_compose=candidate.absolute(),
                ),
            )
        compose_files.append(candidate)
    if len(compose_files) > 1:
        # py35 needs strings for os.path functions
        project_dir = os.path.commonpath([str(f) for f in compose_files]) or '.'
        compose_files = [p.relative_to(project_dir) for p in compose_files]
    else:
        project_dir = '.'  # py35 needs strings for os.path functions
    # Must be a list; will get accessed multiple times.
    # https://github.com/pytest-docker-compose/pytest-docker-compose/pull/72
    compose_files = [str(p) for p in compose_files]
    project = project_from_options(
        project_dir=str(project_dir),
        options={"--file": compose_files},
    )
    skip_build = request.config.getoption("--docker-compose-no-build")
    if not skip_build:
        project.build()
    if request.config.getoption("--use-running-containers"):
        if not skip_build:
            warnings.warn(
                UserWarning(
                    "You used the '--use-running-containers' without the "
                    "'--docker-compose-no-build' flag, the newly build "
                    "containers won't be used if there are already "
                    "containers running!"))
        current_containers = project.containers()
        containers = project.up()
        if set(current_containers) != set(containers):
            warnings.warn(
                UserWarning(
                    "You used the '--use-running-containers' but "
                    "pytest-docker-compose could not find all containers "
                    "running. The remaining containers have been started.")
            )
    else:
        if any(project.containers()):
            raise ContainersAlreadyExist(
                "There are already existing containers, please remove all "
                "containers by running 'docker-compose down' before using "
                "the pytest-docker-compose plugin. Alternatively, you "
                "can use the '--use-running-containers' flag to indicate "
                "you will use the currently running containers.")
    return project
def bootstrap_env(self, temp_dir, behavior, operation, compose_option,
                  builder_img_id=None, context=None):
    """
    Build common Docker Compose elements required to execute orchestrate,
    terminate, restart, etc.

    :param temp_dir: A temporary directory usable as workspace
    :param behavior: x in x_operation_extra_args
    :param operation: Operation to perform, like, build, run, listhosts, etc
    :param compose_option: x in DEFAULT_COMPOSE_X_OPTIONS
    :param builder_img_id: Ansible Container Builder Image ID
    :param context: extra context to send to jinja_render_to_temp
    :return: options (options to pass to compose),
             command_options (operation options to pass to compose),
             command (compose's top level command)
    """
    if context is None:
        context = {}
    self.temp_dir = temp_dir
    operation_overrides = getattr(
        self, '{}_{}_extra_args'.format(behavior, operation))()
    operation_config = getattr(self, 'get_config_for_%s' % operation)()
    logger.debug('%s' % (operation_config,))
    rendered_config = yaml_dump(operation_config) if operation_config else ''
    logger.debug('Config YAML is')
    logger.debug(rendered_config)
    target_hosts = self.all_hosts_in_orchestration()
    if operation == 'build' and self.params.get('service'):
        # build operation is limited to a specific list of services
        target_hosts = list(set(target_hosts).intersection(self.params['service']))
    jinja_render_to_temp('%s-docker-compose.j2.yml' % (operation,),
                         temp_dir,
                         'docker-compose.yml',
                         hosts=target_hosts,
                         project_name=self.project_name,
                         base_path=self.base_path,
                         params=self.params,
                         api_version=self.api_version,
                         builder_img_id=builder_img_id,
                         config=rendered_config,
                         env=os.environ,
                         **context)
    compose_options = self.DEFAULT_COMPOSE_OPTIONS.copy()
    compose_options.update({
        u'--verbose': self.params['debug'],
        u'--file': [os.path.join(temp_dir, 'docker-compose.yml')],
        u'--project-name': 'ansible',
    })
    action_options = getattr(
        self,
        'DEFAULT_COMPOSE_{}_OPTIONS'.format(compose_option.upper())).copy()
    action_options.update(operation_overrides)
    compose_project = project_from_options(
        self.base_path + '/ansible', compose_options)
    top_level = main.TopLevelCommand(compose_project)
    return compose_options, action_options, top_level
def convert(convert_type: str, compose_file_name: str, output_path: str, cr: str):
    """Convert a docker-compose file into an IoT Edge deployment manifest.

    :param convert_type: "file" for a single manifest file, "project" for a
        full module-project directory layout
    :param compose_file_name: path of the source docker-compose file
    :param output_path: destination file ("file") or directory ("project")
    :param cr: container registry address used in generated image names
    """
    project_dir = Path(compose_file_name).absolute().resolve().parent
    options = {
        '--compatibility': False,
        '--env-file': None,
        '--file': [compose_file_name],
        '--help': False,
        '--host': None,
        '--log-level': None,
        '--no-ansi': False,
        '--project-directory': project_dir,
        '--project-name': None,
        '--skip-hostname-check': False,
        '--tls': False,
        '--tlscacert': None,
        '--tlscert': None,
        '--tlskey': None,
        '--tlsverify': False,
        '--verbose': False,
        '--version': False,
        '-h': False,
        'ARGS': ['--force-recreate'],
        'COMMAND': 'up'
    }
    project = project_from_options(".", options)
    output_path = Path(output_path).absolute().resolve()
    modules = get_module_options(compose_file_name)
    template["modulesContent"]["$edgeAgent"]["properties.desired"]["modules"] = modules
    if convert_type == "file":
        # only create deployment manifest
        with open(str(output_path), "w") as fp:
            fp.write(json.dumps(template, indent=2))
        template_to_manifest(output_path, output_path)
    if convert_type == "project":
        # need some file copy operation if convert type is project
        if not output_path.is_dir():
            output_path.mkdir()
        modules_dir = output_path.joinpath("modules")
        modules_dir.mkdir()
        deployment_file = output_path.joinpath("deployment.template.json")
        with open(str(deployment_file), "w") as fp:
            fp.write(json.dumps(template, indent=2))
        # create module.json for every service
        for name in project.service_names:
            service = project.get_service(name)
            build_opt = service.options.get("build", {})
            # make module directory
            if "context" in build_opt:
                shutil.copytree(build_opt["context"],
                                str(modules_dir.joinpath(name)))
            # convert build options from compose file to docker cli command
            build_options = []
            dockerfile = build_opt.get("dockerfile", "Dockerfile")
            if "args" in build_opt and build_opt["args"]:
                if isinstance(build_opt["args"], dict):
                    for arg in build_opt["args"]:
                        build_options.append("--build-arg {}={}".format(
                            arg, build_opt["args"][arg]))
                elif isinstance(build_opt["args"], list):
                    for arg in build_opt["args"]:
                        build_options.append("--build-arg {}".format(arg))
            if "cache_from" in build_opt:
                for item in build_opt["cache_from"]:
                    build_options.append("--cache-from {}".format(item))
            if "labels" in build_opt:
                if isinstance(build_opt["labels"], dict):
                    for item in build_opt["labels"]:
                        build_options.append("--label {}={}".format(
                            item, build_opt["labels"][item]))
                elif isinstance(build_opt["labels"], list):
                    for item in build_opt["labels"]:
                        build_options.append("--label {}".format(item))
            if "shm_size" in build_opt:
                build_options.append("--shm-size {}".format(build_opt["shm_size"]))
            if "target" in build_opt:
                build_options.append("--target {}".format(build_opt["target"]))
            module_json_template = {
                "$schema-version": "0.0.1",
                "description": "",
                "image": {
                    "repository": "{}/{}".format(cr, name),
                    "tag": {
                        "version": "0.0.1",
                        "platforms": {
                            "amd64": dockerfile,
                        }
                    },
                    "buildOptions": build_options,
                    "contextPath": "./"
                }
            }
            if build_opt:
                # create module.json in module folder
                module_json_path = modules_dir.joinpath(name).joinpath("module.json")
                with open(str(module_json_path), "w", encoding="utf8") as fp:
                    fp.write(json.dumps(module_json_template, indent=4))
                    fp.write("\n")
        # create .env
        env_file = output_path.joinpath(".env")
        with open(str(env_file), "w", encoding="utf8") as fp:
            fp.write("CONTAINER_REGISTRY_USERNAME=\n")
            fp.write("CONTAINER_REGISTRY_PASSWORD=\n")
            fp.write("CONTAINER_REGISTRY_ADDRESS={}\n".format(cr))
def __init__(self, client):
    """Initialise the manager from an Ansible docker client.

    Copies every module parameter onto ``self`` (see the ``setattr`` loop),
    builds the docker-compose CLI option map, validates that docker-compose
    and PyYAML are importable, then creates the compose project either from
    an inline ``definition`` (written to a temporary docker-compose.yml) or
    from an existing ``project_src`` directory.

    Any configuration problem is reported through ``client.fail``.
    """
    super(ContainerManager, self).__init__()
    self.client = client
    # Defaults for every supported module parameter; each is overwritten
    # below by whatever the user actually passed in client.module.params.
    self.project_src = None
    self.files = None
    self.project_name = None
    self.state = None
    self.definition = None
    self.hostname_check = None
    self.timeout = None
    self.remove_images = None
    self.remove_orphans = None
    self.remove_volumes = None
    self.stopped = None
    self.restarted = None
    self.recreate = None
    self.build = None
    self.dependencies = None
    self.services = None
    self.scale = None
    self.debug = None
    self.pull = None
    self.nocache = None
    for key, value in client.module.params.items():
        setattr(self, key, value)
    self.check_mode = client.check_mode
    if not self.debug:
        # Fall back to Ansible's global debug switch when the module
        # parameter was not set.
        self.debug = client.module._debug
    # Assemble the docopt-style option dict consumed by
    # compose's project_from_options().
    self.options = dict()
    self.options.update(self._get_auth_options())
    self.options[u'--skip-hostname-check'] = (not self.hostname_check)
    if self.project_name:
        self.options[u'--project-name'] = self.project_name
    if self.files:
        self.options[u'--file'] = self.files
    if not HAS_COMPOSE:
        self.client.fail("Unable to load docker-compose. Try `pip install docker-compose`. Error: %s" % HAS_COMPOSE_EXC)
    if LooseVersion(compose_version) < LooseVersion(MINIMUM_COMPOSE_VERSION):
        self.client.fail("Found docker-compose version %s. Minimum required version is %s. "
                         "Upgrade docker-compose to a min version of %s." %
                         (compose_version, MINIMUM_COMPOSE_VERSION, MINIMUM_COMPOSE_VERSION))
    self.log("options: ")
    self.log(self.options, pretty_print=True)
    if self.definition:
        # Inline service definition: dump it to a temporary
        # docker-compose.yml so compose can load it.
        if not HAS_YAML:
            self.client.fail("Unable to load yaml. Try `pip install PyYAML`. Error: %s" % HAS_YAML_EXC)
        if not self.project_name:
            self.client.fail("Parameter error - project_name required when providing definition.")
        self.project_src = tempfile.mkdtemp(prefix="ansible")
        compose_file = os.path.join(self.project_src, "docker-compose.yml")
        try:
            self.log('writing: ')
            self.log(yaml.dump(self.definition, default_flow_style=False))
            with open(compose_file, 'w') as f:
                f.write(yaml.dump(self.definition, default_flow_style=False))
        except Exception as exc:
            self.client.fail("Error writing to %s - %s" % (compose_file, str(exc)))
    else:
        if not self.project_src:
            self.client.fail("Parameter error - project_src required.")
    try:
        self.log("project_src: %s" % self.project_src)
        self.project = project_from_options(self.project_src, self.options)
    except Exception as exc:
        self.client.fail("Configuration error - %s" % str(exc))
def get_docker_project(cls, request, all_docker_projects):
    """
    Get the Docker project, creating a new one if it doesn't exists.

    Returns the project instance, which can be used to start and stop
    the Docker containers.

    Projects are cached in ``all_docker_projects`` keyed by the compose
    file set, so repeated calls reuse the same project. Honours the
    ``--docker-compose-no-build`` and ``--use-running-containers``
    pytest options.
    """
    testdir = request.fspath.dirname
    project_dir = request.config.getoption("project_dir")
    if project_dir:
        basedirs = [project_dir]
    else:
        # Candidate directories to search, in priority order; keep only
        # the ones that actually exist.
        basedirs = [
            d for d in (os.path.join(testdir, 'docker-compose'), testdir, '.')
            if Path(d).is_dir()
        ]
    project = None
    # Reuse a previously created project for any of the candidate dirs.
    for basedir in basedirs:
        try:
            project = all_docker_projects[basedir]
            break
        except KeyError:
            pass
    if not project:
        docker_compose_options = request.config.getoption("docker_compose")
        if docker_compose_options:
            files = []
            split_deprecated_displayed = False
            splitted_docker_compose_options = []
            # Expand deprecated comma-separated values into individual
            # options, warning once per invocation.
            for docker_compose_option in docker_compose_options:
                if ',' in docker_compose_option:
                    if not split_deprecated_displayed:
                        warnings.warn(
                            DeprecationWarning(
                                "Using ',' in --docker-compose option to specify multiple compose files is deprecated."
                                "You can now use --docker-compose many times in the same command."
                            ))
                        split_deprecated_displayed = True
                    for splitted_docker_compose_option in docker_compose_option.split(
                            ','):
                        splitted_docker_compose_options.append(
                            splitted_docker_compose_option)
                else:
                    splitted_docker_compose_options.append(
                        docker_compose_option)
            if not project_dir:
                project_dir = cls.projectdir_from_basedirs_and_docker_compose_options(
                    basedirs, docker_compose_options)
            # Resolve each option to a compose file; a directory resolves
            # to its docker-compose.yml.
            for docker_compose_option in docker_compose_options:
                docker_compose_path = Path(project_dir).joinpath(
                    docker_compose_option)
                if docker_compose_path.is_dir():
                    docker_compose_path = docker_compose_path.joinpath(
                        "docker-compose.yml")
                if not docker_compose_path.is_file():
                    raise ValueError(
                        "Unable to find `{docker_compose}` "
                        "for integration tests in following directories: `{basedirs}`."
                        .format(docker_compose=docker_compose_option,
                                basedirs=basedirs),
                    )
                files.append(
                    str(docker_compose_path.relative_to(project_dir)))
            # Cache key identifies the exact set of compose files used.
            project_key = '|'.join(files)
            project = project_from_options(
                project_dir=str(project_dir),
                options={"--file": files},
            )
        else:
            project, project_key = cls.project_from_options_for_each_dir(
                basedirs, options={})
        all_docker_projects[project_key] = project
    if not request.config.getoption("--docker-compose-no-build"):
        project.build()
    if request.config.getoption("--use-running-containers"):
        if not request.config.getoption("--docker-compose-no-build"):
            warnings.warn(
                UserWarning(
                    "You used the '--use-running-containers' without the "
                    "'--docker-compose-no-build' flag, the newly build "
                    "containers won't be used if there are already "
                    "containers running!"))
        current_containers = project.containers()
        containers = project.up()
        # up() returns all containers; if it had to start some, not all
        # were running beforehand — warn the user.
        if not set(current_containers) == set(containers):
            warnings.warn(
                UserWarning(
                    "You used the '--use-running-containers' but "
                    "pytest-docker-compose could not find all containers "
                    "running. The remaining containers have been started.")
            )
    else:
        # Without --use-running-containers, pre-existing containers would
        # shadow the freshly built ones — refuse to continue.
        if any(project.containers()):
            raise ContainersAlreadyExist(
                "There are already existing containers, please remove all "
                "containers by running 'docker-compose down' before using "
                "the pytest-docker-compose plugin. Alternatively, you "
                "can use the '--use-running-containers' flag to indicate "
                "you will use the currently running containers.")
    return project
def pre_integration_setup(request):
    """Determines if the integration tests can be run, and sets up the containers.

    Skips (xfail) unless running in CI or ``--run-integration`` was given.
    Probes for a working Docker daemon, imports the selected simulator's
    ``pre_integration_check``, brings the compose project up, yields it for
    the duration of the test session, and tears it down afterwards.
    """
    should_skip = not (
        os.getenv("CI", "").upper() == "TRUE"
        or request.config.getoption("--run-integration")
    )
    if should_skip:
        pytest.xfail("Not running in CI, and --run-integration not specified")
    command = ["docker", "system", "info"]
    docker_host = request.config.getoption("--docker-host")
    if docker_host:
        # BUG FIX: "-H" and its value must be separate argv entries; the old
        # single "-H <host>" token would reach docker as one unknown argument.
        command[1:1] = ["-H", docker_host]
    # BUG FIX: run without a shell. Passing a *list* with shell=True executes
    # only "docker" on POSIX and silently drops the remaining arguments, so
    # the availability probe never actually checked "system info".
    docker_available = not subprocess.call(
        command,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    if not docker_available:
        pytest.fail("Integration tests specified but Docker not detected")
    integration_sim = request.config.getoption("--integration-sim")
    # Grab and run the pre_integration_check function for the specified simulator
    mod = importlib.import_module(f"{__package__}.{integration_sim}.__init__")
    try:
        pre_integration_check = getattr(mod, "pre_integration_check")
    except AttributeError:
        pytest.exit(
            f"Integration test package for {integration_sim} has no function named "
            "pre_integration_check",
            1,
        )
    pre_integration_check()
    # Talk to the API on the docker host if one was given, else localhost.
    api_host = docker_host.split(":")[0] if docker_host else "localhost"
    api_base = f"http://{api_host}:{Settings.PORT}{API_PREFIX}"
    tests.integration.API_BASE = api_base
    compose_file = Path("tests", "integration", integration_sim, "docker-compose.yml",)
    if not compose_file.exists():
        pytest.exit(f"Couldn't find {compose_file}", 1)
    project = project_from_options(
        project_dir=str(compose_file.parent),
        options={
            "--file": [compose_file.name],
            "--host": docker_host or None,
            "--project-name": "bluebird",
        },
    )
    project.up(do_build=BuildAction.force)
    print("\n=== Integration setup complete ===\n")
    yield project  # Runs all the tests
    # Teardown: remove containers and their volumes immediately (timeout=0).
    project.down(ImageType.none, include_volumes=True, timeout=0)
def get_handle(self):
    """Return a TopLevelCommand bound to this instance's compose project."""
    project = project_from_options(self.project_dir, self.options)
    return TopLevelCommand(project)
def __init__(self, client):
    """Initialise the manager from an Ansible docker client.

    Mirrors the module parameters onto instance attributes, prepares the
    docker-compose option map, verifies the docker-compose and PyYAML
    dependencies, and builds the compose project — from an inline
    ``definition`` written to a temporary docker-compose.yml, or from an
    existing ``project_src`` directory.

    All errors are surfaced via ``client.fail``.
    """
    super(ContainerManager, self).__init__()
    self.client = client
    # Parameter defaults; every one of these may be overwritten by the
    # setattr loop over client.module.params below.
    self.project_src = None
    self.files = None
    self.project_name = None
    self.state = None
    self.definition = None
    self.hostname_check = None
    self.timeout = None
    self.remove_images = None
    self.remove_orphans = None
    self.remove_volumes = None
    self.stopped = None
    self.restarted = None
    self.recreate = None
    self.build = None
    self.dependencies = None
    self.services = None
    self.scale = None
    self.debug = None
    self.pull = None
    self.nocache = None
    for key, value in client.module.params.items():
        setattr(self, key, value)
    self.check_mode = client.check_mode
    if not self.debug:
        # Inherit Ansible's global debug switch when not set explicitly.
        self.debug = client.module._debug
    # docopt-style options dict for compose's project_from_options().
    self.options = dict()
    self.options.update(self._get_auth_options())
    self.options[u'--skip-hostname-check'] = (not self.hostname_check)
    if self.project_name:
        self.options[u'--project-name'] = self.project_name
    if self.files:
        self.options[u'--file'] = self.files
    if not HAS_COMPOSE:
        self.client.fail(
            "Unable to load docker-compose. Try `pip install docker-compose`. Error: %s"
            % HAS_COMPOSE_EXC)
    if LooseVersion(compose_version) < LooseVersion(
            MINIMUM_COMPOSE_VERSION):
        self.client.fail(
            "Found docker-compose version %s. Minimum required version is %s. "
            "Upgrade docker-compose to a min version of %s." %
            (compose_version, MINIMUM_COMPOSE_VERSION,
             MINIMUM_COMPOSE_VERSION))
    self.log("options: ")
    self.log(self.options, pretty_print=True)
    if self.definition:
        # Inline definition path: materialise it as a temporary
        # docker-compose.yml for compose to consume.
        if not HAS_YAML:
            self.client.fail(
                "Unable to load yaml. Try `pip install PyYAML`. Error: %s"
                % HAS_YAML_EXC)
        if not self.project_name:
            self.client.fail(
                "Parameter error - project_name required when providing definition."
            )
        self.project_src = tempfile.mkdtemp(prefix="ansible")
        compose_file = os.path.join(self.project_src, "docker-compose.yml")
        try:
            self.log('writing: ')
            self.log(yaml.dump(self.definition, default_flow_style=False))
            with open(compose_file, 'w') as f:
                f.write(
                    yaml.dump(self.definition, default_flow_style=False))
        except Exception as exc:
            self.client.fail("Error writing to %s - %s" %
                             (compose_file, str(exc)))
    else:
        if not self.project_src:
            self.client.fail("Parameter error - project_src required.")
    try:
        self.log("project_src: %s" % self.project_src)
        self.project = project_from_options(self.project_src,
                                            self.options)
    except Exception as exc:
        self.client.fail("Configuration error - %s" % str(exc))
def get_module_options(compose_file_name: str, debug=False) -> dict:
    """Translate compose services into IoT Edge module definitions.

    Loads the compose project, derives docker container create-options for
    each service, strips empty/defaulted fields, and returns a mapping of
    service name -> module entry (version/type/status/restartPolicy/settings)
    suitable for the deployment template's ``modules`` section.

    Args:
        compose_file_name: path to the docker-compose YAML file.
        debug: unused here — presumably a debugging switch for callers;
            TODO confirm before removing.
    """
    project_dir = Path(compose_file_name).absolute().resolve().parent
    # Full docopt-style option map expected by compose's
    # project_from_options(); only --file/--project-directory matter here.
    options = {
        '--compatibility': False,
        '--env-file': None,
        '--file': [compose_file_name],
        '--help': False,
        '--host': None,
        '--log-level': None,
        '--no-ansi': False,
        '--project-directory': project_dir,
        '--project-name': None,
        '--skip-hostname-check': False,
        '--tls': False,
        '--tlscacert': None,
        '--tlscert': None,
        '--tlskey': None,
        '--tlsverify': False,
        '--verbose': False,
        '--version': False,
        '-h': False,
        'ARGS': ['--force-recreate'],
        'COMMAND': 'up'
    }
    project = project_from_options(".", options)
    modules = {}
    for name in project.service_names:
        service = project.get_service(name)
        # NOTE(review): relies on compose's private
        # Service._get_container_create_options() — may break across
        # docker-compose versions.
        create_option = service._get_container_create_options({}, 1)
        keys = [
            "image", "command", "hostname", "user", "detach", "stdin_open",
            "tty", "ports", "environment", "volumes", "network_disabled",
            "entrypoint", "working_dir", "domainname", "host_config",
            "mac_address", "labels", "stop_signal", "networking_config",
            "healthcheck", "stop_timeout", "runtime"
        ]
        # if keys not exist, set default, required by function ContainerConfig
        params = {}
        for key in keys:
            if key in create_option:
                params[key] = create_option[key]
            else:
                params[key] = None
                # Boolean flags default to False rather than None.
                if key in ["detach", "stdin_open", "tty"]:
                    params[key] = False
        create_options = ContainerConfig(project.config_version.vstring,
                                         **params)
        # delete empty fields
        delete_list = []
        for k, v in create_options.items():
            if not v:
                delete_list.append(k)
        for k in delete_list:
            del create_options[k]
        delete_list = []
        for k, v in create_options["HostConfig"].items():
            if not v:
                delete_list.append(k)
        for k in delete_list:
            del create_options["HostConfig"][k]
        # Drop network settings that merely point at the compose project's
        # default network — the edge runtime supplies its own networking.
        if service.network_mode.network_mode == project.name + "_default":
            try:
                del create_options["HostConfig"]["NetworkMode"]
            except KeyError:
                pass
            try:
                del create_options["NetworkingConfig"]
            except KeyError:
                pass
        # Restart policy is lifted out of create options into the module
        # entry; compose's "no" maps to the edge runtime's "never".
        restart_policy = "always"
        try:
            restart_policy = create_options["HostConfig"]["RestartPolicy"]["Name"]
            del create_options["HostConfig"]["RestartPolicy"]
            if restart_policy == "no":
                restart_policy = "never"
        except KeyError:
            pass
        # set folder reference if build option in compose file
        build_opt = service.options.get("build", {})
        image = create_options["Image"]
        del create_options["Image"]
        if build_opt:
            # Placeholder resolved at build time to the module's own image.
            image = "${{MODULES.{}}}".format(name)
        modules[name] = {
            "version": "1.0",
            "type": "docker",
            "status": "running",
            "restartPolicy": restart_policy,
            "settings": {
                "image": image,
                "createOptions": create_options
            }
        }
    return modules