def test_docker_client_no_home(self):
    """docker_client must work when HOME is absent from the environment."""
    with mock.patch.dict(os.environ):
        # remove HOME if present; absence is fine
        os.environ.pop('HOME', None)
        docker_client(os.environ)
def polling_service_status_swarm_mode():
    """Poll swarm tasks forever; schedule a haproxy reload when the set of
    linked tasks changes."""
    while True:
        time.sleep(config.SWARM_MODE_POLLING_INTERVAL)
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            tasks = docker.tasks(filters={"desired-state": "running"})
            linked_tasks = set()
            for task in tasks:
                task_nets = [
                    network.get("Network", {}).get("ID", "")
                    for network in task.get("NetworksAttachments", [])
                ]
                task_service_id = task.get("ServiceID", "")
                # a task is "linked" when it belongs to another service but
                # shares at least one network with this haproxy service
                if task_service_id != Haproxy.cls_service_id and Haproxy.cls_nets.intersection(
                        set(task_nets)):
                    task_id = task.get("ID", "")
                    linked_tasks.add(task_id)
            if Haproxy.cls_linked_tasks != linked_tasks:
                add_haproxy_run_task("Tasks are updated")
        except APIError as e:
            logger.info("Docker API error: %s" % e)
def listen_docker_events():
    """Stream docker events and reload haproxy when a linked compose service
    container starts or dies."""
    try:
        try:
            docker = docker_client()
        except Exception:  # was a bare except; fall back to env-based client
            docker = docker_client(os.environ)
        docker.ping()
        for event in docker.events(decode=True):
            logger.debug(event)
            # default to {} so events without attributes don't crash on .get()
            attr = event.get("Actor", {}).get("Attributes", {})
            compose_project = attr.get("com.docker.compose.project", "")
            compose_service = attr.get("com.docker.compose.service", "")
            container_name = attr.get("name", "")
            event_action = event.get("Action", "")
            service = "%s_%s" % (compose_project, compose_service)
            if service in Haproxy.cls_linked_services and event_action in [
                "start", "die"
            ]:
                msg = "Docker event: container %s %s" % (container_name,
                                                         event_action)
                run_haproxy(msg)
    except APIError as e:
        logger.info("Docker API error: %s" % e)
def test_docker_client_no_home(self):
    """docker_client accepts an environment with no HOME variable."""
    with mock.patch.dict(os.environ):
        # drop HOME if present; nothing to do otherwise
        os.environ.pop('HOME', None)
        docker_client(os.environ, version=DEFAULT_DOCKER_API_VERSION)
def _init_compose_mode_links():
    """Discover compose-mode links for this haproxy container.

    Returns the links dict, or None to signal regression to legacy link mode.
    """
    try:
        try:
            docker = docker_client()
        except Exception:  # was a bare except; fall back to env-based client
            docker = docker_client(os.environ)
        docker.ping()
        container_id = os.environ.get("HOSTNAME", "")
        haproxy_container = docker.inspect_container(container_id)
    except Exception as e:
        logger.info("Docker API error, regressing to legacy links mode: %s" % e)
        return None
    try:
        links, Haproxy.cls_linked_services = ComposeModeLinkHelper.get_compose_mode_links(docker, haproxy_container)
    except Exception as e:
        logger.info("Docker API error, regressing to legacy links mode: %s" % e)
        return None
    if ADDITIONAL_SERVICES:
        additional_links, additional_services = ComposeModeLinkHelper.get_additional_links(docker, ADDITIONAL_SERVICES)
        if additional_links and additional_services:
            links.update(additional_links)
            Haproxy.cls_linked_services.update(additional_services)
    logger.info("Linked service: %s", ", ".join(ComposeModeLinkHelper.get_service_links_str(links)))
    logger.info("Linked container: %s", ", ".join(ComposeModeLinkHelper.get_container_links_str(links)))
    return links
def _init_swarm_mode_links():
    """Discover swarm-mode links for this haproxy service.

    Returns the links dict, or None to signal regression to legacy link mode.
    """
    try:
        try:
            docker = docker_client()
        except Exception:  # was a bare except; fall back to env-based client
            docker = docker_client(os.environ)
        docker.ping()
    except Exception as e:
        logger.info(
            "Docker API error, regressing to legacy links mode: %s" % e)
        return None
    haproxy_container_id = os.environ.get("HOSTNAME", "")
    Haproxy.cls_service_id, Haproxy.cls_nets = SwarmModeLinkHelper.get_swarm_mode_haproxy_id_nets(
        docker, haproxy_container_id)
    links, Haproxy.cls_linked_tasks = SwarmModeLinkHelper.get_swarm_mode_links(
        docker, Haproxy.cls_service_id, Haproxy.cls_nets)
    logger.info(
        "Linked service: %s",
        ", ".join(SwarmModeLinkHelper.get_service_links_str(links)))
    logger.info(
        "Linked container: %s",
        ", ".join(SwarmModeLinkHelper.get_container_links_str(links)))
    return links
def _init_compose_mode_network():
    """Discover compose-mode networks for this haproxy container.

    Returns the links dict, or None to signal regression to legacy link mode.
    """
    try:
        try:
            docker = docker_client()
        except Exception:  # was a bare except; fall back to env-based client
            docker = docker_client(os.environ)
        docker.ping()
        container_id = os.environ.get("HOSTNAME", "")
        haproxy_container = docker.inspect_container(container_id)
    except Exception as e:
        logger.info(
            "Docker API error, regressing to legacy links mode: %s" % e)
        return None
    try:
        links, Haproxy.cls_linked_services = ComposeModeNetworksHelper.get_compose_mode_networks(
            docker, haproxy_container)
    except Exception as e:
        logger.info(
            "Docker API error, regressing to legacy links mode: %s" % e)
        return None
    logger.info("Linked service: %s",
                ", ".join(ComposeModeHelper.get_service_links_str(links)))
    logger.info(
        "Linked container: %s",
        ", ".join(ComposeModeHelper.get_container_links_str(links)))
    return links
def _init_compose_mode_links():
    """Discover compose-mode links for this haproxy container.

    Returns the links dict, or None to signal regression to legacy link mode.
    """
    try:
        try:
            docker = docker_client()
        except Exception:  # was a bare except; fall back to env-based client
            docker = docker_client(os.environ)
        docker.ping()
        container_id = os.environ.get("HOSTNAME", "")
        haproxy_container = docker.inspect_container(container_id)
    except Exception as e:
        logger.info(
            "Docker API error, regressing to legacy links mode: %s" % e)
        return None
    try:
        links, Haproxy.cls_linked_services = ComposeModeLinkHelper.get_compose_mode_links(
            docker, haproxy_container)
    except Exception as e:
        logger.info(
            "Docker API error, regressing to legacy links mode: %s" % e)
        return None
    if ADDITIONAL_SERVICES:
        additional_links, additional_services = ComposeModeLinkHelper.get_additional_links(
            docker, ADDITIONAL_SERVICES)
        if additional_links and additional_services:
            links.update(additional_links)
            Haproxy.cls_linked_services.update(additional_services)
    logger.info(
        "Linked service: %s",
        ", ".join(ComposeModeLinkHelper.get_service_links_str(links)))
    logger.info(
        "Linked container: %s",
        ", ".join(ComposeModeLinkHelper.get_container_links_str(links)))
    return links
def listen_docker_events_compose_mode():
    """Stream docker events forever, scheduling a haproxy run when a linked
    compose service container starts or dies; reconnect on API errors."""
    while True:
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            docker.ping()
            for event in docker.events(decode=True):
                logger.debug(event)
                attr = event.get("Actor", {}).get("Attributes", {})
                compose_project = attr.get("com.docker.compose.project", "")
                compose_service = attr.get("com.docker.compose.service", "")
                container_name = attr.get("name", "")
                event_action = event.get("Action", "")
                service = "%s_%s" % (compose_project, compose_service)
                if service in Haproxy.cls_linked_services and event_action in ["start", "die"]:
                    msg = "Docker event: container %s %s" % (container_name, event_action)
                    add_haproxy_run_task(msg)
        except APIError as e:
            logger.info("Docker API error: %s" % e)
        # brief pause before re-establishing the event stream
        time.sleep(1)
        add_haproxy_run_task("Reconnect docker events")
def check_running_mode(container_uri, service_uri, api_auth):
    """Determine the running mode (Cloud/Swarm/Compose/Legacy), log why, and
    return it.

    :param container_uri: Docker Cloud container URI (truthy when on Cloud)
    :param service_uri: Docker Cloud service URI
    :param api_auth: Docker Cloud API credentials
    """
    mode, msg = None, ""
    if container_uri and service_uri and api_auth:
        # NOTE(review): both inner checks are redundant given the outer
        # condition, so the "unable to access" branch is unreachable; kept
        # intact to preserve behavior.
        if container_uri and service_uri:
            if api_auth:
                msg = "dockercloud/haproxy %s has access to the Docker Cloud API - will reload list of backends " \
                      " in real-time" % __version__
            else:
                msg = "dockercloud/haproxy %s is unable to access the Docker Cloud API - you might want to" \
                      " give an API role to this service for automatic backend reconfiguration" % __version__
        mode = RunningMode.CloudMode
    else:
        reason = ""
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            docker.ping()
        except Exception as e:
            reason = "unable to connect to docker daemon %s" % e
            mode = RunningMode.LegacyMode
        if mode != RunningMode.LegacyMode:
            container_id = os.environ.get("HOSTNAME", "")
            if not container_id:
                reason = "unable to get dockercloud/haproxy container ID, is HOSTNAME envvar overwritten?"
                mode = RunningMode.LegacyMode
            else:
                try:
                    container = docker.inspect_container(container_id)
                    if container.get("HostConfig", {}).get("Links", []):
                        reason = "dockercloud/haproxy container is running on default bridge"
                        mode = RunningMode.LegacyMode
                    else:
                        # swarm/compose labels decide the non-legacy mode
                        labels = container.get("Config", {}).get("Labels", {})
                        if labels.get("com.docker.swarm.service.id", ""):
                            mode = RunningMode.SwarmMode
                        elif labels.get("com.docker.compose.project", ""):
                            mode = RunningMode.ComposeMode
                        else:
                            reason = "dockercloud/haproxy container doesn't contain any compose or swarm labels"
                            mode = RunningMode.LegacyMode
                except Exception as e:
                    reason = "unable to get dockercloud/haproxy container inspect information, %s" % e
                    mode = RunningMode.LegacyMode
        logger.info("dockercloud/haproxy %s is running outside Docker Cloud" % __version__)
        if mode == RunningMode.LegacyMode:
            msg = "Haproxy is running using legacy link, loading HAProxy definition from environment variables: %s" % reason
        elif mode == RunningMode.ComposeMode:
            msg = "Haproxy is running by docker-compose, loading HAProxy definition through docker api"
        elif mode == RunningMode.SwarmMode:
            msg = "Haproxy is running in SwarmMode, loading HAProxy definition through docker api"
    logger.info(msg)
    return mode
def check_link_mode(container_uri, service_uri, api_auth):
    """Return "cloud", "new" or "legacy" depending on which APIs are reachable."""
    if container_uri and service_uri and api_auth:
        # NOTE(review): inner checks are redundant given the outer condition;
        # the "unable to access" branch is unreachable but preserved.
        if container_uri and service_uri:
            if api_auth:
                logger.info("dockercloud/haproxy %s has access to the Docker Cloud API - will reload list of backends"
                            " in real-time" % __version__)
            else:
                logger.info("dockercloud/haproxy %s is unable to access the Docker cloud API - you might want to"
                            " give an API role to this service for automatic backend reconfiguration" % __version__)
        return "cloud"
    else:
        link_mode = "new"
        reason = ""
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            docker.ping()
        except Exception as e:
            reason = "unable to connect to docker daemon %s" % e
            link_mode = "legacy"
        if link_mode == "new":
            container_id = os.environ.get("HOSTNAME", "")
            if not container_id:
                reason = "unable to get dockercloud/haproxy container ID, is HOSTNAME envvar overwritten?"
                link_mode = "legacy"
            else:
                try:
                    container = docker.inspect_container(container_id)
                    if container.get("HostConfig", {}).get("Links", []):
                        reason = "dockercloud/haproxy container is running on default bridge"
                        link_mode = "legacy"
                except Exception as e:
                    reason = "unable to get dockercloud/haproxy container inspect information, %s" % e
                    link_mode = "legacy"
        logger.info("dockercloud/haproxy %s is running outside Docker Cloud" % __version__)
        if link_mode == "new":
            logger.info(
                "New link mode, loading HAProxy definition through docker api")
        else:
            logger.info(
                "Legacy link mode, loading HAProxy definition from environment variables: %s",
                reason)
        return link_mode
def detach_auxiliary_project(self, network):
    """Disconnect *network* from the auxiliary project's containers.

    If removing this network would leave a container attached only to the
    project's own networks, the whole auxiliary project is torn down instead.
    """
    if not self.auxiliary_project:
        return
    aux = self.get_auxiliary_project()
    if not os.path.exists(self.lockdir):
        os.makedirs(self.lockdir)
    # serialize attach/detach on the same auxiliary project across processes
    lock = filelock.FileLock("%s/%s" % (self.lockdir, self.auxiliary_project_name))
    with lock.acquire(60):
        client = docker_client(self.environment())
        containers = client.containers(filters={
            'label': [
                'org.iamdork.auxiliary.network',
                'com.docker.compose.project=%s' % self.auxiliary_project_name
            ],
        })
        for container in containers:
            if network in container['NetworkSettings']['Networks']:
                # last external network? then the project is no longer needed
                if (len(container['NetworkSettings']['Networks']) - 1) == len(aux.networks.networks):
                    aux.down(remove_image_type=None, include_volumes=False, remove_orphans=True)
                    break
                else:
                    client.disconnect_container_from_network(container, network)
def __get_free_subnet(self):
    """Return the first candidate subnet that overlaps no existing docker
    network, starting from ``self.default_subnet``."""
    client = docker_client(self.env)
    subnets = []
    # List all networks and save them as Subnet object into a list.
    for network in client.networks():
        for config in network['IPAM']['Config']:
            subnets.append(Subnet(config['Subnet']))
    # Find a suitable network, by checking going through all possibilities
    # until a network doesn't overlaps with an existing one.
    # todo: Improve algorithm, so that it doesn't searches forever.
    res = self.default_subnet
    overlaps = True
    while overlaps:
        # Test if the selected network overlaps with existing ones.
        for subnet in subnets:
            # if it overlaps, then set overlaps then get the next possible
            # networks and stop testing against the rest of the existing
            # networks.
            if subnet.overlaps(res):
                overlaps = True
                res = res.next_net
                break
        else:
            # for/else: no existing subnet overlapped, candidate is free
            overlaps = False
    return res
def _init_new_links():
    """Discover new-mode links (plus ADDITIONAL_SERVICES) for this haproxy
    container; return None to regress to legacy link mode."""
    try:
        docker = docker_client()
        docker.ping()
        container_id = os.environ.get("HOSTNAME", "")
        haproxy_container = docker.inspect_container(container_id)
    except Exception as e:
        # was logger.info("...: ", e) — no %s placeholder, so the exception
        # was never rendered and logging raised a formatting error
        logger.info("Docker API error, regressing to legacy links mode: %s", e)
        return None
    links, Haproxy.cls_linked_services = NewLinkHelper.get_new_links(
        docker, haproxy_container)
    try:
        if ADDITIONAL_SERVICES:
            additional_services = ADDITIONAL_SERVICES.split(",")
            NewLinkHelper.get_additional_links(docker, additional_services,
                                               haproxy_container, links,
                                               Haproxy.cls_linked_services)
    except Exception as e:
        logger.info("Error loading ADDITIONAL_SERVICES: %s" % str(e))
        return None
    logger.info("Linked service: %s",
                ", ".join(NewLinkHelper.get_service_links_str(links)))
    logger.info("Linked container: %s",
                ", ".join(NewLinkHelper.get_container_links_str(links)))
    return links
def setUpClass(cls):
    """Create a shared client on the newest API version the engine supports."""
    selected = API_VERSIONS[1] if engine_version_too_low_for_v2() else API_VERSIONS[2]
    cls.client = docker_client(selected)
def test_up_with_networking(self):
    """`up --x-networking` creates one bridge network that every service
    container joins, and no legacy links are used."""
    self.require_api_version('1.21')
    self.base_dir = 'tests/fixtures/links-composefile'
    self.dispatch(['--x-networking', 'up', '-d'], None)
    client = docker_client(version='1.21')
    services = self.project.get_services()
    networks = client.networks(names=[self.project.name])
    for n in networks:
        self.addCleanup(client.remove_network, n['Id'])
    self.assertEqual(len(networks), 1)
    self.assertEqual(networks[0]['Driver'], 'bridge')
    network = client.inspect_network(networks[0]['Id'])
    # every service's single container must be attached to the network
    self.assertEqual(len(network['Containers']), len(services))
    for service in services:
        containers = service.containers()
        self.assertEqual(len(containers), 1)
        self.assertIn(containers[0].id, network['Containers'])
    # networking mode must not fall back to legacy container links
    web_container = self.project.get_service('web').containers()[0]
    self.assertFalse(web_container.get('HostConfig.Links'))
def test_up_with_networking(self):
    """`up --x-networking` creates one bridge network joined by every service
    container, sets container hostnames to the service name, and avoids
    legacy links."""
    self.require_api_version('1.21')
    self.base_dir = 'tests/fixtures/links-composefile'
    self.dispatch(['--x-networking', 'up', '-d'], None)
    client = docker_client(version='1.21')
    services = self.project.get_services()
    networks = client.networks(names=[self.project.name])
    for n in networks:
        self.addCleanup(client.remove_network, n['Id'])
    self.assertEqual(len(networks), 1)
    self.assertEqual(networks[0]['Driver'], 'bridge')
    network = client.inspect_network(networks[0]['Id'])
    self.assertEqual(len(network['Containers']), len(services))
    for service in services:
        containers = service.containers()
        self.assertEqual(len(containers), 1)
        self.assertIn(containers[0].id, network['Containers'])
        # hostname doubles as the DNS name on the shared network
        self.assertEqual(containers[0].get('Config.Hostname'), service.name)
    web_container = self.project.get_service('web').containers()[0]
    self.assertFalse(web_container.get('HostConfig.Links'))
def attach_auxiliary_project(self, network):
    """Bring the auxiliary project up and connect its labelled containers to
    *network* (if not already connected)."""
    if not self.auxiliary_project:
        return
    aux = self.get_auxiliary_project()
    if not os.path.exists(self.lockdir):
        os.makedirs(self.lockdir)
    # serialize attach/detach on the same auxiliary project across processes
    lock = filelock.FileLock("%s/%s" % (self.lockdir, self.auxiliary_project_name))
    with lock.acquire(60):
        aux.up(detached=True, remove_orphans=True)
        client = docker_client(self.environment())
        containers = client.containers(filters={
            'label': [
                'org.iamdork.auxiliary.network',
                'com.docker.compose.project=%s' % self.auxiliary_project_name
            ],
        })
        for container in containers:
            if network not in container['NetworkSettings']['Networks']:
                client.connect_container_to_network(container, network)
def setUpClass(cls):
    """Create a shared client pinned to the best supported compose API version."""
    selected = API_VERSIONS[V1] if engine_version_too_low_for_v2() else API_VERSIONS[V2_0]
    cls.client = docker_client(Environment(), selected)
def setUpClass(cls):
    """Create a shared client pinned to the best supported compose API version."""
    selected = API_VERSIONS[V1] if engine_version_too_low_for_v2() else API_VERSIONS[V2_0]
    cls.client = docker_client(selected)
def test_up_with_networking(self):
    """`up --x-networking` creates one bridge network joined by every service
    container; no legacy links are used."""
    self.require_api_version("1.21")
    self.base_dir = "tests/fixtures/links-composefile"
    self.dispatch(["--x-networking", "up", "-d"], None)
    client = docker_client(version="1.21")
    services = self.project.get_services()
    networks = client.networks(names=[self.project.name])
    for n in networks:
        self.addCleanup(client.remove_network, n["Id"])
    self.assertEqual(len(networks), 1)
    self.assertEqual(networks[0]["Driver"], "bridge")
    network = client.inspect_network(networks[0]["Id"])
    self.assertEqual(len(network["Containers"]), len(services))
    for service in services:
        containers = service.containers()
        self.assertEqual(len(containers), 1)
        self.assertIn(containers[0].id, network["Containers"])
    # networking mode must not fall back to legacy container links
    web_container = self.project.get_service("web").containers()[0]
    self.assertFalse(web_container.get("HostConfig.Links"))
def containers():
    """ active containers """
    client = docker_client(Environment(), API_VERSIONS[V2_0])
    return client.containers()
def polling_service_status_swarm_mode():
    """Poll swarm tasks forever; schedule a haproxy reload when the linked
    task set changes."""
    while True:
        time.sleep(config.SWARM_MODE_POLLING_INTERVAL)
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            services = docker.services()
            tasks = docker.tasks(filters={"desired-state": "running"})
            _, linked_tasks = SwarmModeLinkHelper.get_task_links(
                tasks, services, Haproxy.cls_service_id, Haproxy.cls_nets)
            # cmp() was removed in Python 3; direct inequality is equivalent
            if Haproxy.cls_linked_tasks != linked_tasks:
                add_haproxy_run_task("Tasks are updated")
        except APIError as e:
            logger.info("Docker API error: %s" % e)
def test_get_network(self):
    """Project.get_network finds a pre-existing network named after the
    project."""
    self.require_api_version('1.21')
    client = docker_client(version='1.21')
    network_name = 'network_does_exist'
    project = Project(network_name, [], client)
    # create the network up-front so get_network can discover it
    client.create_network(network_name)
    assert project.get_network()['name'] == network_name
def __init__(self, config):
    # type: (TargetConfig) -> None
    """Bind the target config and build the construi compose project."""
    self.config = config
    project_name = "construi_%s" % self.config.construi.project_name
    client = docker_client(os.environ, version="auto")
    self.project = Project.from_config(project_name, config.compose, client)
def polling_service_status_swarm_mode():
    """Poll swarm tasks forever; schedule a haproxy reload when the linked
    task set changes."""
    while True:
        time.sleep(config.SWARM_MODE_POLLING_INTERVAL)
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            services = docker.services()
            tasks = docker.tasks(filters={"desired-state": "running"})
            _, linked_tasks = SwarmModeLinkHelper.get_task_links(tasks, services,
                                                                 Haproxy.cls_service_id,
                                                                 Haproxy.cls_nets)
            # cmp() was removed in Python 3; direct inequality is equivalent
            if Haproxy.cls_linked_tasks != linked_tasks:
                add_haproxy_run_task("Tasks are updated")
        except APIError as e:
            logger.info("Docker API error: %s" % e)
def check_link_mode(container_uri, service_uri, api_auth):
    """Return "cloud", "new" or "legacy" depending on which APIs are reachable."""
    if container_uri and service_uri and api_auth:
        # NOTE(review): inner checks are redundant given the outer condition;
        # the "unable to access" branch is unreachable but preserved.
        if container_uri and service_uri:
            if api_auth:
                logger.info("dockercloud/haproxy %s has access to the Docker Cloud API - will reload list of backends"
                            " in real-time" % __version__)
            else:
                logger.info("dockercloud/haproxy %s is unable to access the Docker cloud API - you might want to"
                            " give an API role to this service for automatic backend reconfiguration" % __version__)
        return "cloud"
    else:
        link_mode = "new"
        reason = ""
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            docker.ping()
        except Exception as e:
            reason = "unable to connect to docker daemon %s" % e
            link_mode = "legacy"
        if link_mode == "new":
            container_id = os.environ.get("HOSTNAME", "")
            if not container_id:
                reason = "unable to get dockercloud/haproxy container ID, is HOSTNAME envvar overwritten?"
                link_mode = "legacy"
            else:
                try:
                    container = docker.inspect_container(container_id)
                    if container.get("HostConfig", {}).get("Links", []):
                        reason = "dockercloud/haproxy container is running on default bridge"
                        link_mode = "legacy"
                except Exception as e:
                    reason = "unable to get dockercloud/haproxy container inspect information, %s" % e
                    link_mode = "legacy"
        logger.info("dockercloud/haproxy %s is running outside Docker Cloud" % __version__)
        if link_mode == "new":
            logger.info("New link mode, loading HAProxy definition through docker api")
        else:
            logger.info("Legacy link mode, loading HAProxy definition from environment variables: %s", reason)
        return link_mode
def test_user_agent(self):
    """The User-Agent header embeds compose, docker-py and platform versions."""
    expected = "docker-compose/{0} docker-py/{1} {2}/{3}".format(
        compose.__version__,
        docker.__version__,
        platform.system(),
        platform.release(),
    )
    client = docker_client(os.environ)
    assert client.headers['User-Agent'] == expected
def reload_proxy(self):
    """Send ``nginx -s reload`` to every container labelled org.iamdork.proxy."""
    client = docker_client(self.env)
    proxies = client.containers(all=True, filters={'label': 'org.iamdork.proxy'})
    for proxy in proxies:
        handle = client.exec_create(proxy, 'nginx -s reload')
        client.exec_start(handle)
def setUpClass(self):
    """Build and start the Surveil integration compose project, then block
    until Surveil reports the 'ws-arbiter' host (or time out)."""
    surveil_dir = os.path.realpath(
        os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            "../../../../"
        )
    )
    compose_file = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'integration.yml'
    )
    project_config = compose_config.from_dictionary(
        compose_config.load_yaml(compose_file),
        working_dir=surveil_dir,
        filename=compose_file
    )
    self.project = compose_project.Project.from_dicts(
        "surveilintegrationtest",
        project_config,
        docker_client.docker_client()
    )
    # start from a clean slate before building and bringing services up
    self.project.kill()
    self.project.remove_stopped()
    self.project.build()
    self.project.up()
    self.surveil_client = sclient.Client(
        'http://localhost:8999/v2',
        auth_url='http://localhost:8999/v2/auth',
        version='2_0'
    )
    # Wait until Surveil is available
    now = time.time()
    while True:
        print("Waiting for surveil... %s" % int(time.time() - now))
        # give Surveil up to ~380 seconds to come up
        if time.time() < (now + 380):
            try:
                # If 'ws-arbiter' is found, Surveil is ready!
                configured_hosts = self.surveil_client.status.hosts.list()
                host_found = False
                for host in configured_hosts:
                    if host['host_name'].decode() == 'ws-arbiter':
                        host_found = True
                        break
                if host_found:
                    break
            except Exception:
                # API not up yet; retry after a pause
                pass
            time.sleep(10)
        else:
            raise Exception("Surveil could not start")
def _test_py_build(self):
    """Build the ``context`` directory via docker-py and record the image's
    layer history (oldest first)."""
    key = 'docker_py-build'
    # print as a function call is valid on both Python 2 and 3
    print(key.replace("-", " "))
    from compose.cli.docker_client import docker_client
    cli = docker_client()
    cli.ping()
    # drain the build output stream so the build runs to completion
    for _ in cli.build(path="context", tag="docker_py_build", rm=True):
        pass
    self.branched_history[key] = subprocess.check_output(
        ["docker", "history", "-q", "docker_py_build"]).splitlines()
    self.branched_history[key].reverse()
def __get_compose_project(self):
    """Load the 'metal' compose project from the configured compose file."""
    client = docker_client(Environment())
    config_files = [config.ConfigFile.from_filename(self.compose_file)]
    details = config.ConfigDetails(self.home_path, config_files)
    return DockerComposeProject.from_config(name='metal',
                                            client=client,
                                            config_data=config.load(details))
def _test_py_context_build(self):
    """Build from a pre-made context tarball via docker-py and record the
    image's layer history (oldest first)."""
    key = 'docker_py-context-build'
    # print as a function call is valid on both Python 2 and 3
    print(key.replace("-", " "))
    from compose.cli.docker_client import docker_client
    cli = docker_client()
    cli.ping()
    # a tarball is binary data: text mode would corrupt/fail the read on Py3
    with open("context.tar", "rb") as f:
        for _ in cli.build(tag="docker_py_context_build", fileobj=f,
                           custom_context=True, rm=True):
            pass
    self.branched_history[key] = subprocess.check_output(
        ["docker", "history", "-q", "docker_py_context_build"]).splitlines()
    self.branched_history[key].reverse()
def _init_swarm_mode_links():
    """Discover swarm-mode links for this haproxy service; return None to
    regress to legacy link mode."""
    try:
        try:
            docker = docker_client()
        except Exception:  # was a bare except; fall back to env-based client
            docker = docker_client(os.environ)
        docker.ping()
    except Exception as e:
        logger.info("Docker API error, regressing to legacy links mode: %s" % e)
        return None
    haproxy_container_id = os.environ.get("HOSTNAME", "")
    Haproxy.cls_service_id, Haproxy.cls_nets = SwarmModeLinkHelper.get_swarm_mode_haproxy_id_nets(docker, haproxy_container_id)
    links, Haproxy.cls_linked_tasks = SwarmModeLinkHelper.get_swarm_mode_links(docker, Haproxy.cls_service_id, Haproxy.cls_nets)
    logger.info("Linked service: %s", ", ".join(SwarmModeLinkHelper.get_service_links_str(links)))
    logger.info("Linked container: %s", ", ".join(SwarmModeLinkHelper.get_container_links_str(links)))
    return links
def test_run_with_networking(self):
    """`run --x-networking` creates exactly one project network and runs the
    one-off container with the given command."""
    self.require_api_version('1.21')
    client = docker_client(version='1.21')
    self.base_dir = 'tests/fixtures/simple-dockerfile'
    self.dispatch(['--x-networking', 'run', 'simple', 'true'], None)
    service = self.project.get_service('simple')
    # exactly one one-off container is expected (tuple unpacking enforces it)
    container, = service.containers(stopped=True, one_off=True)
    networks = client.networks(names=[self.project.name])
    for n in networks:
        self.addCleanup(client.remove_network, n['Id'])
    self.assertEqual(len(networks), 1)
    self.assertEqual(container.human_readable_command, u'true')
def test_custom_timeout_error(self):
    """A read timeout surfaces the configured COMPOSE_HTTP_TIMEOUT value in
    the logged error message."""
    os.environ['COMPOSE_HTTP_TIMEOUT'] = '123'
    client = docker_client(os.environ)
    with mock.patch('compose.cli.errors.log') as fake_log:
        with pytest.raises(errors.ConnectionError):
            with errors.handle_connection_errors(client):
                # simulate a requests-level read timeout
                raise errors.RequestsConnectionError(
                    errors.ReadTimeoutError(None, None, None))
    assert fake_log.error.call_count == 1
    # the timeout value must appear in the user-facing error text
    assert '123' in fake_log.error.call_args[0][0]
def test_run_with_networking(self):
    """`run --x-networking` creates exactly one project network and runs the
    one-off container with the given command."""
    self.require_api_version("1.21")
    client = docker_client(version="1.21")
    self.base_dir = "tests/fixtures/simple-dockerfile"
    self.dispatch(["--x-networking", "run", "simple", "true"], None)
    service = self.project.get_service("simple")
    # exactly one one-off container is expected (tuple unpacking enforces it)
    container, = service.containers(stopped=True, one_off=True)
    networks = client.networks(names=[self.project.name])
    for n in networks:
        self.addCleanup(client.remove_network, n["Id"])
    self.assertEqual(len(networks), 1)
    self.assertEqual(container.human_readable_command, u"true")
def test_project_up_with_custom_network(self):
    """Services pinned to a pre-existing custom network leave the project's
    own network unset."""
    self.require_api_version('1.21')
    client = docker_client(version='1.21')
    network_name = 'composetest-custom'
    client.create_network(network_name)
    self.addCleanup(client.remove_network, network_name)
    web = self.create_service('web', net=Net(network_name))
    project = Project('composetest', [web], client, use_networking=True)
    project.up()
    # no project-scoped network should have been created
    assert project.get_network() is None
def _init_new_links():
    """Discover new-mode links for this haproxy container; return None to
    regress to legacy link mode."""
    try:
        docker = docker_client()
        docker.ping()
        container_id = os.environ.get("HOSTNAME", "")
        haproxy_container = docker.inspect_container(container_id)
    except Exception as e:
        # was logger.info("...: ", e) — no %s placeholder, so the exception
        # was never rendered and logging raised a formatting error
        logger.info("Docker API error, regressing to legacy links mode: %s", e)
        return None
    links, Haproxy.cls_linked_services = NewLinkHelper.get_new_links(docker, haproxy_container)
    logger.info("Linked service: %s", ", ".join(NewLinkHelper.get_service_links_str(links)))
    logger.info("Linked container: %s", ", ".join(NewLinkHelper.get_container_links_str(links)))
    return links
def _fetch_project(name, config):
    """
    Wrap the call to Project.from_config as it has side effects

    :param name: name for the project
    :type name: str
    :param config: dictionary configuration
    :type config: dict
    :return: the docker-compose Project
    :rtype: Project
    """
    # Build the project-ready dictionary list; no working dir is needed here.
    project_dicts = from_dictionary(config, working_dir='')
    return Project.from_dicts(name, project_dicts, docker_client())
def get_project(compose_file=COMPOSE_FILE):
    """Build a compose Project named PROJECT_NAME from *compose_file*,
    handling the find() signature change between compose 1.4 and 1.5+."""
    from compose import __version__ as compose_version
    from compose.config import find, load
    from compose.project import Project
    from compose.cli.docker_client import docker_client

    if compose_version.startswith('1.4'):
        # compose 1.4: find() takes a single filename
        yaml_file = find('.', str(compose_file))
    else:
        # compose >= 1.5: find() takes a list of filenames
        yaml_file = find('.', [str(compose_file)])
    return Project.from_dicts(PROJECT_NAME, load(yaml_file), docker_client())
def polling_service_status_swarm_mode():
    """Poll swarm tasks forever; schedule a haproxy reload when the set of
    linked tasks changes."""
    while True:
        time.sleep(config.SWARM_MODE_POLLING_INTERVAL)
        try:
            try:
                docker = docker_client()
            except Exception:  # was a bare except; fall back to env-based client
                docker = docker_client(os.environ)
            tasks = docker.tasks(filters={"desired-state": "running"})
            linked_tasks = set()
            for task in tasks:
                task_nets = [network.get("Network", {}).get("ID", "")
                             for network in task.get("NetworksAttachments", [])]
                task_service_id = task.get("ServiceID", "")
                # a task is "linked" when it belongs to another service but
                # shares at least one network with this haproxy service
                if task_service_id != Haproxy.cls_service_id and Haproxy.cls_nets.intersection(set(task_nets)):
                    task_id = task.get("ID", "")
                    linked_tasks.add(task_id)
            if Haproxy.cls_linked_tasks != linked_tasks:
                add_haproxy_run_task("Tasks are updated")
        except APIError as e:
            logger.info("Docker API error: %s" % e)
def setUpClass(self):
    """Build and start the Surveil integration compose project, then block
    until Surveil reports the 'ws-arbiter' host (or time out)."""
    surveil_dir = os.path.realpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)),
                     "../../../../"))
    compose_file = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        'integration.yml')
    project_config = compose_config.from_dictionary(
        compose_config.load_yaml(compose_file),
        working_dir=surveil_dir,
        filename=compose_file)
    self.project = compose_project.Project.from_dicts(
        "surveilintegrationtest",
        project_config,
        docker_client.docker_client())
    # start from a clean slate before building and bringing services up
    self.project.kill()
    self.project.remove_stopped()
    self.project.build()
    self.project.up()
    self.surveil_client = sclient.Client(
        'http://localhost:8999/v2',
        auth_url='http://localhost:8999/v2/auth',
        version='2_0')
    # Wait until Surveil is available
    now = time.time()
    while True:
        print("Waiting for surveil... %s" % int(time.time() - now))
        # give Surveil up to ~380 seconds to come up
        if time.time() < (now + 380):
            try:
                # If 'ws-arbiter' is found, Surveil is ready!
                configured_hosts = self.surveil_client.status.hosts.list()
                host_found = False
                for host in configured_hosts:
                    if host['host_name'].decode() == 'ws-arbiter':
                        host_found = True
                        break
                if host_found:
                    break
            except Exception:
                # API not up yet; retry after a pause
                pass
            time.sleep(10)
        else:
            raise Exception("Surveil could not start")
def _init_new_links():
    """Discover new-mode links for this haproxy container; return None to
    regress to legacy link mode."""
    try:
        docker = docker_client()
        docker.ping()
        container_id = os.environ.get("HOSTNAME", "")
        haproxy_container = docker.inspect_container(container_id)
    except Exception as e:
        # was logger.info("...: ", e) — no %s placeholder, so the exception
        # was never rendered and logging raised a formatting error
        logger.info("Docker API error, regressing to legacy links mode: %s", e)
        return None
    links, Haproxy.cls_linked_services = NewLinkHelper.get_new_links(
        docker, haproxy_container)
    logger.info("Linked service: %s",
                ", ".join(NewLinkHelper.get_service_links_str(links)))
    logger.info("Linked container: %s",
                ", ".join(NewLinkHelper.get_container_links_str(links)))
    return links
def test_up_without_networking(self):
    """Plain `up` (no --x-networking) creates no network and falls back to
    legacy container links."""
    self.require_api_version("1.21")
    self.base_dir = "tests/fixtures/links-composefile"
    self.dispatch(["up", "-d"], None)
    client = docker_client(version="1.21")
    networks = client.networks(names=[self.project.name])
    self.assertEqual(len(networks), 0)
    for service in self.project.get_services():
        containers = service.containers()
        self.assertEqual(len(containers), 1)
        # without networking, hostname is NOT set to the service name
        self.assertNotEqual(containers[0].get("Config.Hostname"), service.name)
    web_container = self.project.get_service("web").containers()[0]
    self.assertTrue(web_container.get("HostConfig.Links"))
def test_up_without_networking(self):
    """Plain `up` (no --x-networking) creates no network and falls back to
    legacy container links."""
    self.require_api_version('1.21')
    self.base_dir = 'tests/fixtures/links-composefile'
    self.dispatch(['up', '-d'], None)
    client = docker_client(version='1.21')
    networks = client.networks(names=[self.project.name])
    self.assertEqual(len(networks), 0)
    for service in self.project.get_services():
        containers = service.containers()
        self.assertEqual(len(containers), 1)
        # without networking, hostname is NOT set to the service name
        self.assertNotEqual(containers[0].get('Config.Hostname'), service.name)
    web_container = self.project.get_service('web').containers()[0]
    self.assertTrue(web_container.get('HostConfig.Links'))
def get_project(self):
    """Build a fresh Project each call.

    A new client per call works around
    https://github.com/docker/compose/issues/1275.
    """
    client = docker_client(Environment())
    return Project.from_config(self.name, self.cd, client)
def __init__(self, config):
    """Bind the config and build the construi compose project."""
    self.config = config
    project_name = "construi_%s" % self.config.construi['project_name']
    client = docker_client(os.environ, version='auto')
    self.project = Project.from_config(project_name, config.compose, client)
def setUpClass(cls):
    """Create a shared client pinned to the engine's maximum API version."""
    cls.client = docker_client(Environment(), API_VERSIONS[engine_max_version()])
def test_user_agent(self):
    """The User-Agent header embeds compose, docker-py and platform versions."""
    expected = "docker-compose/{} docker-py/{} {}/{}".format(
        compose.__version__,
        docker.__version__,
        platform.system(),
        platform.release(),
    )
    client = docker_client(os.environ, version=DEFAULT_DOCKER_API_VERSION)
    assert client.headers['User-Agent'] == expected
def client():
    """ docker client """
    env = Environment()
    return docker_client(env, API_VERSIONS[COMPOSEFILE_V3_0])
def test_docker_client_with_custom_timeout(self):
    """COMPOSE_HTTP_TIMEOUT is honoured as the client timeout (in seconds)."""
    os.environ['COMPOSE_HTTP_TIMEOUT'] = '123'
    client = docker_client(os.environ, version=DEFAULT_DOCKER_API_VERSION)
    # env var is a string; the client parses it to an int number of seconds
    assert client.timeout == 123