def test(shipment):
    """Skeleton for deploying an Elasticsearch cluster with Kibana behind nginx.

    For every ship in the shipment, one Elasticsearch node, one Kibana and one
    nginx proxy are created and placed together; all Elasticsearch nodes are
    then clusterized and the remaining ports exposed at shipment level.
    """
    shipment.unload_ships()
    all_ships = shipment.ships.values()
    # Master discovery goes through a Zookeeper quorum built from a subset of ships.
    zk_cluster = build_zookeeper_cluster(filter_quorum_ships(all_ships))

    es_nodes = []
    for current_ship in all_ships:
        es_node = create_elasticsearch(clustername='testcluster', version='1.4.1')
        kibana_ui = create_kibana()
        proxy = create_nginx_proxy()

        # Place all containers on the same ship
        for container in (es_node, kibana_ui, proxy):
            current_ship.place(container)

        # Adjust memory to prevent OOM when running on the laptop
        es_node.memory = min(current_ship.memory, 128*1024*1024)

        # We will use Zookeeper for master discovery
        attach_zookeepers_to_elasticsearch(es_node, zk_cluster)

        # Nginx will terminate https and proxy requests to Elasticsearch and Kibana
        attach_upstreams_to_nginx(
            proxy,
            upstreams=[
                (es_node.doors['http'], 9200, 9443),
                (kibana_ui.doors['http'], 8080, 8443),
            ],
        )

        # Expose nginx ports before attaching it to Kibana
        proxy.expose_ports(list(range(2000, 4000)))

        # Kibana should use the same origin and just change port to access Elasticsearch
        attach_elasticsearch_to_kibana(
            kibana_ui,
            httpdoor=proxy.doors['elasticsearch.http'],
            httpsdoor=proxy.doors['elasticsearch.https'],
        )

        # Save elasticsearches to clusterize them later
        es_nodes.append(es_node)

    # Let Elasticsearches know about each other
    clusterize_elasticsearches(es_nodes)

    # Expose all unexposed ports
    shipment.expose_ports(list(range(51000, 51100)))
def test(shipment):
    """Deploy a Powny stack (gitapi, api, worker, collector) on every ship.

    A Zookeeper cluster is built across all ships first; each Powny service
    (except gitapi) is then attached to it before being placed on its ship.
    """
    from obedient.zookeeper import build_zookeeper_cluster

    shipment.unload_ships()
    zk_cluster = build_zookeeper_cluster(shipment.ships.values())

    powny_builder = make_powny_builder()
    for current_ship in shipment.ships.values():
        git_service = powny_builder.gitapi(ssh_keys=[])
        api_service = powny_builder.api()
        worker_service = powny_builder.worker()
        collector_service = powny_builder.collector()

        # Only the Powny core services need Zookeeper; gitapi does not.
        for service in (api_service, worker_service, collector_service):
            attach_zookeepers_to_powny(service, zk_cluster)

        # All four containers live on the same ship.
        for service in (api_service, worker_service, collector_service, git_service):
            current_ship.place(service)

    shipment.expose_ports(list(range(47000, 47100)))