def register(self, docker_container, docker_image, port_protocol, port_host, port_container,
             volume_name, volume_path, container_path):
    """
    Register a task definition in an AWS ECS cluster.

    :param docker_container: Short name to assign to the docker container
    :param docker_image: Docker image name
    :param port_protocol: Port protocol to use
    :param port_host: Host port number
    :param port_container: Container port number
    :param volume_name: Short name you'd like to assign to a volume
    :param volume_path: Path to the volume relative to instance root (not docker)
    :param container_path: Path to map the volume to within the container
    """
    task_def = dict()

    # Container definitions
    task_def['containerDefinitions'] = []
    task_def['containerDefinitions'].append({
        "memory": 128,
        "portMappings": [{
            "hostPort": int(port_host),
            "containerPort": int(port_container),
            "protocol": port_protocol
        }],
        "essential": True,
        "mountPoints": [{
            "containerPath": container_path,
            "sourceVolume": volume_name
        }],
        "name": docker_container,
        "image": docker_image
    })

    # Volumes
    task_def['volumes'] = []
    task_def['volumes'].append({
        "host": {
            "sourcePath": volume_path
        },
        "name": volume_name
    })

    # Task family name
    task_def['family'] = self.task_name

    with NamedTemporaryFile(suffix=self.task_name + '.json') as temp:
        # Write task definition to a temporary JSON file and register it via the AWS CLI
        JSON(temp.name).write(task_def, sort_keys=False, indent=2)
        print('Registering task definition')
        cmd = 'aws ecs register-task-definition --cli-input-json file://{0}'.format(temp.name)
        os.system(cmd)
    self.add_task('Registered task definition')
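# Illustration (derived from register() above, not taken from the source repo): for a call
# like register('web', 'nginx:latest', 'tcp', 80, 80, 'web-data', '/srv/web-data',
# '/usr/share/nginx/html') on an object whose task_name is 'my-task', the JSON handed to
# `aws ecs register-task-definition --cli-input-json` would look roughly like this.
# All image, volume and path values are placeholders.
example_task_def = {
    "containerDefinitions": [{
        "memory": 128,
        "portMappings": [{"hostPort": 80, "containerPort": 80, "protocol": "tcp"}],
        "essential": True,
        "mountPoints": [{"containerPath": "/usr/share/nginx/html", "sourceVolume": "web-data"}],
        "name": "web",
        "image": "nginx:latest"
    }],
    "volumes": [{"host": {"sourcePath": "/srv/web-data"}, "name": "web-data"}],
    "family": "my-task"
}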
def create(self):
    """Create a Dockerrun.aws.json file in the default directory with default data."""
    if not os.path.exists(os.path.dirname(self.path)):
        os.mkdir(os.path.dirname(self.path))
    JSON(self.path).write(self.data, sort_keys=False, indent=2)
    self.add_task('Make Dockerrun.aws.json file with default deployment config')
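# Hedged illustration (assumption, not from this repo): self.data is expected to hold a
# single-container Dockerrun.aws.json document.  A minimal version-1 file, per AWS Elastic
# Beanstalk's documented format, looks roughly like this; the image name and port are
# placeholders.
example_dockerrun = {
    "AWSEBDockerrunVersion": "1",
    "Image": {"Name": "nginx:latest", "Update": "true"},
    "Ports": [{"ContainerPort": "80"}]
}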
def init_json_store(root=_HISTORY_JSON_ROOT, json=DOCKER_HISTORY_JSON, key='history'):
    """Create the history root directory and JSON store if they do not already exist."""
    # Make root if it doesn't exist
    if not os.path.exists(root):
        os.mkdir(root)

    # Make json file if it doesn't exist
    if not os.path.isfile(json):
        JSON(json).write({key: []})
def get_json(json_path):
    """
    Retrieve a JSON object for reading and writing history files.

    Creates the history JSON file if it does not exist.

    :param json_path: Path to history.json file
    :return: JSON object
    """
    json = JSON(json_path)
    if not os.path.exists(json_path):
        json.write({'history': []})
    return json
def json(self):
    """Return a JSON object for morning_pull.json, creating it on first access."""
    if not self._json:
        self._json = JSON(self.json_path)
    return self._json
def setUp(self):
    # Create a temporary .json file and wrap it in a JSON object for each test
    self.temp = NamedTemporaryFile(suffix='.json', delete=False)
    self.json = JSON(self.temp.name)
def get_config():
    """Read the PyPi project config, creating a default pypi.json if it does not exist."""
    # Make JSON file if it doesn't exist
    if not config_exists():
        JSON(PYPI_JSON_PATH).write(PYPI_JSON_DEFAULT)
        print('\tadd projects to pypi.json')
    return JSON(PYPI_JSON_PATH).read()
def most_recent_history(json_path):
    """Get the most recent deployment parameters from history.json."""
    try:
        return JSON(json_path).read()['history'][-1]
    except IndexError:
        # No deployments have been recorded yet
        return dict()
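# Minimal end-to-end sketch (assumed workflow, not from the source): these helpers are shown
# as if they live in one importable module, which may not match the real package layout.
# Only the JSON.read()/JSON.write() calls already used above are relied upon; the
# 'image'/'tag' keys in the history entry are illustrative.
init_json_store()                                    # ensure ~/.Deployer store exists
store = get_json(DOCKER_HISTORY_JSON)                # JSON wrapper around the history file
history = store.read()['history']
history.append({'image': 'nginx', 'tag': 'latest'})  # record a hypothetical deployment
store.write({'history': history}, sort_keys=False)

print(most_recent_history(DOCKER_HISTORY_JSON))      # -> {'image': 'nginx', 'tag': 'latest'}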
import os

from databasetools import JSON

_HISTORY_JSON_ROOT = os.path.join(os.path.expanduser('~'), '.Deployer')
EB_HISTORY_JSON = os.path.join(_HISTORY_JSON_ROOT, 'eb_history.json')
ECS_HISTORY_JSON = os.path.join(_HISTORY_JSON_ROOT, 'ecs_history.json')
DOCKER_HISTORY_JSON = os.path.join(_HISTORY_JSON_ROOT, 'docker_history.json')

# Create '.Deployer' folder in the home directory if it doesn't exist
if not os.path.exists(_HISTORY_JSON_ROOT):
    os.mkdir(_HISTORY_JSON_ROOT)

# Create each history file if it doesn't exist
for history in (EB_HISTORY_JSON, ECS_HISTORY_JSON, DOCKER_HISTORY_JSON):
    if not os.path.exists(history):
        JSON(history).write({'history': []}, sort_keys=False)

REMOTE_SOURCE_EXT = '-remote'
HOST_PORT = 80
CONTAINER_PORT = 80
AWS_REGION = 'us-east-1'
LAUNCH_TYPES = ('EC2', 'FARGATE')
AWS_VERSION_DESCRIPTION = 'EB-CLI deploy'