    def __docker_build_tmp_files_cleanup(self, base_path, file_path, tmp_build):
        if os.path.isdir(file_path):
            dest = os.path.join(base_path, f"{os.path.basename(file_path)}.zip")
        else:
            dest = os.path.join(base_path, os.path.basename(file_path))
        os.remove(dest)
        Prompt.notice(f"Removed build artifact for image: {tmp_build['image']} - {dest}")
    def validate(self):
        with open(self.options['config'], 'r') as f:
            self.config = json.load(f)
        self.config['ROOT_DIR'] = os.getcwd()
        missing = []
        for item in self.validation_config:
            key = item['key']
            required = item['required']
            key_found = key in self.config or key in os.environ

            if not self.config.get(key):
                self.config[key] = os.environ.get(key, "")
            if (not key_found and required) or (key_found and required and not self.config[key]):
                missing.append(item)
            elif not required and not self.config[key]:
                warning = f"Config missing optional field: {key} - {Colors.WARNING}{item['description']}"
                if item.get('default-message'):
                    warning += f" - {Colors.CYAN}{item['default-message']}"
                Prompt.notice(warning)
        if missing:
            missing_formatted = [f"{x['key']}: {x['description']}" for x in missing]
            Prompt.error(f"The following keys are missing/empty from your env or config file: {missing_formatted}",
                         close=True)

        # Prepares build files if provided
        super().docker_tmp_file_handler(self.config, self.TMP_BUILD_FILES)
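
A note on self.validation_config: the loop in validate() reads key, required, description, and an optional default-message from each entry of the config-validator.json file (the same file the init action in Example #12 turns into a skeleton configuration.json). Purely illustrative entries, with the required flags and descriptions assumed rather than taken from the project:

    # Illustrative validator entries -- the field names (key, required,
    # description, default-message) are the ones validate() reads; the values
    # here are assumptions for the example.
    validation_config = [
        {"key": "ADDRESS", "required": True,
         "description": "domain:port the application is reached at (no protocol or path)"},
        {"key": "ADMINS", "required": False,
         "description": "Administrator accounts to create",
         "default-message": "Defaults to an empty list"},
    ]
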
    def signal_handler(self, sig, frame):
        Prompt.notice("\nCtrl-c captured.  Executing teardown function.")
        if not self.kill_captured:
            self.kill_captured = True
            self.cleanup()
            self.on_sig_kill()
        sys.exit(0)
    def __docker_build_tmp_files_copy(self, base_path, file_path, tmp_build, config, key):
        if os.path.isdir(file_path):
            dest = make_archive(os.path.join(base_path, os.path.basename(file_path)), 'zip', file_path)
            config[f"BUILD_FILE_{key}"] = f"{os.path.basename(file_path)}.zip"
        else:
            dest = os.path.join(base_path, os.path.basename(file_path))
            copyfile(file_path, dest)
            config[f"BUILD_FILE_{key}"] = os.path.basename(dest)
        Prompt.notice(f"Copied build file for image: {tmp_build['image']} - {dest}")
    def check_ports(self):
        Prompt.notice(f"Checking if ports are available for deployment: {self.REQUIRED_PORTS}")
        import socket
        from contextlib import closing
        ports_in_use = []
        for port in self.REQUIRED_PORTS:
            with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock:
                if sock.connect_ex(('127.0.0.1', port)) == 0:
                    ports_in_use.append(port)
        if ports_in_use:
            Prompt.error(f"Cannot deploy.  The following ports are in use: {ports_in_use}", close=True)
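
check_ports() relies on self.REQUIRED_PORTS being defined on the concrete deployment class; a plausible (assumed, not project-supplied) value for an HTTPS docker-compose deployment would be:

    # Hypothetical class attribute consumed by check_ports(); the actual ports
    # depend on what the compose file exposes.
    REQUIRED_PORTS = [80, 443]
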
    def docker_tmp_file_handler(self, config, docker_tmp_build_files, copy=True):
        for tmp_build in docker_tmp_build_files:
            for key in tmp_build['keys']:
                file_path = config[key]
                if file_path:
                    if not os.path.exists(file_path):
                        Prompt.error(f"{os.path.abspath(file_path)} does not exist.", close=True)
                    base_path = os.path.join(config['ROOT_DIR'], 'images', tmp_build['image'])
                    if copy:
                        self.__docker_build_tmp_files_copy(base_path, file_path, tmp_build, config, key)
                    else:
                        self.__docker_build_tmp_files_cleanup(base_path, file_path, tmp_build)
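
docker_tmp_file_handler() expects each entry of docker_tmp_build_files to name an image directory under <ROOT_DIR>/images/ and the config keys whose values are paths to stage there. A sketch of that structure, with placeholder names rather than real project keys:

    # Hypothetical TMP_BUILD_FILES entries, inferred from the fields accessed above.
    TMP_BUILD_FILES = [
        {
            "image": "app",                  # placeholder folder under images/
            "keys": ["EXTRA_BUILD_FILE"],    # placeholder config key holding a file or dir path
        },
    ]
    # self.docker_tmp_file_handler(self.config, self.TMP_BUILD_FILES)              # copy/zip into images/app
    # self.docker_tmp_file_handler(self.config, self.TMP_BUILD_FILES, copy=False)  # remove those artifacts again
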
Example #7
    def run(self):
        self.set_default('GIT_URL',
                         "https://github.com/GovReady/govready-q.git")
        self.set_default(
            'ADMINS',
            [] if not self.config.get('ADMINS') else self.config.get('ADMINS'))
        self.set_default('MOUNT_FOLDER', os.path.abspath("../../volumes"))
        self.set_default('HTTPS', "true")
        self.set_default('DEBUG', "false")

        if self.check_if_valid_uri(self.config['ADDRESS']):
            Prompt.error(
                f"ADDRESS cannot be a valid URI.  It must be the <domain>:<port> only.  No protocol or path.  "
                f"{self.config['ADDRESS']} is invalid.",
                close=True)

        self.set_default('HOST', self.config['ADDRESS'].split(':')[0])
        self.set_default('HEALTH_CHECK_GOVREADY_Q',
                         f"http://{self.config['HOST']}:8000")

        using_internal_db = self.set_default(
            'DATABASE_CONNECTION_STRING',
            "postgres://*****:*****@postgres:5432/govready_q")
        self.set_default(
            'DB_ENGINE',
            self.config['DATABASE_CONNECTION_STRING'].split(':')[0])
        docker_compose_file = "docker-compose.yaml"
        if not using_internal_db:
            docker_compose_file = 'docker-compose.external-db.yaml'

        self.execute(
            cmd=f"docker-compose -f {docker_compose_file} down --remove-orphans --rmi all"
        )
        self.check_ports()
        self.execute(
            cmd=f"docker-compose -f {docker_compose_file} build --parallel",
            show_env=True)
        self.execute(cmd=f"docker-compose -f {docker_compose_file} up -d",
                     show_env=True)
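
set_default() and check_if_valid_uri() are not shown in these excerpts. Judging from the call sites above (using_internal_db is truthy only when the built-in connection string was applied), a plausible sketch of set_default is:

    # Hedged sketch of set_default, inferred from its usage in run(): write the
    # fallback only when the key is missing/empty and report whether it was used.
    def set_default(self, key, value):
        if not self.config.get(key):
            self.config[key] = value
            return True   # default applied (e.g. the internal postgres connection string)
        return False      # caller already supplied a value via config or env
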
    def execute(self, cmd, env_dict, display_stdout=True, on_error_fn=None, show_env=False):
        env = os.environ.copy()
        normalized_dict = {}
        for key, value in env_dict.items():
            # Environment values must be strings: JSON-encode containers, blank out None.
            if isinstance(value, (list, dict)):
                value = json.dumps(value)
            if value is None:
                value = ""
            normalized_dict[key] = value
        env.update(normalized_dict)
        output = ""
        Prompt.notice(f"Executing command: {Colors.WARNING}{cmd}")
        if show_env:
            Prompt.notice(f"Environment Variables: {json.dumps(env_dict, indent=4, sort_keys=True)}")
        with subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,  # stderr=subprocess.STDOUT,
                              bufsize=0, env=env) as proc:
            for line in proc.stdout:
                formatted = line.rstrip().decode('utf-8', 'ignore')
                output += formatted
                if display_stdout:
                    print(formatted)
        # stderr is not piped, so it streams straight to the terminal.
        if proc.returncode != 0:
            if on_error_fn:
                on_error_fn()
            Prompt.error(f"[{cmd}] Failed [code:{proc.returncode}]", close=True)
        return output
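
A usage sketch for execute(): list/dict values are JSON-encoded and None becomes an empty string before being exported to the child process. The method name, command, and variable names below are illustrative only:

    # Illustrative caller; placed on a class that mixes in execute().
    def print_compose_config(self):
        self.execute(
            cmd="docker-compose config",
            env_dict={"HOST": "example.org", "ADMINS": ["alice"], "DEBUG": None},
            show_env=True,
        )
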
Example #9
    def on_complete(self):
        logs = self.execute(cmd="docker-compose logs", display_stdout=False)
        auto_admin = re.findall(
            r'Created administrator account \(username: (admin)\) with password: (.*?)\x1b',
            logs)
        print()

        if auto_admin:
            Prompt.warning(
                f"Created Administrator Account - {Colors.CYAN}{auto_admin[0][0]} / {auto_admin[0][1]} - {Colors.FAIL} This is the only time you will see this message so make sure to write this down!"
            )
        Prompt.warning(
            f"Logs & Container Artifacts can be found in: {Colors.CYAN}{self.config['MOUNT_FOLDER']}"
        )
        Prompt.warning(
            f"Access application via Browser: {Colors.CYAN}https://{self.config['HOST']}"
        )
Example #10
    def on_complete(self):
        print()
        Prompt.warning(
            f"If you're not using an external database and would like to wipe your DB, run: {Colors.CYAN}docker volume rm onprem_pg-data"
        )
Example #11
def get_deployment_type(deployment_type_list):
    Prompt.question_banner("Deployment Choices:", True)
    Prompt.banner(deployment_type_list, prevent_all_option=True)
    return deployment_type_list[
        Prompt.get_response("Choice: ", deployment_type_list) - 1]
Example #12
def run(options):
    path = f"deployments.{options['type']}.{options['action']}"

    if options['action'] == 'init':
        validator_config = os.path.join(
            f"{os.path.sep}".join(path.split('.')[:-1]),
            'config-validator.json')
        skeleton = {}
        with open(validator_config, 'r') as f:
            for row in json.load(f):
                skeleton[row['key']] = ""
        with open("configuration.json", 'w') as f:
            json.dump(skeleton, f, indent=4, sort_keys=True)
        return

    importlib.import_module(path)
    Prompt.warning(
        f"Attempting to [{options['action']}] using the [{options['type']}] method"
    )
    if options['action'] == 'deploy':
        validator_config = os.path.join(
            f"{os.path.sep}".join(path.split('.')[:-1]),
            'config-validator.json')
        if not options['config']:
            Prompt.error(
                f"Deployments require a configuration json that satisfies: [{validator_config}]",
                close=True)
        deployment_classes = Deployment.__subclasses__()
        if not deployment_classes:
            Prompt.error(
                f"Unable to find class inheriting `Deployment` in {path}",
                close=True)
        if not os.path.exists(validator_config):
            Prompt.error(
                f"File does not exist: {validator_config}.  Each deployment type must have this file to validate required values for deployment.",
                close=True)
        deployment = deployment_classes[0](options, validator_config)
        deployment.validate()
        os.chdir(f"deployments/{options['type']}")
        try:
            deployment.run()
        finally:
            deployment.cleanup()
        deployment.on_complete()
        Prompt.success(
            f"Deployment complete - using the [{options['type']}] method")
    else:
        deployment_classes = UnDeploy.__subclasses__()
        if not deployment_classes:
            Prompt.error(
                f"Unable to find class inheriting `UnDeploy` in {path}",
                close=True)
        remove_deployment = deployment_classes[0](options)
        os.chdir(f"deployments/{options['type']}")
        try:
            remove_deployment.run()
        finally:
            remove_deployment.cleanup()

        remove_deployment.on_complete()
        Prompt.success(
            f"Deployment removed - using the [{options['type']}] method")
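
A hedged usage sketch for run(): the deployment type below is a placeholder for whatever directory exists under deployments/, and configuration.json is the skeleton written by the init action:

# Illustrative calls only; 'docker_compose' stands in for a real deployments/ subdirectory.
run({'action': 'init', 'type': 'docker_compose', 'config': None})
run({'action': 'deploy', 'type': 'docker_compose', 'config': 'configuration.json'})
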
Example #13
    parser.add_argument('action',
                        help='The action to take (init, deploy, undeploy)')
    parser.add_argument('--config',
                        help='Config file - used to deploy process',
                        required=False)
    parser.add_argument(
        '--type',
        help="(Optional) Deployment type; It will prompt you if you don't include.",
        required=False)
    args = vars(parser.parse_args())

    valid_actions = ['deploy', 'undeploy', 'init']
    if args['action'] not in valid_actions:
        Prompt.error(
            f"{args['action']} is not a valid choice.  Choices: {valid_actions}",
            close=True)

    deployment_types = sorted([
        y
        for y in [x[0].split(os.path.sep)[-1]
                  for x in os.walk('deployments')][1:]
        if not y.startswith('__')
    ])
    if not args['type']:
        args['type'] = get_deployment_type(deployment_types)
    if args['type'] not in deployment_types:
        Prompt.error(
            f"{args['type']} is not a valid choice.  Choices: {deployment_types}",
            close=True)