Example #1
class Init(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'pack.init'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        result_string = ""
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        # Hardcoding default to True for pack group command, revisit later
        self.args['apply'] = True
        if self.args['apply']:
            self.logger.write('Initializing package in ' + base_dir, multi_line=False)
            result_string += pack_actions.init_from_schemas(base_dir, 'artiball', dry_run=False,
                                                            force=self.args['force'])
        else:
            self.logger.write('DRY RUN :::: Initializing package in ' + base_dir, multi_line=False)
            result_string += pack_actions.init_from_schemas(base_dir, 'artiball', dry_run=True)
        self.result.status = sky_cfg.API_STATUS_SUCCESS
        self.result.output = result_string
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        return result
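
The tasks in these examples all follow the same shape: __init__ stores the parsed arguments, a Logger and the runner configuration; preflight_check() returns a skytask.TaskResult the caller can inspect before any work is done; and execute() performs the action and returns self.result with its status and output filled in. A minimal usage sketch, assuming the runner calls preflight_check() before execute() and using the argument keys Init.execute() reads ('base_dir', 'force'); the values shown are placeholders:

# Hedged sketch (not from the source): driving a SkyTask subclass such as Init.
args = {'base_dir': '/tmp/myservice', 'force': False}
task = Init(all_args=args, runner_cfg=None)

check = task.preflight_check()      # skytask.TaskResult
if check.status != 'FAIL':
    result = task.execute()         # fills result.status and result.output
    print(result.status)
    print(result.output)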
Example #2
class Create(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.WARNING)
        self.name = 'pack.create'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        artiball = ab_object.Artiball(base_dir)

        if self.args['build_id']:
            artiball.create_manifest_file(self.args['build_id'])
        else:
            artiball.create_manifest_file('NoBuildID')
        artiball.update_content()

        # Hardcoding default to True for pack group command, revisit later
        self.args['apply'] = True
        if self.args['apply']:
            if self.args['verbose']:
                self.logger.write('Creating package in ' + base_dir, multi_line=False)
            pack_result = pack_actions.pack(artiball.base_dir, artiball.app_source, artiball.chef_repo,
                                            artiball.chef_repo_branch, artiball.cookbooks, artiball.use_berkshelf,
                                            artiball.databags, artiball.encrypted_databags, artiball.manifest,
                                            dry_run=False, verbose=self.args['verbose'])
        else:
            if self.args['verbose']:
                self.logger.write('DRY RUN :::: Dry running steps for package creation in ' + base_dir,
                                  multi_line=False)
            pack_result = pack_actions.pack(artiball.base_dir, artiball.app_source, artiball.chef_repo,
                                            artiball.chef_repo_branch, artiball.cookbooks, artiball.use_berkshelf,
                                            artiball.databags, artiball.encrypted_databags, artiball.manifest,
                                            dry_run=True, verbose=self.args['verbose'])

        if pack_result["valid"]:
            self.result.status = sky_cfg.API_STATUS_SUCCESS
        else:
            self.result.status = sky_cfg.API_STATUS_FAIL

        self.result.output += pack_result["result_string"]
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        validate = skytask.get_task_class_by_name('pack.validate')(self.args)
        result = validate.execute()
        if result.status == sky_cfg.API_STATUS_FAIL:
            result.output += "Invalid content for packing, please correct accordingly.\n"
            result.status = 'FAIL'
        return result
Example #3
class UpdateEnvironment(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'chef.update_environment'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        planet_data_dir = self.runner_cfg.data['planet_data_dir']
        knife_env_path = os.path.join(planet_data_dir, self.args['planet'])
        knife_config_path = os.path.join(knife_env_path, 'chef',
                                         self.args['planet'] + '.knife.rb')

        if os.path.exists(knife_config_path):
            planet_env_yaml = os.path.join(knife_env_path,
                                           self.args['planet'] + '.yaml')
            with open(planet_env_yaml, 'r') as f:
                planet_env_attr = yaml.load(f)
                chef_env_attr = planet_env_attr['services']['chefserver'][
                    'chef_environment']['default_attributes']
                chef_env_final = chef_env_attr.copy()
                chef_env_final.update(planet_env_attr)
                self.logger.write('Updating environment in planet ' +
                                  self.args['planet'],
                                  multi_line=False)
                update_environment_result = sky_chef_actions.environment_create(
                    knife_env_path, knife_config_path, chef_env_final)
            if update_environment_result["valid"]:
                self.result.status = sky_cfg.API_STATUS_SUCCESS
            else:
                self.result.status = sky_cfg.API_STATUS_FAIL
            # The result string only exists when the update actually ran.
            self.result.output += update_environment_result["result_string"]
        else:
            self.logger.write("Cannot locate planet knife config " +
                              knife_config_path + ", please confirm it exists",
                              multi_line=False)
            self.result.status = sky_cfg.API_STATUS_FAIL
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        if self.args['planet'] is None:
            result.output += "Missing planet argument, please use -p to specify.\n"
            result.status = 'FAIL'
        return result
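
The Chef environment payload above is built by copying the chef_environment default_attributes and overlaying the full planet attribute map on top of them, so planet-level keys win on any collision. A standalone illustration of that copy-and-update merge; the keys and values are invented for the example:

# Illustration of the dict.copy() + dict.update() merge used in execute() above.
chef_env_attr = {'app_port': 8080, 'log_level': 'info'}      # Chef environment defaults
planet_env_attr = {'log_level': 'debug', 'planet': 'dev1'}   # planet-level attributes

chef_env_final = chef_env_attr.copy()     # start from the defaults
chef_env_final.update(planet_env_attr)    # planet values override on collisions

# chef_env_final == {'app_port': 8080, 'log_level': 'debug', 'planet': 'dev1'}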
Example #4
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = 'user.list'
     self.args = all_args
     self.runner_cfg = runner_cfg
     self.username = self.args.get('username')
Example #5
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = 'planet.describe'
     self.args = all_args
     self.runner_cfg = runner_cfg
     self.planet = None
Example #6
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = 'service.delete_stacks_state'
     self.args = all_args
     self.runner_cfg = runner_cfg
     self.planet = None
     self.runtime = SkyRuntime(apply=all_args.get('apply', False))
Example #7
class Validate(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'pack.validate'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        if self.args['verbose']:
            self.logger.write('Validating package in ' + base_dir, multi_line=False)

        if not os.path.exists(os.path.join(base_dir, 'app')):
            dir_util.mkpath(os.path.join(base_dir, 'app'))
        validate_result = pack_actions.validate_with_schema(base_dir, 'artiball')
        artiball = ab_object.Artiball(base_dir)
        artiball.update_content()
        artiball.create_manifest_file()
        if (artiball.chef_repo is None) and \
                (os.listdir(os.path.join(base_dir, 'installation', 'chef', 'cookbooks')) == []) and \
                artiball.cookbooks:
            self.result.output += "Cannot render cookbooks. Please specify git repository URL in skybase.yaml " \
                                  "or make sure cookbooks are in " + \
                                  os.path.join(base_dir, 'installation', 'chef', 'cookbooks') + '.\n'
            self.result.status = sky_cfg.API_STATUS_FAIL
            return self.result
        if validate_result["valid"]:
            self.result.output += "All content validated, ready for pack create.\n"
            self.result.status = sky_cfg.API_STATUS_SUCCESS
        else:
            self.result.status = sky_cfg.API_STATUS_FAIL
        self.result.output += validate_result["result_string"]
        if not self.args['verbose']:
            self.result.output = ''
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        return result
Example #8
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = 'state.read'
     self.args = all_args
     self.runner_cfg = runner_cfg
     self.mode = self.args.get('exec_mode')
     self.format = self.args.get('format')
     self.id = self.args.get('skybase_id')
Example #9
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'service.record_state'
        self.args = all_args
        self.runner_cfg = runner_cfg

        self.planet = None
        self.stacks = self.args['stacks']
Example #10
    def __init__(self, base_dir=None):
        logging.basicConfig(level=logging.INFO)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)

        self.base_dir = base_dir
        self.yaml_files = []

        self.app_source = None
        self.cookbooks = []
        self.chef_repo = None
        self.chef_repo_branch = None
        self.use_berkshelf = False
        self.databags = []
        self.encrypted_databags = []

        self.manifest = {}

        self.initialize()
Example #11
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'user.update'
        self.args = all_args
        self.runner_cfg = runner_cfg

        self.apply = self.args.get('apply')
        self.username = self.args.get('username')
        self.role = self.args.get('role')
        self.email = self.args.get('email')
Example #12
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'user.reset_secret'
        self.args = all_args
        self.runner_cfg = runner_cfg
        self.apply = self.args.get('apply')

        # initialize command options as attributes
        self.username = self.args.get('username')
        # if --secret is not provided, generate a new one
        self.secret = self.args.get('secret')
Example #13
class Init(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'pack.init'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        result_string = ""
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        # Hardcoding default to True for pack group command, revisit later
        self.args['apply'] = True
        if self.args['apply']:
            self.logger.write('Initializing package in ' + base_dir,
                              multi_line=False)
            result_string += pack_actions.init_from_schemas(
                base_dir, 'artiball', dry_run=False, force=self.args['force'])
        else:
            self.logger.write('DRY RUN :::: Initializing package in ' +
                              base_dir,
                              multi_line=False)
            result_string += pack_actions.init_from_schemas(base_dir,
                                                            'artiball',
                                                            dry_run=True)
        self.result.status = sky_cfg.API_STATUS_SUCCESS
        self.result.output = result_string
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        return result
Example #14
class Clean(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'pack.clean'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        # Hardcoding default to True for pack group command, revisit later
        self.args['apply'] = True
        if self.args['apply']:
            self.logger.write('Cleaning packing environment in ' + base_dir, multi_line=False)
            clean_result = pack_actions.clean(base_dir, dry_run=False, force=self.args['force'])
        else:
            self.logger.write('DRY RUN :::: Dry running steps for environment cleanup in ' + base_dir, multi_line=False)
            clean_result = pack_actions.clean(base_dir, dry_run=True)

        if clean_result["valid"]:
            self.result.status = sky_cfg.API_STATUS_SUCCESS
        else:
            self.result.status = sky_cfg.API_STATUS_FAIL
        self.result.output += clean_result["result_string"]
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        return result
Example #15
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'service.delete_stacks'
        self.args = all_args
        self.runner_cfg = runner_cfg
        self.planet = None

        # create runtime object with command options
        self.runtime = SkyRuntime(tag=all_args.get('tag'),
                                  apply=all_args.get('apply', False))

        # initialize stack deletion process drivers
        self.stack_deletion_list = []
        self.stack_deletion_info = dict()
Example #16
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'service.deploy'
        self.args = all_args
        self.runner_cfg = runner_cfg

        # required attributes derived from command arguments
        self.planet = None
        self.service = None

        # create runtime object with command options
        self.runtime = SkyRuntime(tag=all_args.get('tag'),
                                  apply=all_args.get('apply', False))

        self.system = SkySystem()
Example #17
    def __init__(self, base_dir=None):
        logging.basicConfig(level=logging.INFO)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)

        self.base_dir = base_dir
        self.yaml_files = []

        self.app_source = None
        self.cookbooks = []
        self.chef_repo = None
        self.chef_repo_branch = None
        self.use_berkshelf = False
        self.databags = []
        self.encrypted_databags = []

        self.manifest = {}

        self.initialize()
Example #18
    def __init__(self, all_args, runner_cfg):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'pack.submit'
        self.args = all_args
        self.runner_cfg = runner_cfg
        self.aws_access_key_id = None
        self.aws_secret_access_key = None

        if self.args['base_dir']:
            self.base_dir = self.args['base_dir']
            if self.base_dir.split('/')[-1] != 'skybase':
                self.base_dir = os.path.join(self.base_dir, 'skybase')
        else:
            self.tdir = tempfile.mkdtemp()
            self.base_dir = os.path.join(self.tdir, 'skybase')
        self.tmp_dir = os.path.join(self.base_dir, 'tmp')
        if not os.path.exists(self.tmp_dir):
            os.makedirs(self.tmp_dir)
Example #19
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'service.update'
        self.args = all_args
        self.runner_cfg = runner_cfg

        # required attributes derived from command arguments
        self.mode = self.args.get('exec_mode', False)
        self.runtime = SkyRuntime(apply=self.args.get('apply', False))
        self.system = SkySystem()
        self.chef_type = None

        self.id = self.args.get('skybase_id')
        self.target_service = None
        self.target_planet = None

        self.source_artiball = self.args.get('source_artiball')
        self.source_service = None

        self.planet_name = self.args.get('planet_name')
        self.planet = None

        self.update_plan = self.args.get('update_plan')
Example #20
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__))
     self.name = 'chef.delete_node'
     self.args = all_args
     self.runner_cfg = runner_cfg
Example #21
 def __init__(self, all_args, runner_cfg):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = 'pack.upload'
     self.args = all_args
     self.runner_cfg = runner_cfg
Example #22
class Upload(SkyTask):
    def __init__(self, all_args, runner_cfg):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'pack.upload'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        pack.set_incoming_s3_bucket()

        if self.args['artiball']:
            if self.args['artiball'].endswith('.tar.gz'):
                file_path = os.path.join(base_dir, 'package',
                                         self.args['artiball'])
            else:
                file_path = os.path.join(base_dir, 'package',
                                         self.args['artiball'] + '.tar.gz')
        else:
            packages = os.listdir(os.path.join(base_dir, 'package'))
            for package in packages:
                file_path = os.path.join(base_dir, 'package', package)

        aws_creds_file = os.path.expanduser(os.path.join(
            '~', '.aws', 'config'))
        if os.path.exists(aws_creds_file):
            config = ConfigParser.ConfigParser()
            config.read([str(aws_creds_file)])
            aws_access_key_id = config.get('default', 'aws_access_key_id')
            aws_secret_access_key = config.get('default',
                                               'aws_secret_access_key')

            # Hardcoding default to True for pack group command, revisit later
            self.args['apply'] = True
            if self.args['apply']:
                self.logger.write('Uploading package to S3 bucket ' +
                                  pack.PACKAGE_INCOMING_S3_BUCKET,
                                  multi_line=False)
                upload_result = sky_boto_actions.upload_to_s3(
                    pack.PACKAGE_INCOMING_S3_BUCKET,
                    file_path,
                    self.logger,
                    access_key=aws_access_key_id,
                    secret_key=aws_secret_access_key,
                    dry_run=False)
            else:
                self.logger.write(
                    'DRY RUN :::: Dry running steps for package upload to S3 bucket '
                    + pack.PACKAGE_INCOMING_S3_BUCKET,
                    multi_line=False)
                upload_result = sky_boto_actions.upload_to_s3(
                    pack.PACKAGE_INCOMING_S3_BUCKET,
                    file_path,
                    self.logger,
                    access_key=aws_access_key_id,
                    secret_key=aws_secret_access_key,
                    dry_run=True)

            if upload_result["valid"]:
                self.result.status = sky_cfg.API_STATUS_SUCCESS
            else:
                self.result.status = sky_cfg.API_STATUS_FAIL
            self.result.output += upload_result["result_string"]
        else:
            self.result.output += "Cannot locate aws credentials, please confirm they are set in " + aws_creds_file \
                                  + "\n"
            self.result.status = sky_cfg.API_STATUS_FAIL
            return self.result

        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        if self.args['artiball']:
            if self.args['artiball'].endswith('.tar.gz'):
                file_path = os.path.join(base_dir, 'package',
                                         self.args['artiball'])
            else:
                file_path = os.path.join(base_dir, 'package',
                                         self.args['artiball'] + '.tar.gz')
            if not os.path.exists(file_path):
                result.output += "Cannot find specified artiball " + file_path + "\n"
                result.status = 'FAIL'
                return result
        else:
            packages = os.listdir(os.path.join(base_dir, 'package'))
            if len(packages) > 1:
                result.output += "Multiple artiballs found in project, please use -a to specify artiball.\n"
                result.status = 'FAIL'
                return result
            for package in packages:
                if package.endswith('.tar.gz'):
                    result.status = sky_cfg.API_STATUS_SUCCESS
                    return result
            result.output += "Cannot find package in project, please use -a to specify artiball.\n"
            result.status = 'FAIL'
        return result
Example #23
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = 'route.ping'
     self.args = all_args
     self.runner_cfg = runner_cfg
Example #24
class Upload(SkyTask):
    def __init__(self, all_args, runner_cfg):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = "pack.upload"
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        if self.args["base_dir"]:
            base_dir = self.args["base_dir"]
            if base_dir.split("/")[-1] != "skybase":
                base_dir = os.path.join(base_dir, "skybase")
        else:
            base_dir = os.path.join(os.getcwd(), "skybase")

        pack.set_incoming_s3_bucket()

        if self.args["artiball"]:
            if self.args["artiball"].endswith(".tar.gz"):
                file_path = os.path.join(base_dir, "package", self.args["artiball"])
            else:
                file_path = os.path.join(base_dir, "package", self.args["artiball"] + ".tar.gz")
        else:
            packages = os.listdir(os.path.join(base_dir, "package"))
            for package in packages:
                file_path = os.path.join(base_dir, "package", package)

        aws_creds_file = os.path.expanduser(os.path.join("~", ".aws", "config"))
        if os.path.exists(aws_creds_file):
            config = ConfigParser.ConfigParser()
            config.read([str(aws_creds_file)])
            aws_access_key_id = config.get("default", "aws_access_key_id")
            aws_secret_access_key = config.get("default", "aws_secret_access_key")

            # Hardcoding default to True for pack group command, revisit later
            self.args["apply"] = True
            if self.args["apply"]:
                self.logger.write("Uploading package to S3 bucket " + pack.PACKAGE_INCOMING_S3_BUCKET, multi_line=False)
                upload_result = sky_boto_actions.upload_to_s3(
                    pack.PACKAGE_INCOMING_S3_BUCKET,
                    file_path,
                    self.logger,
                    access_key=aws_access_key_id,
                    secret_key=aws_secret_access_key,
                    dry_run=False,
                )
            else:
                self.logger.write(
                    "DRY RUN :::: Dry running steps for package upload to S3 bucket " + pack.PACKAGE_INCOMING_S3_BUCKET,
                    multi_line=False,
                )
                upload_result = sky_boto_actions.upload_to_s3(
                    pack.PACKAGE_INCOMING_S3_BUCKET,
                    file_path,
                    self.logger,
                    access_key=aws_access_key_id,
                    secret_key=aws_secret_access_key,
                    dry_run=True,
                )

            if upload_result["valid"]:
                self.result.status = sky_cfg.API_STATUS_SUCCESS
            else:
                self.result.status = sky_cfg.API_STATUS_FAIL
            self.result.output += upload_result["result_string"]
        else:
            self.result.output += (
                "Cannot locate aws credentials, please confirm they are set in " + aws_creds_file + "\n"
            )
            self.result.status = sky_cfg.API_STATUS_FAIL
            return self.result

        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        if self.args["base_dir"]:
            base_dir = self.args["base_dir"]
            if base_dir.split("/")[-1] != "skybase":
                base_dir = os.path.join(base_dir, "skybase")
        else:
            base_dir = os.path.join(os.getcwd(), "skybase")

        if self.args["artiball"]:
            if self.args["artiball"].endswith(".tar.gz"):
                file_path = os.path.join(base_dir, "package", self.args["artiball"])
            else:
                file_path = os.path.join(base_dir, "package", self.args["artiball"] + ".tar.gz")
            if not os.path.exists(file_path):
                result.output += "Cannot find specified artiball " + file_path + "\n"
                result.status = "FAIL"
                return result
        else:
            packages = os.listdir(os.path.join(base_dir, "package"))
            if len(packages) > 1:
                result.output += "Multiple artiballs found in project, please use -a to specify artiball.\n"
                result.status = "FAIL"
                return result
            for package in packages:
                if package.endswith(".tar.gz"):
                    result.status = sky_cfg.API_STATUS_SUCCESS
                    return result
            result.output += "Cannot find package in project, please use -a to specify artiball.\n"
            result.status = "FAIL"
        return result
Example #25
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.WARNING)
     self.name = 'pack.create'
     self.args = all_args
     self.runner_cfg = runner_cfg
Example #26
 def __init__(self, all_args, runner_cfg):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = "pack.upload"
     self.args = all_args
     self.runner_cfg = runner_cfg
Example #27
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.INFO)
     self.name = 'chef.update_cookbooks'
     self.args = all_args
     self.runner_cfg = runner_cfg
Example #28
class UpdateCookbooks(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'chef.update_cookbooks'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        result = 0
        artiball_data_dir = self.runner_cfg.data['artiball_data_dir']
        artiball_dir = os.path.join(artiball_data_dir, self.args['artiball'].split('.tar.gz')[0])
        if os.path.exists(artiball_dir):
            planet_data_dir = self.runner_cfg.data['planet_data_dir']
            knife_env_path = os.path.join(planet_data_dir, self.args['planet'])
            knife_config_path = os.path.join(knife_env_path, 'chef', self.args['planet'] + '.knife.rb')

            if os.path.exists(knife_config_path):
                dep_cookbook_path = os.path.join(artiball_dir, 'installation', 'chef', "cookbooks")

                cookbooks_order_file = os.path.join(artiball_dir, 'installation', 'chef', 'cookbook-order.yaml')
                with open(cookbooks_order_file) as f:
                    dependencies = f.readlines()
                    for dep in dependencies:
                        self.logger.write('Updating cookbook ' + dep.rstrip('\n') + ' in planet ' + self.args['planet'],
                                          multi_line=False)
                        self.logger.write(sky_chef_actions.cookbook_upload(knife_env_path, knife_config_path,
                                                                           dep_cookbook_path, dep.rstrip('\n'),
                                                                           self.logger),
                                          multi_line=False)
            else:
                self.logger.write("Cannot locate planet knife config " + knife_config_path
                                  + ", please confirm it exists", multi_line=False)
        else:
            self.logger.write("Cannot locate artiball " + self.args['artiball'] + ", please confirm it exists in "
                              + artiball_data_dir, multi_line=False)
            return 1
        return result

    @property
    def is_executable(self):
        if self.args['artiball'] is None:
            self.logger.write("Missing artiball argument, please use -a to specify.", multi_line=False)
            return False
        if self.args['planet'] is None:
            self.logger.write("Missing planet argument, please use -p to specify.", multi_line=False)
            return False
        return True
Example #29
class Artiball(object):
    '''
    This class contains the information needed to pack a Service into an Artiball
    and to perform verifications and conversions into different formats.

    Mode 1: initialize a blank dir so it can be checked into a repo

    Mode 2: check out of the repo; create a temp dir, put everything together and create a tarball
            a) each


    '''

    def __init__(self, base_dir=None):
        logging.basicConfig(level=logging.INFO)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)

        self.base_dir = base_dir
        self.yaml_files = []

        self.app_source = None
        self.cookbooks = []
        self.chef_repo = None
        self.chef_repo_branch = None
        self.use_berkshelf = False
        self.databags = []
        self.encrypted_databags = []

        self.manifest = {}

        self.initialize()

    def initialize(self):
        if not self.base_dir:
            self.base_dir = os.path.join(os.getcwd(), 'skybase')
        elif self.base_dir.split('/')[-1] != 'skybase':
            self.base_dir = os.path.join(self.base_dir, 'skybase')
        self.load_yaml_files()
        return self.base_dir

    def load_yaml_files(self):
        for path, dirs, files in os.walk(self.base_dir):
            for file in files:
                if file.split('.')[-1] == 'yaml':
                    self.yaml_files.append(os.path.join(path, file))

    def update_content(self):
        config_file = os.path.join(self.base_dir, 'skybase.yaml')
        with open(config_file, 'r') as temp_file:
            config = yaml.load(temp_file)
            self.app_source = config['packing']['application']['source_location']
            installations = config['packing']['installations']
            for installation in installations:
                if installation.get("chef"):
                    self.databags = installation['chef']['databags']
                    self.encrypted_databags = installation['chef']['encrypted_databags']

                    self.chef_repo = installation['chef']['repository_url']
                    self.chef_repo_branch = installation['chef']['repository_branch']
                    self.use_berkshelf = installation['chef']['cookbooks']['dependencies_from_berkshelf']
        temp_file.close()

        deployment_file = os.path.join(self.base_dir, 'deployment', 'main_deployment.yaml')
        with open(deployment_file, 'r') as temp_file:
            try:
                deployment = yaml.load(temp_file)
            except yaml.scanner.ScannerError:
                self.logger.write("Invalid yaml syntax  " + deployment_file + '\n', multi_line=True)
                sys.exit(1)
            stacks = deployment['stacks']
            for stack in stacks:
                roles = stack['roles']
                for role in roles:
                    for runlist_item in role['chef_role_runlist']:
                        self.cookbooks.append(runlist_item.split('::')[0])
        self.cookbooks = sorted(set(self.cookbooks))
        temp_file.close()

    def update_manifest(self, build_id=None):
        self.manifest = schemas.create_unordered_dict_from_schema(
            schemas.get_schema('manifest_yaml_schema', 'artiball'), 'manifest', 'artiball')
        if build_id:
            self.manifest['metadata']['build_id'] = build_id
        for file in self.yaml_files:
            if 'main_deployment' in file:
                with open(file, 'r') as temp_file:
                    try:
                        deployment_data = yaml.load(temp_file)
                    except yaml.scanner.ScannerError:
                        self.logger.write("Invalid yaml syntax  " + file + '\n', multi_line=True)
                        sys.exit(1)
                    schemas.set_indicators()
                    if deployment_data['definition']['service_name'] not in schemas.INDICATORS:
                        self.manifest['metadata']['app_name'] = deployment_data['definition']['service_name']
                    if deployment_data['definition']['version'] not in schemas.INDICATORS:
                        self.manifest['metadata']['app_version'] = deployment_data['definition']['version']
                    if deployment_data['definition']['chef_type'] == 'server':
                        with open(os.path.join(self.base_dir, 'skybase.yaml'), 'r') as config_file:
                            try:
                                skybase_data = yaml.load(config_file)
                            except yaml.scanner.ScannerError:
                                self.logger.write("Invalid yaml syntax  " + file + '\n', multi_line=True)
                                sys.exit(1)
                            schemas.set_indicators()
                            for installation in skybase_data['packing']['installations']:
                                if installation.get("chef"):
                                    if (installation['chef']['repository_url'] not in schemas.INDICATORS) and \
                                            (installation['chef']['repository_branch'] not in schemas.INDICATORS):
                                        self.manifest['chef_cookbook_source'] = installation['chef']['repository_url'] + \
                                                                                '=' + installation['chef']['repository_branch']
                        config_file.close()
                temp_file.close()

    def create_manifest_file(self, build_id=None):
        self.update_manifest(build_id)
        manifest_file = os.path.join(self.base_dir, 'manifest.yaml')
        if os.path.exists(manifest_file):
            os.remove(manifest_file)
        with open(manifest_file, 'wb') as temp_file:
            yaml.dump(self.manifest, temp_file, allow_unicode=True, default_flow_style=False)
        temp_file.close()
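
Example #2 shows how this class is consumed by pack.create: construct it with a base directory, write a manifest for a given build id, then load the packing configuration back out of skybase.yaml and main_deployment.yaml. A minimal sketch of that flow, assuming the directory has already been laid out by pack.init; the path and build id are placeholders:

# Hedged sketch of the Artiball flow used by the pack.create task.
artiball = Artiball('/home/user/myservice')   # base_dir resolves to .../myservice/skybase
artiball.create_manifest_file('build-42')     # writes skybase/manifest.yaml
artiball.update_content()                     # reads skybase.yaml and main_deployment.yaml

print(artiball.cookbooks)                     # cookbook names from the chef role runlists
print(artiball.chef_repo, artiball.chef_repo_branch)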
Example #30
class Create(SkyTask):
    def __init__(self, all_args=None, runner_cfg=None):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.WARNING)
        self.name = 'pack.create'
        self.args = all_args
        self.runner_cfg = runner_cfg

    def execute(self):
        if self.args['base_dir']:
            base_dir = self.args['base_dir']
            if base_dir.split('/')[-1] != 'skybase':
                base_dir = os.path.join(base_dir, 'skybase')
        else:
            base_dir = os.path.join(os.getcwd(), 'skybase')

        artiball = ab_object.Artiball(base_dir)

        if self.args['build_id']:
            artiball.create_manifest_file(self.args['build_id'])
        else:
            artiball.create_manifest_file('NoBuildID')
        artiball.update_content()

        # Hardcoding default to True for pack group command, revisit later
        self.args['apply'] = True
        if self.args['apply']:
            if self.args['verbose']:
                self.logger.write('Creating package in ' + base_dir,
                                  multi_line=False)
            pack_result = pack_actions.pack(artiball.base_dir,
                                            artiball.app_source,
                                            artiball.chef_repo,
                                            artiball.chef_repo_branch,
                                            artiball.cookbooks,
                                            artiball.use_berkshelf,
                                            artiball.databags,
                                            artiball.encrypted_databags,
                                            artiball.manifest,
                                            dry_run=False,
                                            verbose=self.args['verbose'])
        else:
            if self.args['verbose']:
                self.logger.write(
                    'DRY RUN :::: Dry running steps for package creation in ' +
                    base_dir,
                    multi_line=False)
            pack_result = pack_actions.pack(artiball.base_dir,
                                            artiball.app_source,
                                            artiball.chef_repo,
                                            artiball.chef_repo_branch,
                                            artiball.cookbooks,
                                            artiball.use_berkshelf,
                                            artiball.databags,
                                            artiball.encrypted_databags,
                                            artiball.manifest,
                                            dry_run=True,
                                            verbose=self.args['verbose'])

        if pack_result["valid"]:
            self.result.status = sky_cfg.API_STATUS_SUCCESS
        else:
            self.result.status = sky_cfg.API_STATUS_FAIL

        self.result.output += pack_result["result_string"]
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        validate = skytask.get_task_class_by_name('pack.validate')(self.args)
        result = validate.execute()
        if result.status == sky_cfg.API_STATUS_FAIL:
            result.output += "Invalid content for packing, please correct accordingly.\n"
            result.status = 'FAIL'
        return result
Example #31
class Artiball(object):
    '''
    This class contains the information needed to pack a Service into an Artiball
    and to perform verifications and conversions into different formats.

    Mode 1: initialize a blank dir so it can be checked into a repo

    Mode 2: check out of the repo; create a temp dir, put everything together and create a tarball
            a) each


    '''
    def __init__(self, base_dir=None):
        logging.basicConfig(level=logging.INFO)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)

        self.base_dir = base_dir
        self.yaml_files = []

        self.app_source = None
        self.cookbooks = []
        self.chef_repo = None
        self.chef_repo_branch = None
        self.use_berkshelf = False
        self.databags = []
        self.encrypted_databags = []

        self.manifest = {}

        self.initialize()

    def initialize(self):
        if not self.base_dir:
            self.base_dir = os.path.join(os.getcwd(), 'skybase')
        elif self.base_dir.split('/')[-1] != 'skybase':
            self.base_dir = os.path.join(self.base_dir, 'skybase')
        self.load_yaml_files()
        return self.base_dir

    def load_yaml_files(self):
        for path, dirs, files in os.walk(self.base_dir):
            for file in files:
                if file.split('.')[-1] == 'yaml':
                    self.yaml_files.append(os.path.join(path, file))

    def update_content(self):
        config_file = os.path.join(self.base_dir, 'skybase.yaml')
        with open(config_file, 'r') as temp_file:
            config = yaml.load(temp_file)
            self.app_source = config['packing']['application'][
                'source_location']
            installations = config['packing']['installations']
            for installation in installations:
                if installation.get("chef"):
                    self.databags = installation['chef']['databags']
                    self.encrypted_databags = installation['chef'][
                        'encrypted_databags']

                    self.chef_repo = installation['chef']['repository_url']
                    self.chef_repo_branch = installation['chef'][
                        'repository_branch']
                    self.use_berkshelf = installation['chef']['cookbooks'][
                        'dependencies_from_berkshelf']
        temp_file.close()

        deployment_file = os.path.join(self.base_dir, 'deployment',
                                       'main_deployment.yaml')
        with open(deployment_file, 'r') as temp_file:
            try:
                deployment = yaml.load(temp_file)
            except yaml.scanner.ScannerError:
                self.logger.write("Invalid yaml syntax  " + deployment_file +
                                  '\n',
                                  multi_line=True)
                sys.exit(1)
            stacks = deployment['stacks']
            for stack in stacks:
                roles = stack['roles']
                for role in roles:
                    for runlist_item in role['chef_role_runlist']:
                        self.cookbooks.append(runlist_item.split('::')[0])
        self.cookbooks = sorted(set(self.cookbooks))
        temp_file.close()

    def update_manifest(self, build_id=None):
        self.manifest = schemas.create_unordered_dict_from_schema(
            schemas.get_schema('manifest_yaml_schema', 'artiball'), 'manifest',
            'artiball')
        if build_id:
            self.manifest['metadata']['build_id'] = build_id
        for file in self.yaml_files:
            if 'main_deployment' in file:
                with open(file, 'r') as temp_file:
                    try:
                        deployment_data = yaml.load(temp_file)
                    except yaml.scanner.ScannerError:
                        self.logger.write("Invalid yaml syntax  " + file +
                                          '\n',
                                          multi_line=True)
                        sys.exit(1)
                    schemas.set_indicators()
                    if deployment_data['definition'][
                            'service_name'] not in schemas.INDICATORS:
                        self.manifest['metadata'][
                            'app_name'] = deployment_data['definition'][
                                'service_name']
                    if deployment_data['definition'][
                            'version'] not in schemas.INDICATORS:
                        self.manifest['metadata'][
                            'app_version'] = deployment_data['definition'][
                                'version']
                    if deployment_data['definition']['chef_type'] == 'server':
                        with open(os.path.join(self.base_dir, 'skybase.yaml'),
                                  'r') as config_file:
                            try:
                                skybase_data = yaml.load(config_file)
                            except yaml.scanner.ScannerError:
                                self.logger.write("Invalid yaml syntax  " +
                                                  file + '\n',
                                                  multi_line=True)
                                sys.exit(1)
                            schemas.set_indicators()
                            for installation in skybase_data['packing'][
                                    'installations']:
                                if installation.get("chef"):
                                    if (installation['chef']['repository_url'] not in schemas.INDICATORS) and \
                                            (installation['chef']['repository_branch'] not in schemas.INDICATORS):
                                        self.manifest['chef_cookbook_source'] = installation['chef']['repository_url'] + \
                                                                                '=' + installation['chef']['repository_branch']
                        config_file.close()
                temp_file.close()

    def create_manifest_file(self, build_id=None):
        self.update_manifest(build_id)
        manifest_file = os.path.join(self.base_dir, 'manifest.yaml')
        if os.path.exists(manifest_file):
            os.remove(manifest_file)
        with open(manifest_file, 'wb') as temp_file:
            yaml.dump(self.manifest,
                      temp_file,
                      allow_unicode=True,
                      default_flow_style=False)
        temp_file.close()
Example #32
 def __init__(self, all_args=None, runner_cfg=None):
     SkyTask.__init__(self, all_args, runner_cfg)
     self.logger = Logger(logging.getLogger(__name__), logging.WARNING)
     self.name = 'pack.create'
     self.args = all_args
     self.runner_cfg = runner_cfg
Example #33
class Submit(SkyTask):
    def __init__(self, all_args, runner_cfg):
        SkyTask.__init__(self, all_args, runner_cfg)
        self.logger = Logger(logging.getLogger(__name__), logging.INFO)
        self.name = 'pack.submit'
        self.args = all_args
        self.runner_cfg = runner_cfg
        self.aws_access_key_id = None
        self.aws_secret_access_key = None

        if self.args['base_dir']:
            self.base_dir = self.args['base_dir']
            if self.base_dir.split('/')[-1] != 'skybase':
                self.base_dir = os.path.join(self.base_dir, 'skybase')
        else:
            self.tdir = tempfile.mkdtemp()
            self.base_dir = os.path.join(self.tdir, 'skybase')
        self.tmp_dir = os.path.join(self.base_dir, 'tmp')
        if not os.path.exists(self.tmp_dir):
            os.makedirs(self.tmp_dir)

    def execute(self):
        aws_creds_profile = self.runner_cfg.data['package_depot_aws_profile']
        aws_creds_file = os.path.join(self.runner_cfg.data['runner_credentials_dir'], 'aws', 'config')
        if os.path.exists(aws_creds_file):
            config = ConfigParser.ConfigParser()
            config.read([str(aws_creds_file)])
            self.aws_access_key_id = config.get('profile ' + aws_creds_profile, 'aws_access_key_id')
            self.aws_secret_access_key = config.get('profile ' + aws_creds_profile, 'aws_secret_access_key')

            if self.args['artiball'].endswith('.tar.gz'):
                artiball = self.args['artiball']
            else:
                artiball = self.args['artiball'] + '.tar.gz'

            pack.set_incoming_s3_bucket()
            self.logger.write('Downloading package from S3 bucket ' + pack.PACKAGE_INCOMING_S3_BUCKET, multi_line=False)
            download_result = sky_boto_actions.download_from_s3(pack.PACKAGE_INCOMING_S3_BUCKET, artiball,
                                                                self.tmp_dir, self.logger,
                                                                access_key=self.aws_access_key_id,
                                                                secret_key=self.aws_secret_access_key,
                                                                dry_run=False)
            self.result.output += download_result["result_string"]
            if not download_result["valid"]:
                self.result.status = sky_cfg.API_STATUS_FAIL
                return self.result

            artiball_file = tarfile.open(os.path.join(self.tmp_dir, artiball), 'r:gz')
            artiball_dir = os.path.join(self.tmp_dir, artiball.split('.tar.gz')[0])
            artiball_file.extractall(os.path.join(artiball_dir, 'skybase'))
            self.logger.write('Validating package in ' + artiball_dir, multi_line=False)
            validate_result = pack_actions.validate_with_schema(artiball_dir, 'artiball',
                                                                update_content_from_config=False)
            if validate_result["valid"]:
                self.result.output += "All content validated, ready for pack submit.\n"
            else:
                self.result.output += "Invalid content for submission, please verify artiball is valid.\n"
                self.result.status = sky_cfg.API_STATUS_FAIL
                return self.result

            app_dir = os.path.join(artiball_dir, 'skybase', 'app')
            yum_aws_creds_file = os.path.join(self.runner_cfg.data['runner_credentials_dir'], 'aws', 'config')

            if os.path.exists(yum_aws_creds_file):
                config = ConfigParser.ConfigParser()
                config.read([str(yum_aws_creds_file)])
                yum_replications = self.runner_cfg.data['yum_replication']
                for yum_replication in yum_replications:
                    yum_aws_creds_profile = yum_replication['profile']
                    yum_aws_access_key_id = config.get('profile ' + yum_aws_creds_profile, 'aws_access_key_id')
                    yum_aws_secret_access_key = config.get('profile ' + yum_aws_creds_profile, 'aws_secret_access_key')
                    for f in os.listdir(app_dir):
                        if os.path.splitext(f)[1] == '.rpm':
                            # Hardcoding default to True for pack group command, revisit later
                            self.args['apply'] = True
                            if self.args['apply']:
                                upload_result = sky_boto_actions.upload_to_s3(yum_replication['name'],
                                                                              os.path.join(app_dir, f),
                                                                              self.logger, prefix='inbox/skybase',
                                                                              access_key=yum_aws_access_key_id,
                                                                              secret_key=yum_aws_secret_access_key,
                                                                              dry_run=False)
                            else:
                                upload_result = sky_boto_actions.upload_to_s3(yum_replication['name'],
                                                                              os.path.join(app_dir, f),
                                                                              self.logger, prefix='inbox/skybase',
                                                                              access_key=yum_aws_access_key_id,
                                                                              secret_key=yum_aws_secret_access_key,
                                                                              dry_run=True)
                            self.result.output += upload_result["result_string"]
                            if not upload_result["valid"]:
                                self.result.status = sky_cfg.API_STATUS_FAIL
                                return self.result

            else:
                self.result.output += "Cannot locate aws credentials, please confirm they are set in " \
                                      + yum_aws_creds_file + "\n"
                self.result.status = sky_cfg.API_STATUS_FAIL
                return self.result
        else:
            self.result.output += "Cannot locate aws credentials, please confirm they are set in " + aws_creds_file \
                                  + "\n"
            self.result.status = sky_cfg.API_STATUS_FAIL
            return self.result

        file_path = os.path.join(self.tmp_dir, artiball)
        depot_bucket_name = os.path.expanduser(self.runner_cfg.data['package_depot_S3_bucket'])

        # Hardcoding default to True for pack group command, revisit later
        self.args['apply'] = True
        if self.args['apply']:
            self.logger.write('Uploading package to S3 bucket ' + depot_bucket_name, multi_line=False)
            upload_result = sky_boto_actions.upload_to_s3(depot_bucket_name, file_path, self.logger,
                                                          access_key=self.aws_access_key_id,
                                                          secret_key=self.aws_secret_access_key,
                                                          dry_run=False)
            self.result.output += upload_result["result_string"]
            if not upload_result["valid"]:
                self.result.status = sky_cfg.API_STATUS_FAIL
                return self.result

            self.logger.write('Archiving package in S3 bucket ' + pack.PACKAGE_INCOMING_S3_BUCKET, multi_line=False)
            move_result = sky_boto_actions.move_object_s3(pack.PACKAGE_INCOMING_S3_BUCKET, pack.PACKAGE_INCOMING_S3_BUCKET,
                                                          os.path.basename(file_path),
                                                          src_prefix=None, dst_prefix='archive',
                                                          access_key=self.aws_access_key_id,
                                                          secret_key=self.aws_secret_access_key,
                                                          dry_run=False)
        else:
            self.logger.write('DRY RUN :::: Dry running steps for package upload to S3 bucket ' + depot_bucket_name,
                              multi_line=False)
            upload_result = sky_boto_actions.upload_to_s3(depot_bucket_name, file_path, self.logger,
                                                          access_key=self.aws_access_key_id,
                                                          secret_key=self.aws_secret_access_key,
                                                          dry_run=True)
            self.result.output += upload_result["result_string"]
            if not upload_result["valid"]:
                self.result.status = sky_cfg.API_STATUS_FAIL
                return self.result
            self.logger.write('DRY RUN :::: Dry running steps for archiving package in S3 bucket '
                              + pack.PACKAGE_INCOMING_S3_BUCKET, multi_line=False)
            move_result = sky_boto_actions.move_object_s3(pack.PACKAGE_INCOMING_S3_BUCKET, pack.PACKAGE_INCOMING_S3_BUCKET,
                                                          os.path.basename(file_path),
                                                          src_prefix=None, dst_prefix='archive',
                                                          access_key=self.aws_access_key_id,
                                                          secret_key=self.aws_secret_access_key,
                                                          dry_run=True)
        self.result.output += move_result["result_string"]
        if not move_result["valid"]:
            self.result.status = sky_cfg.API_STATUS_FAIL
            return self.result

        if hasattr(self, 'tdir') and os.path.exists(self.tdir):
            shutil.rmtree(self.tdir)
        else:
            shutil.rmtree(self.tmp_dir)
        self.result.status = sky_cfg.API_STATUS_SUCCESS
        return self.result

    def preflight_check(self):
        result = skytask.TaskResult()
        if self.args['artiball'] is None:
            result.output += "Missing artiball argument, please use -a to specify.\n"
            shutil.rmtree(self.tmp_dir)
            result.status = 'FAIL'
        return result