Example no. 1
 def send_blueprint_file(self, filename, name=''):
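     # Read the blueprint JSON from 'filename', determine the blueprint name if not supplied,
     # rewrite the Blueprints.blueprint_name field to match, then submit via self.send_blueprint()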
     # log.debug('send_blueprint_file(%s, %s)' % (filename, name))
     validate_file(filename, 'blueprint', nolog=True)
     try:
         _ = open(str(filename))
         file_data = _.read()
     except IOError as _:
         err = "failed to read Ambari Blueprint from file '%s': %s" % (file, _)
         # log.critical(err)
         qquit('CRITICAL', err)
     if not name:
         try:
             name = self.parse_blueprint_name(file_data)
             log.info("name not specified, determined blueprint name from file contents as '%s'" % name)
         except KeyError as _:
             pass
     if not name:
         name = os.path.splitext(os.path.basename(filename))[0]
         log.info("name not specified and couldn't determine blueprint name from blueprint data, reverting to using filename without extension '%s'" % name) # pylint: disable=line-too-long
     # this solves the issue of having duplicate Blueprint.blueprint_name keys
     data = file_data  # fallback so 'data' is always defined if the KeyError branch below is hit
     try:
         json_data = json.loads(file_data)
         json_data['Blueprints']['blueprint_name'] = name
         data = json.dumps(json_data)
         log.info("reset blueprint field name to '%s'" % name)
     except ValueError as _:
         qquit('CRITICAL', "invalid json found in file '%s': %s" % (file, name))
     except KeyError as _:
         log.warn('failed to reset the Blueprint name: %s' % _)
     return self.send_blueprint(name, data)
Example no. 2
 def create_cluster(self, cluster, filename, blueprint=''):
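     # Read the cluster hosts mapping JSON from 'filename', optionally override its top-level
     # 'blueprint' field, then submit the cluster creation request via self.send()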
     # log.debug('create_cluster(%s, %s, %s)' % (cluster, filename, blueprint))
     validate_file(filename, 'cluster hosts mapping', nolog=True)
     try:
         _ = open(str(filename))
         file_data = _.read()
     except IOError as _:
         err = "failed to read Ambari cluster host mapping from file '%s': %s" % (filename, _)
         # log.critical(err)
         qquit('CRITICAL', err)
     log.info("creating cluster '%s' using file '%s'" % (cluster, filename))
     if not isJson(file_data):
         qquit('CRITICAL', "invalid json found in file '%s'" % filename)
     # don't have access to a blueprint name to enforce reset here
     # json_data = json.loads(file_data)
     # try:
     #     json_data['Blueprints']['blueprint_name'] = blueprint
     # except KeyError, e:
     #     qquit('CRITICAL', 'failed to (re)set blueprint name in cluster/hostmapping data before creating cluster')
     if blueprint:
         try:
             log.info("setting blueprint in cluster creation to '%s'" % blueprint)
             json_data = json.loads(file_data)
             json_data['blueprint'] = blueprint
             file_data = json.dumps(json_data)
         except KeyError as _:
             log.warn("failed to inject blueprint name '%s' in to cluster creation" % blueprint)
     response = self.send('clusters/%s' % cluster, file_data)
     log.info("Cluster creation submitted, see Ambari web UI to track progress")
     return response
Example no. 3
 def create_cluster(self, cluster, filename, blueprint=''):
     # log.debug('create_cluster(%s, %s, %s)' % (cluster, filename, blueprint))
     validate_file(filename, 'cluster hosts mapping', nolog=True)
     try:
         _ = open(str(filename))
         file_data = _.read()
     except IOError as _:
         err = "failed to read Ambari cluster host mapping from file '%s': %s" % (
             filename, _)
         # log.critical(err)
         qquit('CRITICAL', err)
     log.info("creating cluster '%s' using file '%s'" % (cluster, filename))
     if not isJson(file_data):
         qquit('CRITICAL', "invalid json found in file '%s'" % filename)
     # don't have access to a blueprint name to enforce reset here
     # json_data = json.loads(file_data)
     # try:
     #     json_data['Blueprints']['blueprint_name'] = blueprint
     # except KeyError, e:
     #     qquit('CRITICAL', 'failed to (re)set blueprint name in cluster/hostmapping data before creating cluster')
     if blueprint:
         try:
             log.info("setting blueprint in cluster creation to '%s'" %
                      blueprint)
             json_data = json.loads(file_data)
             json_data['blueprint'] = blueprint
             file_data = json.dumps(json_data)
         except KeyError as _:
             log.warn(
                 "failed to inject blueprint name '%s' in to cluster creation"
                 % blueprint)
     response = self.send('clusters/%s' % cluster, file_data)
     log.info(
         "Cluster creation submitted, see Ambari web UI to track progress")
     return response
Example no. 4
 def send_blueprint_file(self, filename, name=''):
     # log.debug('send_blueprint_file(%s, %s)' % (filename, name))
     validate_file(filename, 'blueprint', nolog=True)
     try:
         _ = open(str(filename))
         file_data = _.read()
     except IOError as _:
         err = "failed to read Ambari Blueprint from file '%s': %s" % (
             filename, _)
         # log.critical(err)
         qquit('CRITICAL', err)
     if not name:
         try:
             name = self.parse_blueprint_name(file_data)
             log.info(
                 "name not specified, determined blueprint name from file contents as '%s'"
                 % name)
         except KeyError as _:
             pass
     if not name:
         name = os.path.splitext(os.path.basename(filename))[0]
         log.info("name not specified and couldn't determine blueprint name from blueprint data, reverting to using filename without extension '%s'" % name)  # pylint: disable=line-too-long
     # this solves the issue of having duplicate Blueprint.blueprint_name keys
     data = file_data  # fallback so 'data' is always defined if the KeyError branch below is hit
     try:
         json_data = json.loads(file_data)
         json_data['Blueprints']['blueprint_name'] = name
         data = json.dumps(json_data)
         log.info("reset blueprint field name to '%s'" % name)
     except ValueError as _:
         qquit('CRITICAL',
               "invalid json found in file '%s': %s" % (filename, name))
     except KeyError as _:
         log.warn('failed to reset the Blueprint name: %s' % _)
     return self.send_blueprint(name, data)
Example no. 5
 def run(self):
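     # Validate the 'file' and 'max_file_age' options (age bounded to 0-31 days in seconds),
     # then parse the hbck output file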
     self.no_args()
     filename = self.get_opt('file')
     self.max_file_age = self.get_opt('max_file_age')
     validate_file(filename, 'hbck')
     validate_int(self.max_file_age, 'max file age', 0, 86400 * 31)
     self.max_file_age = int(self.max_file_age)
     self.parse(filename)
Example no. 6
 def run(self):
     self.no_args()
     filename = self.get_opt('file')
     self.max_file_age = self.get_opt('max_file_age')
     validate_file(filename, 'hbck')
     validate_int(self.max_file_age, 'max file age', 0, 86400 * 31)
     self.max_file_age = int(self.max_file_age)
     self.parse(filename)
Example no. 7
 def _validate_filenames(self):
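     # Validate every filename in self.file_list, logging '-' as stdin;
     # if no files were supplied, fall back to reading from stdin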
     for filename in self.file_list:
         if filename == '-':
             log_option('file', '<STDIN>')
         else:
             validate_file(filename)
     # use stdin
     if not self.file_list:
         self.file_list.add('-')
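A minimal sketch of how a validated file list like the one above is typically consumed afterwards; the loop, the placeholder filename, and the print call are illustrative assumptions, not part of the example:

    import sys

    # hypothetical consumer: '-' is the conventional marker for reading from stdin,
    # anything else is treated as a regular file path (placeholder name used here)
    file_list = ['example.txt', '-']
    for filename in file_list:
        if filename == '-':
            content = sys.stdin.read()
        else:
            with open(filename) as filehandle:
                content = filehandle.read()
        print('%s: %d bytes' % (filename, len(content)))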
Example no. 8
    def process_args(self):
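        # Set log verbosity from the verbose option, validate host/port/user/password and any
        # dir/file options, then reject conflicting switch combinations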
        options, args = self.options, self.args

        log.setLevel(logging.WARN)
        if options.verbose > 1:
            log.setLevel(logging.DEBUG)
        elif options.verbose:
            log.setLevel(logging.INFO)
        # log.info('verbose level: %s' % options.verbose)

        try:
            validate_host(options.host)
            validate_port(options.port)
            validate_user(options.user)
            validate_password(options.password)
            if options.dir:
                validate_dirname(options.dir, 'blueprints')
            if options.file:
                if options.push:
                    validate_file(options.file, 'blueprint')
                if options.create_cluster:
                    validate_file(options.file, 'cluster hosts mapping')
        except InvalidOptionException as _:
            self.usage(_)

        if self.args:
            self.usage('additional args detected')

        if options.get and options.blueprint and options.cluster:
            self.usage(
                '--blueprint/--cluster are mutually exclusive when using --get'
            )
        elif options.push and options.create_cluster:
            self.usage('--push and --create-cluster are mutually exclusive')
        elif options.create_cluster and not options.cluster:
            self.usage(
                '--create-cluster requires specifying the name via --cluster')
        elif options.list_blueprints + options.list_clusters + options.list_hosts > 1:
            self.usage('can only use one --list switch at a time')
        elif options.file and (options.get
                               and not (options.blueprint or options.cluster)):
            self.usage("cannot specify --file without --blueprint/--cluster as it's only used " + \
                       "when getting or pushing a single blueprint")
        elif options.file and (options.push and not (options.create_cluster
                                                     or options.blueprint)):
            self.usage("cannot specify --file without --blueprint/--create-cluster as it's only used " + \
                       "when getting or pushing a single blueprint or creating a cluster based on the blueprint")
        return options, args
Example no. 9
 def process_args(self):
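     # Take filenames from positional args, validate the optional key prefix length (1-100),
     # de-duplicate the list preserving order, and validate each file ('-' means stdin)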
     self.files = self.args
     self.prefix_length = self.get_opt('key_prefix_length')
     self.skip_errors = self.get_opt('skip_errors')
     self.sort_desc = self.get_opt('desc')
     self.include_timestamps = self.get_opt('include_timestamps')
     if self.prefix_length is not None:
         validate_int(self.prefix_length, 'key prefix length', 1, 100)
         self.prefix_length = int(self.prefix_length)
     if not self.files:
         self.usage('no file(s) specified as arguments')
     self.files = uniq_list_ordered(self.files)
     for filename in self.files:
         if filename == '-':
             log_option('file', '<stdin>')
             continue
         validate_file(filename)
Example no. 10
 def process_args(self):
     self.files = self.args
     self.prefix_length = self.get_opt('key_prefix_length')
     self.skip_errors = self.get_opt('skip_errors')
     self.sort_desc = self.get_opt('desc')
     self.include_timestamps = self.get_opt('include_timestamps')
     if self.prefix_length is not None:
         validate_int(self.prefix_length, 'key prefix length', 1, 100)
         self.prefix_length = int(self.prefix_length)
     if not self.files:
         self.usage('no file(s) specified as arguments')
     self.files = uniq_list_ordered(self.files)
     for filename in self.files:
         if filename == '-':
             log_option('file', '<stdin>')
             continue
         validate_file(filename)
Example no. 11
    def process_args(self):
        options, args = self.options, self.args

        log.setLevel(logging.WARN)
        if options.verbose > 1:
            log.setLevel(logging.DEBUG)
        elif options.verbose:
            log.setLevel(logging.INFO)
        # log.info('verbose level: %s' % options.verbose)

        try:
            validate_host(options.host)
            validate_port(options.port)
            validate_user(options.user)
            validate_password(options.password)
            if options.dir:
                validate_dirname(options.dir, 'blueprints')
            if options.file:
                if options.push:
                    validate_file(options.file, 'blueprint')
                if options.create_cluster:
                    validate_file(options.file, 'cluster hosts mapping')
        except InvalidOptionException as _:
            self.usage(_)

        if self.args:
            self.usage('additional args detected')

        if options.get and options.blueprint and options.cluster:
            self.usage('--blueprint/--cluster are mutually exclusive when using --get')
        elif options.push and options.create_cluster:
            self.usage('--push and --create-cluster are mutually exclusive')
        elif options.create_cluster and not options.cluster:
            self.usage('--create-cluster requires specifying the name via --cluster')
        elif options.list_blueprints + options.list_clusters + options.list_hosts > 1:
            self.usage('can only use one --list switch at a time')
        elif options.file and (options.get and not (options.blueprint or options.cluster)):
            self.usage("cannot specify --file without --blueprint/--cluster as it's only used " + \
                       "when getting or pushing a single blueprint")
        elif options.file and (options.push and not (options.create_cluster or options.blueprint)):
            self.usage("cannot specify --file without --blueprint/--create-cluster as it's only used " + \
                       "when getting or pushing a single blueprint or creating a cluster based on the blueprint")
        return options, args
Example no. 12
 def process_docker_options(self):
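     # Resolve Docker connection settings: base URL plus TLS CA/cert/key paths taken from
     # options or DOCKER_CERT_PATH, honouring DOCKER_TLS_VERIFY, and build a TLSConfig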
     # should look like unix:///var/run/docker.sock or tcp://127.0.0.1:1234
     self.base_url = self.get_opt('base_url')
     if self.base_url:
         validate_chars(self.base_url, 'base url', r'A-Za-z0-9\/\:\.')
     self.tls = self.get_opt('tls')
     if not self.tls and os.getenv('DOCKER_TLS_VERIFY'):
         self.tls = True
     log_option('tls', self.tls)
     if self.tls:
         ca_file = self.get_opt('tlscacert')
         cert_file = self.get_opt('tlscert')
         key_file = self.get_opt('tlskey')
         tls_verify = self.get_opt('tlsverify')
         docker_cert_path = os.getenv('DOCKER_CERT_PATH')
         if docker_cert_path:
             if not ca_file:
                 ca_file = os.path.join(docker_cert_path, 'ca.pem')
             if not cert_file:
                 cert_file = os.path.join(docker_cert_path, 'cert.pem')
             if not key_file:
                 key_file = os.path.join(docker_cert_path, 'key.pem')
             if not tls_verify and os.getenv('DOCKER_TLS_VERIFY'):
                 tls_verify = True
         validate_file(ca_file, 'TLS CA cert file')
         validate_file(cert_file, 'TLS cert file')
         validate_file(key_file, 'TLS key file')
         log_option('TLS verify', tls_verify)
         self.tls_config = docker.tls.TLSConfig(
             ca_cert=ca_file,  # pylint: disable=redefined-variable-type
             verify=tls_verify,
             client_cert=(cert_file, key_file))
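For context, the TLSConfig built at the end of this example is normally passed to a docker-py client. A minimal standalone sketch follows; the certificate paths, the tcp endpoint, and the APIClient call are illustrative assumptions, not part of the example above:

    import docker
    from docker.tls import TLSConfig

    # hypothetical equivalent of the TLS setup above, with placeholder paths
    tls_config = TLSConfig(ca_cert='/path/to/ca.pem',
                           verify=True,
                           client_cert=('/path/to/cert.pem', '/path/to/key.pem'))
    # connect to a TLS-enabled Docker daemon and print its version info
    client = docker.APIClient(base_url='tcp://127.0.0.1:2376', tls=tls_config)
    print(client.version())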