def check_process_status(pid_file):
  """
  Check whether the process described by a pid file is running.

  The process is considered running when the pid file exists and a process
  with the pid read from that file responds to signal 0.

  @param pid_file: path to service pid file
  @raise ComponentIsNotRunning: if the pid file is missing or invalid, or no
         process with that pid is alive
  """
  from resource_management.core import sudo

  if not pid_file or not os.path.isfile(pid_file):
    Logger.info("Pid file {0} is empty or does not exist".format(str(pid_file)))
    raise ComponentIsNotRunning()

  try:
    pid = int(sudo.read_file(pid_file))
  except Exception:
    # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
    Logger.info("Pid file {0} does not exist or does not contain a process id number".format(pid_file))
    raise ComponentIsNotRunning()

  try:
    # Signal 0 sends nothing but still performs error checking, so it can be
    # used to probe for the existence of a process id (see kill(2))
    sudo.kill(pid, 0)
  except OSError:
    Logger.info("Process with pid {0} is not running. Stale pid file"
              " at {1}".format(pid, pid_file))
    raise ComponentIsNotRunning()
Esempio n. 2
0
  def action_create(self):
    """Create the file resource, writing its content only when it differs
    from what is already on disk, then (re)apply ownership metadata."""
    target = self.resource.path

    if sudo.path_isdir(target):
      raise Fail("Applying %s failed, directory with name %s exists" % (self.resource, target))

    parent = os.path.dirname(target)
    if not sudo.path_isdir(parent):
      raise Fail("Applying %s failed, parent directory %s doesn't exist" % (self.resource, parent))

    content = self._get_content()
    reason = None
    if not sudo.path_exists(target):
      reason = "it doesn't exist"
    elif self.resource.replace and content is not None:
      if content != sudo.read_file(target, encoding=self.resource.encoding):
        reason = "contents don't match"
        if self.resource.backup:
          self.resource.env.backup_file(target)

    if reason is not None:
      Logger.info("Writing %s because %s" % (self.resource, reason))
      sudo.create_file(target, content, encoding=self.resource.encoding)

    _ensure_metadata(self.resource.path, self.resource.owner,
                        self.resource.group, mode=self.resource.mode, cd_access=self.resource.cd_access)
Esempio n. 3
0
def get_desired_state():
  """Return the desired service state recorded in the ambari state file,
  falling back to 'INSTALLED' when the file is absent."""
  import params
  from resource_management.core import sudo

  if not os.path.exists(params.ambari_state_file):
    return 'INSTALLED'
  return sudo.read_file(params.ambari_state_file)
def check_process_status(pid_file):
  """
  Check whether the process described by a pid file is running.

  The process is considered running when the pid file exists and
  `ps -p <pid>` reports a live process for the pid read from that file.

  @param pid_file: path to service pid file
  @raise ComponentIsNotRunning: if the pid file is missing or invalid, or no
         process with that pid is alive
  """
  if not pid_file or not os.path.isfile(pid_file):
    raise ComponentIsNotRunning()

  try:
    pid = int(sudo.read_file(pid_file))
  except Exception:
    # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
    Logger.debug("Pid file {0} does not exist".format(pid_file))
    raise ComponentIsNotRunning()

  # "ps -p <pid>" exits non-zero when no such process exists
  code, out = shell.call(["ps","-p", str(pid)])

  if code:
    Logger.debug("Process with pid {0} is not running. Stale pid file"
              " at {1}".format(pid, pid_file))
    raise ComponentIsNotRunning()
Esempio n. 5
0
  def action_create(self):
    # Render the repo template and write <repo_dir>/<repo_file_name>.repo,
    # flushing the package manager cache on the SUSE family when an existing
    # repo file is about to change content.
    with Environment.get_instance_copy() as env:
      repo_file_name = self.resource.repo_file_name
      repo_dir = get_repo_dir()
      new_content = InlineTemplate(self.resource.repo_template, repo_id=self.resource.repo_id, repo_file_name=self.resource.repo_file_name,
                             base_url=self.resource.base_url, mirror_list=self.resource.mirror_list)
      # NOTE: this format() has no keyword args yet references {repo_dir} and
      # {repo_file_name} -- it appears to interpolate from the locals above
      # (Ambari-style format, not str.format); do not rename those locals
      repo_file_path = format("{repo_dir}/{repo_file_name}.repo")

      if os.path.isfile(repo_file_path):
        existing_content_str = sudo.read_file(repo_file_path)
        new_content_str = new_content.get_content()
        if existing_content_str != new_content_str and OSCheck.is_suse_family():
          # We need to reset package manager's cache when we replace base urls
          # at existing repo. That is a case at least under SLES
          Logger.info("Flushing package manager cache since repo file content is about to change")
          checked_call(self.update_cmd, sudo=True)
        if self.resource.append_to_file:
          content = existing_content_str + '\n' + new_content_str
        else:
          content = new_content_str
      else: # If repo file does not exist yet
        content = new_content

      File(repo_file_path,
           content=content
      )
Esempio n. 6
0
    def stop(self, env):
        """Kill the Flink YARN application and remove its pid file."""
        import params
        import status_params
        from resource_management.core import sudo

        # the pid file content is passed to `yarn application -kill`, so it
        # presumably holds the YARN application id -- confirm against start()
        app_id = str(sudo.read_file(status_params.flink_pid_file))
        Execute('yarn application -kill ' + app_id, user=params.flink_user)

        Execute('rm ' + status_params.flink_pid_file, ignore_failures=True)
Esempio n. 7
0
    def get_interpreter_settings(self):
        """Read conf_dir/interpreter.json and return it parsed as a dict."""
        import params
        import json

        config_path = os.path.join(params.conf_dir, "interpreter.json")
        return json.loads(sudo.read_file(config_path))
Esempio n. 8
0
def get_config_version(version_file, version_type):
    """Return the stored value for *version_type* from a JSON version file.

    @param version_file: path to the JSON version file
    @param version_type: key to look up in the parsed JSON object
    @return: the stored value, or None when the file or the key is absent
    """
    if not sudo.path_isfile(version_file):
        # explicit None instead of the original implicit fall-through
        return None
    contents = sudo.read_file(version_file)
    version = json.loads(contents)
    # dict.get returns None for a missing key, matching the original branches
    return version.get(version_type)
Esempio n. 9
0
  def stop(self, env):
    """Kill the Flink YARN application and remove its pid file."""
    import params
    import status_params
    from resource_management.core import sudo

    # the pid file content is passed to `yarn application -kill`, so it
    # presumably holds the YARN application id -- confirm against start()
    app_id = str(sudo.read_file(status_params.flink_pid_file))
    Execute('yarn application -kill ' + app_id, user=params.flink_user)

    Execute('rm ' + status_params.flink_pid_file, ignore_failures=True)
Esempio n. 10
0
    def action_create(self):
        """Create or update a key/value properties file in place.

        Existing lines whose key appears in ``self.resource.properties`` are
        rewritten with the new value; properties not found in the file are
        appended at the end.  The final text is written back via File().
        """
        filename = self.resource.filename
        comment_symbols = self.resource.comment_symbols
        delimiter = self.resource.key_value_delimiter
        properties = self.resource.properties
        # Python 2: dict.keys() returns a list, so .remove() below is valid
        unsaved_values = properties.keys()
        new_content_lines = []
        final_content_lines = ""

        if sudo.path_isfile(filename):
            file_content = sudo.read_file(filename,
                                          encoding=self.resource.encoding)
            new_content_lines += file_content.split('\n')

            Logger.info(
                format("Modifying existing properties file: (unknown)"))

            for line_num in range(len(new_content_lines)):
                line = new_content_lines[line_num]

                # only rewrite lines that are non-blank, are not comments,
                # and contain the key/value delimiter
                if line.lstrip() and not line.lstrip(
                )[0] in comment_symbols and delimiter in line:
                    in_var_name = line.split(delimiter)[0].strip()
                    in_var_value = line.split(delimiter)[1].strip()

                    if in_var_name in properties:
                        value = InlineTemplate(unicode(
                            properties[in_var_name])).get_content()
                        new_content_lines[line_num] = u"{0}{1}{2}".format(
                            unicode(in_var_name), delimiter, value)
                        unsaved_values.remove(in_var_name)
        else:
            Logger.info(
                format(
                    "Creating new properties file as (unknown) doesn't exist")
            )

        # append properties that were not present in the original file
        for property_name in unsaved_values:
            value = InlineTemplate(unicode(
                properties[property_name])).get_content()
            line = u"{0}{1}{2}".format(unicode(property_name), delimiter,
                                       value)
            new_content_lines.append(line)

        final_content_lines = u"\n".join(new_content_lines)
        # ensure the file ends with a trailing newline
        if not final_content_lines.endswith("\n"):
            final_content_lines = final_content_lines + "\n"

        File(
            filename,
            content=final_content_lines,
            owner=self.resource.owner,
            group=self.resource.group,
            mode=self.resource.mode,
            encoding=self.resource.encoding,
        )
Esempio n. 11
0
def convert_properties_to_dict(prop_file):
    """Parse a simple ``key=value`` properties file into a dict.

    Fixes over the previous version: splits on the FIRST '=' only so values
    that themselves contain '=' are preserved instead of being silently
    dropped, skips blank lines instead of inserting a '' key, and no longer
    shadows the builtin name ``dict``.

    @param prop_file: path to the properties file
    @return: dict of key -> value ('' for keys without a value); empty dict
             when the file does not exist
    """
    result = {}
    if sudo.path_isfile(prop_file):
        for line in sudo.read_file(prop_file).split('\n'):
            # split on the first '=' only: values such as URLs may contain '='
            parts = line.rstrip().split('=', 1)
            if len(parts) == 2:
                result[parts[0]] = parts[1]
            elif parts[0]:
                # key with no delimiter; blank lines are skipped
                result[parts[0]] = ''
    return result
def copy_toolkit_scripts(toolkit_files_dir, toolkit_tmp_dir, user, group,
                         upgrade_type, service):
    """Copy NiFi toolkit scripts/packages into the tmp dir and fix ownership.

    @param toolkit_files_dir: source directory containing the toolkit files
    @param toolkit_tmp_dir: destination tmp directory
    @param user: owner applied to the copied files
    @param group: group applied to the copied files
    @param upgrade_type: when not None the toolkit dir is re-copied
    @param service: service name; run_ca.sh handling happens only for NIFI
    """
    import params

    if service == NIFI:
        run_ca_tmp_script = os.path.join(toolkit_tmp_dir, 'run_ca.sh')
        new_run_ca_tmp_script = StaticFile("run_ca.sh")

        # NOTE(review): read_file() returns file text while StaticFile is a
        # content-source object -- confirm this inequality really compares
        # contents rather than always being true
        if not sudo.path_isfile(run_ca_tmp_script) or sudo.read_file(
                run_ca_tmp_script) != new_run_ca_tmp_script:
            File(format(run_ca_tmp_script),
                 content=new_run_ca_tmp_script,
                 mode=0755,
                 owner=user,
                 group=group)

    if not params.stack_support_nifi_toolkit_package:
        nifiToolkitDirFilesPath = None
        nifiToolkitDirTmpPath = None

        Logger.info("Toolkit files dir is " + toolkit_files_dir)
        Logger.info("Toolkit tmp dir is " + toolkit_tmp_dir)

        # locate the nifi-toolkit-* directory shipped with the service
        for dir in os.listdir(toolkit_files_dir):
            if dir.startswith('nifi-toolkit-'):
                nifiToolkitDirFilesPath = os.path.join(toolkit_files_dir, dir)
                nifiToolkitDirTmpPath = os.path.join(toolkit_tmp_dir, dir)

        # NOTE(review): if no nifi-toolkit-* dir was found both paths remain
        # None and path_isdir(None)/the copy below would misbehave -- confirm
        # the directory is guaranteed to exist
        if not sudo.path_isdir(nifiToolkitDirTmpPath) or not (upgrade_type is
                                                              None):
            os.system("\cp -r " + nifiToolkitDirFilesPath + " " +
                      toolkit_tmp_dir)
            Directory(nifiToolkitDirTmpPath,
                      owner=user,
                      group=group,
                      create_parents=False,
                      recursive_ownership=True,
                      cd_access="a",
                      mode=0755)
            os.system("\/var/lib/ambari-agent/ambari-sudo.sh chmod -R 755 " +
                      nifiToolkitDirTmpPath)
    else:
        Logger.info("Changing owner of package files")
        package_toolkit_dir = os.path.join(params.stack_root, 'current',
                                           'nifi-toolkit')
        Directory(package_toolkit_dir,
                  owner=user,
                  group=group,
                  create_parents=False,
                  recursive_ownership=True,
                  cd_access="a",
                  mode=0755,
                  recursion_follow_links=True)
Esempio n. 13
0
    def action_create(self):
        """Write an apt ``.list`` repo file, run the package-index update only
        when the file content actually changed, and register any public keys
        the update reports as missing."""
        with Environment.get_instance_copy() as env:
            with tempfile.NamedTemporaryFile() as tmpf:
                with tempfile.NamedTemporaryFile() as old_repo_tmpf:
                    repo_file_name = format(
                        "{repo_file_name}.list",
                        repo_file_name=self.resource.repo_file_name)
                    repo_file_path = format("{repo_dir}/{repo_file_name}",
                                            repo_dir=self.repo_dir)

                    new_content = InlineTemplate(
                        self.resource.repo_template,
                        package_type=self.package_type,
                        base_url=self.resource.base_url,
                        components=' '.join(
                            self.resource.components)).get_content()
                    old_content = ''
                    if self.resource.append_to_file and os.path.isfile(
                            repo_file_path):
                        old_content = sudo.read_file(repo_file_path) + '\n'

                    # stage the prospective content in a temp file so it can
                    # be compared with the existing repo file below
                    File(tmpf.name, content=old_content + new_content)

                    if os.path.isfile(repo_file_path):
                        # a copy of old repo file, which will be readable by current user
                        File(
                            old_repo_tmpf.name,
                            content=StaticFile(repo_file_path),
                        )

                    # rewrite the repo file (and pay for the index update)
                    # only when the content differs
                    if not os.path.isfile(repo_file_path) or not filecmp.cmp(
                            tmpf.name, old_repo_tmpf.name):
                        File(repo_file_path, content=StaticFile(tmpf.name))

                        update_cmd_formatted = [
                            format(x) for x in self.update_cmd
                        ]
                        # this is time expensive
                        retcode, out = checked_call(update_cmd_formatted,
                                                    sudo=True,
                                                    quiet=False)

                        # add public keys for new repos
                        missing_pkeys = set(
                            re.findall(self.missing_pkey_regex, out))
                        for pkey in missing_pkeys:
                            Execute(
                                self.app_pkey_cmd_prefix + (pkey, ),
                                timeout=
                                15,  # in case we are on the host w/o internet (using localrepo), we should ignore hanging
                                ignore_failures=True,
                                sudo=True,
                            )
Esempio n. 14
0
def get_config_by_version(config_path, config_name, version):
    """Find the command-*.json config whose tag for *config_name* matches.

    Scans *config_path* for files named command-*.json and returns the first
    parsed config whose ``configurationTags[config_name]['tag']`` equals
    *version*.

    @param config_path: directory containing command-*.json files
    @param config_name: configuration section to match on
    @param version: tag value to look for; None short-circuits the scan
    @return: the matching parsed config dict, or {} when nothing matches
    """
    import fnmatch
    if version is not None:
        # renamed loop variable: the original shadowed the builtin `file`
        for file_name in os.listdir(config_path):
            if fnmatch.fnmatch(file_name, 'command-*.json'):
                contents = sudo.read_file(os.path.join(config_path, file_name))
                version_config = json.loads(contents)
                if config_name in version_config['configurationTags'] and \
                                version_config['configurationTags'][config_name]['tag'] == version:
                    return version_config

    return {}
Esempio n. 15
0
def remove_config_version(version_file, version_type, nifi_user, nifi_group):
    if sudo.path_isfile(version_file):
        contents = sudo.read_file(version_file)
        version = json.loads(contents)
        version.pop(version_type, None)
        sudo.unlink(version_file)

        File(version_file,
             owner=nifi_user,
             group=nifi_group,
             mode=0600,
             content=json.dumps(version))
Esempio n. 16
0
    def service_check(self, env):
        """Smoke test for Microsoft R: run the bundled Revo64 check script and
        verify the per-weekday counts in its output against expected values."""
        import params
        env.set_params(params)

        if params.security_enabled:
            kinit_cmd = format(
                "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};"
            )
            Execute(kinit_cmd, user=params.smokeuser)

        output_file = format('{tmp_dir}/microsoft_r_serviceCheck.out')

        File(format("{tmp_dir}/microsoft_r_serviceCheck.r"),
             content=StaticFile("microsoft_r_serviceCheck.r"),
             mode=0755)

        Execute(format(
            "Revo64 --no-save  < {tmp_dir}/microsoft_r_serviceCheck.r | tee {output_file}"
        ),
                tries=1,
                try_sleep=1,
                path=format('/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'),
                user=params.smokeuser,
                logoutput=True)

        # Verify correct output
        from resource_management.core import sudo
        output_content = sudo.read_file(format('{output_file}'))
        import re
        # expect exactly one "<Day> <count>" pair per weekday in the output
        values_list = re.findall(
            r"\s(Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday)\s+(\d*)",
            output_content)
        if 7 != len(values_list):
            Logger.info("Unable to verify output of service check run")
            raise Fail("Unable to verify output of service check run")
        # counts the bundled check script is expected to produce
        dayCountDictionary = {
            'Monday': '97975',
            'Tuesday': '77725',
            'Wednesday': '78875',
            'Thursday': '81304',
            'Friday': '82987',
            'Saturday': '86159',
            'Sunday': '94975'
        }
        for (day, count) in values_list:
            if count != dayCountDictionary[day]:
                Logger.info(
                    "Service check produced incorrect output for {0}. Was expecting {1} but encountered {2}"
                    .format(day, dayCountDictionary[day], count))
                raise Fail(
                    "Service check produced incorrect output for {0}. Was expecting {1} but encountered {2}"
                    .format(day, dayCountDictionary[day], count))
Esempio n. 17
0
    def action_create(self):
        """Create the file resource, writing only when the content differs,
        and apply owner/group/mode metadata -- via the on-create callback for
        a fresh write, or directly on the existing file otherwise."""
        path = self.resource.path

        if sudo.path_isdir(path):
            raise Fail("Applying %s failed, directory with name %s exists" %
                       (self.resource, path))

        dirname = os.path.dirname(path)
        if not sudo.path_isdir(dirname):
            raise Fail(
                "Applying %s failed, parent directory %s doesn't exist" %
                (self.resource, dirname))

        write = False
        content = self._get_content()
        if not sudo.path_exists(path):
            write = True
            reason = "it doesn't exist"
        elif self.resource.replace:
            if content is not None:
                old_content = sudo.read_file(path,
                                             encoding=self.resource.encoding)
                if content != old_content:
                    write = True
                    reason = "contents don't match"
                    if self.resource.backup:
                        self.resource.env.backup_file(path)

        # fall back to root ownership when the resource does not specify it
        owner = self.resource.owner or 'root'
        group = self.resource.group or 'root'

        if write:
            Logger.info("Writing %s because %s" % (self.resource, reason))

            # callback: metadata is applied to the temp file before it is
            # moved into place (per the "Moving ..." log below)
            def on_file_created(filename):
                _ensure_metadata(filename,
                                 owner,
                                 group,
                                 mode=self.resource.mode,
                                 cd_access=self.resource.cd_access)
                Logger.info("Moving %s to %s" % (filename, path))

            sudo.create_file(path,
                             content,
                             encoding=self.resource.encoding,
                             on_file_created=on_file_created)
        else:
            _ensure_metadata(path,
                             owner,
                             group,
                             mode=self.resource.mode,
                             cd_access=self.resource.cd_access)
Esempio n. 18
0
 def get_content(self):
   """Return the bytes of the source file; relative names are resolved
   against <basedir>/files, absolute names are used as-is."""
   if self.name.startswith(os.path.sep):
     path = self.name
   else:
     path = os.path.join(self.env.config.basedir, "files", self.name)

   if not (os.path.isfile(path) or os.path.islink(path)):
     raise Fail("{0} Source file {1} is not found".format(repr(self), path))

   return sudo.read_file(path)
Esempio n. 19
0
    def read_kafka_config(self):
        """Parse conf_dir/server.properties into a dict.

        Comment lines and blank lines are skipped.

        @return: dict of property name -> value
        """
        import params

        kafka_config = {}
        content = sudo.read_file(params.conf_dir + "/server.properties")
        for line in content.splitlines():
            if line.startswith("#") or not line.strip():
                continue

            # split on the first '=' only: values such as URLs may contain
            # '=', which previously raised ValueError on unpacking
            key, value = line.split("=", 1)
            kafka_config[key] = value.replace("\n", "")

        return kafka_config
Esempio n. 20
0
  def read_kafka_config(self):
    """Parse conf_dir/server.properties into a dict.

    Comment lines and blank lines are skipped.

    @return: dict of property name -> value
    """
    import params

    kafka_config = {}
    content = sudo.read_file(params.conf_dir + "/server.properties")
    for line in content.splitlines():
      if line.startswith("#") or not line.strip():
        continue

      # split on the first '=' only: values such as URLs may contain '=',
      # which previously raised ValueError on unpacking
      key, value = line.split("=", 1)
      kafka_config[key] = value.replace("\n", "")

    return kafka_config
Esempio n. 21
0
    def get_content(self):
        """Return the bytes of the source file.

        Relative names are resolved against <basedir>/files; absolute
        names are used as-is.
        """
        if self.name.startswith(os.path.sep):
            resolved = self.name
        else:
            resolved = os.path.join(self.env.config.basedir, "files",
                                    self.name)

        if not (os.path.isfile(resolved) or os.path.islink(resolved)):
            raise Fail("{0} Source file {1} is not found".format(
                repr(self), resolved))

        return sudo.read_file(resolved)
Esempio n. 22
0
 def _configure_pg_hba_ambaridb_users(conf_file, database_username):
   """Append pg_hba access rules for the ambari DB users and reload the
   postgres configuration; raises FatalException when the reload fails."""
   users = database_username + ",mapred"
   conf_file_content_in = sudo.read_file(conf_file)
   # original content, three access rules, and a trailing newline
   conf_file_content_out = "\n".join([
     conf_file_content_in,
     "local  all  " + users + " md5",
     "host  all   " + users + " 0.0.0.0/0  md5",
     "host  all   " + users + " ::/0 md5",
     "",
   ])
   sudo.create_file(conf_file, conf_file_content_out)
   retcode, out, err = run_os_command(PGConfig.PG_HBA_RELOAD_CMD)
   if retcode != 0:
     raise FatalException(retcode, err)
Esempio n. 23
0
  def post_start(self, env=None):
    """Verify that every expected pid file appeared after the component
    started, and log the pids; raises Fail when one is missing."""
    pid_files = self.get_pid_files()
    if pid_files == []:
      Logger.logger.warning("Pid files for current script are not defined")
      return

    started_pids = []
    for pid_file in pid_files:
      if not sudo.path_exists(pid_file):
        raise Fail("Pid file {0} doesn't exist after starting of the component.".format(pid_file))
      started_pids.append(sudo.read_file(pid_file).strip())

    Logger.info("Component has started with pid(s): {0}".format(', '.join(started_pids)))
Esempio n. 24
0
  def stop(self, env, rolling_restart=False):
    """Stop kafka-manager by sending SIGTERM to the pid from its pid file.

    Best-effort: a missing/unreadable pid file or a failed kill only logs a
    warning and returns.
    """
    import params
    env.set_params(params)
    self.configure(env)
    try:
        pid = int(sudo.read_file(params.kafka_manager_pid_file))
        code, out = shell.call(["kill","-15", str(pid)])
    except Exception:
        # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
        Logger.warning("Pid file {0} does not exist".format(params.kafka_manager_pid_file))
        return

    if code:
       Logger.warning("Process with pid {0} is not running. Stale pid file"
                 " at {1}".format(pid, params.kafka_manager_pid_file))
Esempio n. 25
0
def contains_providers(provider_file, tag):
    """Return True when the XML *provider_file* exists and contains at least
    one element named *tag*.

    @param provider_file: path to the XML providers file
    @param tag: element tag name to look for
    @return: bool
    """
    # dropped the unused `from xml.dom.minidom import parseString`
    import xml.dom.minidom

    if not sudo.path_isfile(provider_file):
        return False

    content = sudo.read_file(provider_file)
    dom = xml.dom.minidom.parseString(content)
    collection = dom.documentElement
    # getElementsByTagName returns a NodeList; truthy iff non-empty
    return bool(collection.getElementsByTagName(tag))
Esempio n. 26
0
 def action_create(self):
   # Render the repo template and write <repo_dir>/<repo_file_name>.repo,
   # appending to the existing file when append_to_file is set.
   with Environment.get_instance_copy() as env:
     repo_file_name = self.resource.repo_file_name
     repo_dir = get_repo_dir()
     new_content = InlineTemplate(self.resource.repo_template, repo_id=self.resource.repo_id, repo_file_name=self.resource.repo_file_name,
                            base_url=self.resource.base_url, mirror_list=self.resource.mirror_list)
     # NOTE: this format() has no keyword args yet references {repo_dir} and
     # {repo_file_name} -- it appears to interpolate from the locals above
     # (Ambari-style format, not str.format); do not rename those locals
     repo_file_path = format("{repo_dir}/{repo_file_name}.repo")
     if self.resource.append_to_file and os.path.isfile(repo_file_path):
       content = sudo.read_file(repo_file_path) + '\n' + new_content.get_content()
     else:
       content = new_content

     File(repo_file_path, 
          content=content
     )
Esempio n. 27
0
 def stop(self, env):
     """Stop Flink: kill the YARN application when running on YARN,
     otherwise stop the standalone cluster and remove the jobmanager pid."""
     import params
     import status_params
     from resource_management.core import sudo
     if params.flink_start_on_yarn:
         Logger.info("flink stop on yarn")
         # the pid file content is passed to `yarn application -kill`, so it
         # presumably holds the YARN application id -- confirm against start
         pid = str(sudo.read_file(status_params.flink_pid_file))
         Execute('yarn application -kill ' + pid, user=params.flink_user)
         Execute('rm ' + status_params.flink_pid_file, ignore_failures=True)
     else:
         Logger.info("flink stop cluster")
         # NOTE(review): the user value looks scrubbed/placeholder in this
         # source -- verify the intended run-as user
         Execute("/usr/hdp/2.5.0.0-1245/flink/bin//stop-cluster.sh",
                 user="******",
                 logoutput=True)
         cmd = format("rm -rf {status_params.jobmanager_pid_file}")
         Execute(cmd)
Esempio n. 28
0
def save_config_version(version_file, version_type, version_num,
                        nifi_registry_user, nifi_registry_group):
    version = {}
    if sudo.path_isfile(version_file):
        contents = sudo.read_file(version_file)
        version = json.loads(contents)
        version[version_type] = version_num
        sudo.unlink(version_file)
    else:
        version[version_type] = version_num

    File(version_file,
         owner=nifi_registry_user,
         group=nifi_registry_group,
         mode=0600,
         content=json.dumps(version))
Esempio n. 29
0
    def stop(self, env):
        """Stop the NiFi CA: send SIGTERM repeatedly, escalate to SIGKILL,
        and remove the pid file once the component reports not running."""
        import status_params

        if path_isfile(status_params.nifi_ca_pid_file):
            try:
                self.status(env)
                pid = int(read_file(status_params.nifi_ca_pid_file))
                # self.status() raises ComponentIsNotRunning once the process
                # dies, which exits this loop into the except handler below
                for i in range(25):
                    kill(pid, SIGTERM)
                    time.sleep(1)
                    self.status(env)
                # still alive after 25 SIGTERM attempts: force kill
                kill(pid, SIGKILL)
                time.sleep(5)
                self.status(env)
            except ComponentIsNotRunning:
                unlink(status_params.nifi_ca_pid_file)
  def stop(self, env):
    """Stop the archiver process with ``kill -9`` using its pid file.

    @raise ComponentIsNotRunning: if the pid file is missing or does not
           contain a valid process id
    """
    import status_params
    env.set_params(status_params)

    if not status_params.archiver_pid_file or not os.path.isfile(status_params.archiver_pid_file):
      Logger.info("Pid file {0} is empty or does not exist".format(str(status_params.archiver_pid_file)))
      raise ComponentIsNotRunning()

    try:
      pid = int(sudo.read_file(status_params.archiver_pid_file))
    except Exception:
      # narrowed from a bare except so SystemExit/KeyboardInterrupt propagate
      Logger.info("Pid file {0} does not exist or does not contain a process id number".format(status_params.archiver_pid_file))
      raise ComponentIsNotRunning()

    # format() interpolates the local {pid} -- do not rename it
    stop_cmd = format("kill -9 {pid}")
    Execute(stop_cmd)
Esempio n. 31
0
 def check_flink_status(self, pid_file):
     """Raise ComponentIsNotRunning unless the application id stored in
     *pid_file* appears in the `yarn application -list` output."""
     from resource_management.core.exceptions import ComponentIsNotRunning
     from resource_management.core import sudo
     from subprocess import PIPE, Popen
     import shlex
     if not os.path.exists(pid_file) or not os.path.isfile(pid_file):
         raise ComponentIsNotRunning()
     try:
         pid = str(sudo.read_file(pid_file))
         cmd_line = "/usr/bin/yarn application -list"
         args = shlex.split(cmd_line)
         proc = Popen(args, stdout=PIPE)
         p = str(proc.communicate()[0].split())
         # application id not found in the listing -> not running
         if p.find(pid.strip()) < 0:
             raise ComponentIsNotRunning()
     except Exception, e:  # Python 2 syntax; any failure counts as not running
         raise ComponentIsNotRunning()
Esempio n. 32
0
    def action_create(self):
        """Create the file resource, writing only when content differs.

        The on-disk content is decoded with the resource encoding before the
        comparison and the new content is encoded back before writing.
        Ownership/mode metadata is always (re)applied.
        """
        path = self.resource.path

        if os.path.isdir(path):
            raise Fail("Applying %s failed, directory with name %s exists" %
                       (self.resource, path))

        dirname = os.path.dirname(path)
        if not os.path.isdir(dirname):
            raise Fail(
                "Applying %s failed, parent directory %s doesn't exist" %
                (self.resource, dirname))

        write = False
        content = self._get_content()
        if not os.path.exists(path):
            write = True
            reason = "it doesn't exist"
        elif self.resource.replace:
            if content is not None:
                old_content = sudo.read_file(path)
                # decode so the comparison happens on text, not raw bytes
                old_content = old_content.decode(
                    self.resource.encoding
                ) if self.resource.encoding else old_content
                if content != old_content:
                    write = True
                    reason = "contents don't match"
                    if self.resource.backup:
                        self.resource.env.backup_file(path)

        if write:
            Logger.info("Writing %s because %s" % (self.resource, reason))

            if content:
                # encode back to bytes for the actual write
                content = content.encode(
                    self.resource.encoding
                ) if self.resource.encoding else content

            sudo.create_file(path, content)

        _ensure_metadata(self.resource.path,
                         self.resource.owner,
                         self.resource.group,
                         mode=self.resource.mode,
                         cd_access=self.resource.cd_access)
Esempio n. 33
0
 def check_flink_status(self, pid_file):
   """Raise ComponentIsNotRunning unless the application id stored in
   *pid_file* appears in the `yarn application -list` output."""
   from datetime import datetime 
   from resource_management.core.exceptions import ComponentIsNotRunning
   from resource_management.core import sudo
   from subprocess import PIPE,Popen
   import shlex, subprocess
   if not pid_file or not os.path.isfile(pid_file):
     raise ComponentIsNotRunning()
   try:
     pid = str(sudo.read_file(pid_file)) 
     cmd_line = "/usr/bin/yarn application -list"
     args = shlex.split(cmd_line)
     proc = Popen(args, stdout=PIPE)
     p = str(proc.communicate()[0].split())
     # application id not found in the listing -> not running
     if p.find(pid.strip()) < 0:
       raise ComponentIsNotRunning() 
   except Exception, e:  # Python 2 syntax; any failure counts as not running
     raise ComponentIsNotRunning()
Esempio n. 34
0
  def get_interpreter_settings(self):
    """Load interpreter.json (first fetching it from HDFS when the config is
    stored there) and return the parsed settings dict."""
    import params
    import json

    interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
    if params.conf_stored_in_hdfs:
      zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)

      if self.is_file_exists_in_HDFS(zeppelin_conf_fs, params.zeppelin_user):
        # copy from hdfs to /etc/zeppelin/conf/interpreter.json
        kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths',None))
        # NOTE: format() interpolates from surrounding locals/params
        # (Ambari-style format, not str.format); do not rename these locals
        kinit_if_needed = format("{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};")
        shell.call(format("rm {interpreter_config};"
                          "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"),
                   user=params.zeppelin_user)

    config_content = sudo.read_file(interpreter_config)
    config_data = json.loads(config_content)
    return config_data
Esempio n. 35
0
  def start(self, env):
    """Start the AMS Grafana server, verify its pid file appeared, then
    provision the AMS datasource and pre-built dashboards."""
    import params
    env.set_params(params)
    self.configure(env, action = 'start')

    start_cmd = format("{ams_grafana_script} start")
    # not_if skips the start when the grafana process already exists
    Execute(start_cmd,
            user=params.ams_user,
            not_if = params.grafana_process_exists_cmd,
            )
    pidfile = format("{ams_grafana_pid_dir}/grafana-server.pid")
    if not sudo.path_exists(pidfile):
      Logger.warn("Pid file doesn't exist after starting of the component.")
    else:
      Logger.info("Grafana Server has started with pid: {0}".format(sudo.read_file(pidfile).strip()))

    # Create datasource
    create_ams_datasource()
    # Create pre-built dashboards
    create_ams_dashboards()
  def action_create(self):
    """Create or update a key/value properties file in place.

    Existing lines whose key appears in ``self.resource.properties`` are
    rewritten with the new value; properties not found in the file are
    appended at the end.  The final text is written back via File().
    """
    filename = self.resource.filename
    comment_symbols = self.resource.comment_symbols
    delimiter = self.resource.key_value_delimiter
    properties = self.resource.properties
    # Python 2: dict.keys() returns a list, so .remove() below is valid
    unsaved_values = properties.keys()
    new_content_lines = []
    
    if sudo.path_isfile(filename):
      file_content = sudo.read_file(filename, encoding=self.resource.encoding)
      new_content_lines += file_content.split('\n')

      Logger.info(format("Modifying existing properties file: (unknown)"))
      
      for line_num in range(len(new_content_lines)):
        line = new_content_lines[line_num]
        
        # only rewrite lines that are non-blank, are not comments, and
        # contain the key/value delimiter
        if line.lstrip() and not line.lstrip()[0] in comment_symbols and delimiter in line:
          in_var_name = line.split(delimiter)[0].strip()
          in_var_value = line.split(delimiter)[1].strip()
          
          if in_var_name in properties:
            value = InlineTemplate(unicode(properties[in_var_name])).get_content()
            new_content_lines[line_num] = u"{0}{1}{2}".format(unicode(in_var_name), delimiter, value)
            unsaved_values.remove(in_var_name)
    else:
      Logger.info(format("Creating new properties file as (unknown) doesn't exist"))
       
    # append properties that were not present in the original file
    for property_name in unsaved_values:
      value = InlineTemplate(unicode(properties[property_name])).get_content()
      line = u"{0}{1}{2}".format(unicode(property_name), delimiter, value)
      new_content_lines.append(line)
          
    with Environment.get_instance_copy() as env:
      File (filename,
            content = u"\n".join(new_content_lines) + "\n",
            owner = self.resource.owner,
            group = self.resource.group,
            mode = self.resource.mode,
            encoding = self.resource.encoding,
      )
Esempio n. 37
0
 def action_create(self):
   """
   Write an apt-style repository .list file and refresh the package index.

   The repository definition is rendered from self.resource.repo_template
   into a temp file (optionally appended to the current file's content) and
   copied over the real file only when it differs; in that case the index
   update command (self.update_cmd) is run and any missing public keys it
   reports are imported best-effort.
   """
   with Environment.get_instance_copy() as env:
     with tempfile.NamedTemporaryFile() as tmpf:
       with tempfile.NamedTemporaryFile() as old_repo_tmpf:
         repo_file_name = format("{repo_file_name}.list",repo_file_name=self.resource.repo_file_name)
         repo_file_path = format("{repo_dir}/{repo_file_name}", repo_dir=self.repo_dir)
 
         # Render the repository definition from the template.
         new_content = InlineTemplate(self.resource.repo_template, package_type=self.package_type,
                                       base_url=self.resource.base_url,
                                       components=' '.join(self.resource.components)).get_content()
         old_content = ''
         if self.resource.append_to_file and os.path.isfile(repo_file_path):
             old_content = sudo.read_file(repo_file_path) + '\n'
 
         File(tmpf.name, 
              content=old_content+new_content
         )
         
         if os.path.isfile(repo_file_path):
           # a copy of old repo file, which will be readable by current user
           File(old_repo_tmpf.name, 
                content=StaticFile(repo_file_path),
           )
 
         # Only rewrite the repo file (and pay for the index update) when content changed.
         if not os.path.isfile(repo_file_path) or not filecmp.cmp(tmpf.name, old_repo_tmpf.name):
           File(repo_file_path,
                content = StaticFile(tmpf.name)
           )
           
           update_cmd_formatted = [format(x) for x in self.update_cmd]
           # this is time expensive
           retcode, out = checked_call(update_cmd_formatted, sudo=True, quiet=False)
           
           # add public keys for new repos
           missing_pkeys = set(re.findall(self.missing_pkey_regex, out))
           for pkey in missing_pkeys:
             Execute(self.app_pkey_cmd_prefix + (pkey,),
                     timeout = 15, # in case we are on the host w/o internet (using localrepo), we should ignore hanging
                     ignore_failures = True,
                     sudo = True,
             )
Esempio n. 38
0
    def get_content(self):
        """
        Return the content of the file referenced by self.url.

        Downloads the file (bypassing proxies when self.ignore_proxy is set)
        unless caching is enabled and a previously downloaded copy already
        exists under self.download_path, in which case the cached copy is
        returned instead.

        @raise Fail: if the download directory doesn't exist or the HTTP
                     request fails.
        """
        if self.download_path and not os.path.exists(self.download_path):
            raise Fail(
                "Directory {0} doesn't exist, please provide valid download path"
                .format(self.download_path))

        # Derive a local file name from the URL path; fall back to a
        # timestamped name for URLs with an empty path.
        if urlparse.urlparse(self.url).path:
            filename = os.path.basename(urlparse.urlparse(self.url).path)
        else:
            filename = 'index.html.{0}'.format(time.time())

        filepath = os.path.join(self.download_path, filename)

        if not self.cache or not os.path.exists(filepath):
            Logger.info("Downloading the file from {0}".format(self.url))

            if self.ignore_proxy:
                opener = urllib2.build_opener(urllib2.ProxyHandler({}))
            else:
                opener = urllib2.build_opener()

            req = urllib2.Request(self.url)

            try:
                web_file = opener.open(req)
            except urllib2.HTTPError as ex:
                raise Fail(
                    "Failed to download file from {0} due to HTTP error: {1}".
                    format(self.url, str(ex)))

            # Close the response explicitly: the original code leaked the
            # underlying socket until garbage collection.
            try:
                content = web_file.read()
            finally:
                web_file.close()

            if self.cache:
                sudo.create_file(filepath, content)
        else:
            Logger.info(
                "Not downloading the file from {0}, because {1} already exists"
                .format(self.url, filepath))
            content = sudo.read_file(filepath)

        return content
Esempio n. 39
0
 def check_flink_status(self, pid_file):
   """
   Check whether the Flink YARN application recorded in pid_file is running.

   Reads the application id from pid_file (e.g. /var/run/flink/flink.pid)
   and looks it up in the output of `yarn application -list`.

   @param pid_file: path to the file holding the Flink YARN application id
   @raise ComponentIsNotRunning: if the pid file is missing/unreadable or
          the application id does not appear in the running-applications list
   """
   from resource_management.core.exceptions import ComponentIsNotRunning
   from resource_management.core import sudo
   from subprocess import PIPE, Popen
   import shlex

   if not pid_file or not os.path.isfile(pid_file):
     raise ComponentIsNotRunning()
   try:
     app_id = str(sudo.read_file(pid_file))
     # shlex.split tokenizes the command line safely, e.g.
     # shlex.split('ls ps top grep pkill') -> ['ls', 'ps', 'top', 'grep', 'pkill']
     args = shlex.split("/usr/bin/yarn application -list")
     proc = Popen(args, stdout=PIPE)
     app_listing = str(proc.communicate()[0].split())
     if app_listing.find(app_id.strip()) < 0:
       raise ComponentIsNotRunning()
   except Exception:
     # `except Exception, e` was Python-2-only syntax and `e` was unused;
     # any failure here (unreadable pid file, yarn unavailable, app missing)
     # is reported uniformly as "component is not running".
     raise ComponentIsNotRunning()
Esempio n. 40
0
    def encrypt_sensitive_properties(self, config_version_file, current_version, nifi_config_dir, jdk64_home, nifi_user,
                                     nifi_group, master_key_password, nifi_flow_config_dir, nifi_sensitive_props_key,
                                     is_starting):
        """
        Run the NiFi toolkit's encrypt-config.sh to encrypt sensitive
        configuration properties.

        The toolkit command line is assembled incrementally: nifi.properties
        (via bootstrap.conf) is always processed; a non-empty flow.xml.gz and
        a login-identity-providers.xml with providers are added when present;
        and when the master key password changed since the last recorded
        config version, a key migration (-m -w <old password>) is requested.
        Runs only when is_starting is true, then persists the config version.

        NOTE(review): passwords are wrapped in PasswordString and logoutput is
        disabled, presumably to keep secrets out of the agent logs — confirm.
        """
        Logger.info("Encrypting NiFi sensitive configuration properties")
        encrypt_config_script = nifi_toolkit_util.get_toolkit_script('encrypt-config.sh')
        encrypt_config_script_prefix = ('JAVA_HOME=' + jdk64_home, encrypt_config_script)
        # Make sure the toolkit script is executable.
        File(encrypt_config_script, mode=0755)

        if is_starting:
            last_master_key_password = None
            last_config_version = nifi_toolkit_util.get_config_version(config_version_file, 'encrypt')
            encrypt_config_script_params = ('-v', '-b', nifi_config_dir + '/bootstrap.conf')
            encrypt_config_script_params = encrypt_config_script_params + ('-n', nifi_config_dir + '/nifi.properties')

            # Only pass the flow file when it exists and is non-empty.
            if sudo.path_isfile(nifi_flow_config_dir + '/flow.xml.gz') and len(
                    sudo.read_file(nifi_flow_config_dir + '/flow.xml.gz')) > 0:
                encrypt_config_script_params = encrypt_config_script_params + (
                    '-f', nifi_flow_config_dir + '/flow.xml.gz', '-s', PasswordString(nifi_sensitive_props_key))

            if nifi_toolkit_util.contains_providers(nifi_config_dir + '/login-identity-providers.xml'):
                encrypt_config_script_params = encrypt_config_script_params + (
                    '-l', nifi_config_dir + '/login-identity-providers.xml')

            # Look up the previously used master key password, if any version was recorded.
            if last_config_version:
                last_config = nifi_toolkit_util.get_config_by_version('/var/lib/ambari-agent/data',
                                                                      'nifi-ambari-config', last_config_version)
                last_master_key_password = last_config['configurations']['nifi-ambari-config'][
                    'nifi.security.encrypt.configuration.password']

            # A changed master key requires migrating from the old key (-m -w <old>).
            if last_master_key_password and last_master_key_password != master_key_password:
                encrypt_config_script_params = encrypt_config_script_params + (
                    '-m', '-w', PasswordString(last_master_key_password))

            encrypt_config_script_params = encrypt_config_script_params + ('-p', PasswordString(master_key_password))
            encrypt_config_script_prefix = encrypt_config_script_prefix + encrypt_config_script_params
            Execute(encrypt_config_script_prefix, user=nifi_user, logoutput=False)
            # Record that this config version has been encrypted.
            nifi_toolkit_util.save_config_version(config_version_file, 'encrypt', current_version, nifi_user,
                                                  nifi_group)
Esempio n. 41
0
    def get_interpreter_settings(self):
        """
        Return Zeppelin's interpreter settings as a parsed dict.

        When Zeppelin stores its config in HDFS (FileSystemNotebookRepo with
        zeppelin.config.fs.dir set) and a copy of interpreter.json exists
        there, that copy is first fetched into the local conf dir, kinit'ing
        beforehand when Kerberos keytab/principal are configured. Otherwise
        the local conf_dir/interpreter.json is read as-is.
        """
        import params
        import json

        interpreter_config = os.path.join(params.conf_dir, "interpreter.json")
        if 'zeppelin.notebook.storage' in params.config['configurations']['zeppelin-config'] \
          and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':

            if 'zeppelin.config.fs.dir' in params.config['configurations'][
                    'zeppelin-config']:
                zeppelin_conf_fs = self.get_zeppelin_conf_FS(params)

                if self.is_path_exists_in_HDFS(zeppelin_conf_fs,
                                               params.zeppelin_user):
                    # copy from hdfs to /etc/zeppelin/conf/interpreter.json
                    kinit_path_local = get_kinit_path(
                        default(
                            '/configurations/kerberos-env/executable_search_paths',
                            None))
                    kinit_if_needed = format(
                        "{kinit_path_local} -kt {zeppelin_kerberos_keytab} {zeppelin_kerberos_principal};"
                    )
                    # NOTE(review): format() appears to resolve {interpreter_config},
                    # {kinit_if_needed} and {zeppelin_conf_fs} implicitly from the
                    # local scope here — do not rename these locals.
                    shell.call(format(
                        "rm {interpreter_config};"
                        "{kinit_if_needed} hdfs --config {hadoop_conf_dir} dfs -get {zeppelin_conf_fs} {interpreter_config}"
                    ),
                               user=params.zeppelin_user)
                else:
                    Logger.info(
                        format(
                            "{zeppelin_conf_fs} does not exist. Skipping upload of DFS."
                        ))

        # Read the (possibly just-fetched) local interpreter.json with elevated
        # privileges and parse it.
        config_content = sudo.read_file(interpreter_config)
        config_data = json.loads(config_content)
        return config_data
Esempio n. 42
0
  def action_create(self):
    """
    Generate the repository .list file for this repo resource.

    A candidate file is rendered into a temp location and installed over the
    real file only when the content differs; after installing, the package
    index is refreshed via self.update(), and on failure the freshly written
    file is removed again so the repository setup is retried next run.
    """
    with tempfile.NamedTemporaryFile() as tmpf, tempfile.NamedTemporaryFile() as old_repo_tmpf:
      repo_file_name = format("{repo_file_name}.list",repo_file_name=self.resource.repo_file_name)
      repo_file_path = format("{repo_dir}/{repo_file_name}", repo_dir=self.repo_dir)

      # Render the repository definition from the template.
      new_content = InlineTemplate(self.resource.repo_template, package_type=self.package_type,
                                    base_url=self.resource.base_url,
                                    components=' '.join(self.resource.components)).get_content()
      old_content = ''
      if self.resource.append_to_file and os.path.isfile(repo_file_path):
          old_content = sudo.read_file(repo_file_path) + '\n'

      # Candidate content, written to a temp file for comparison.
      File(tmpf.name, content=old_content + new_content)

      if os.path.isfile(repo_file_path):
        # Stage a copy of the existing repo file readable by the current user.
        File(old_repo_tmpf.name, content=StaticFile(repo_file_path))

      # Install only when the file is new or its content changed.
      if not os.path.isfile(repo_file_path) or not filecmp.cmp(tmpf.name, old_repo_tmpf.name):
        File(repo_file_path, content=StaticFile(tmpf.name))

        try:
          self.update(repo_file_path)
        except:
          # Remove the created file, or else ambari will consider the update
          # successful and skip repository operations on the next run.
          File(repo_file_path, action="delete")
          raise
Esempio n. 43
0
 def read_file(self, path):
     """Return the contents of *path*, read with elevated privileges via sudo.

     @param path: path of the file to read
     """
     from resource_management.core import sudo
     return sudo.read_file(path)