Example #1
def dumpJson(json_object, dump_file):
  try:
    with open(dump_file, 'w') as out:
      json.dump(json_object, out, indent=1)
  except Exception as err:
    traceback.print_exc()
    raise StackAdvisorException("Error writing to file {0} : {1}".format(dump_file, str(err)))
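The helper above relies on module-level imports and on the project's own exception type. A minimal self-contained sketch of the same pattern, where StackAdvisorException is a stand-in class (an assumption, not Ambari's actual definition):

import json
import traceback

class StackAdvisorException(Exception):
  """Stand-in for the project's exception class."""

def dumpJson(json_object, dump_file):
  try:
    with open(dump_file, 'w') as out:
      json.dump(json_object, out, indent=1)
  except Exception as err:
    traceback.print_exc()
    raise StackAdvisorException("Error writing to file {0} : {1}".format(dump_file, str(err)))

# Example call: serializes a small dict to services.json with indent=1.
dumpJson({"services": []}, "services.json")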
Example #2
 def put_structured_out(self, sout):
   Script.structuredOut.update(sout)
   try:
     with open(self.stroutfile, 'w') as fp:
       json.dump(Script.structuredOut, fp)
   except IOError, err:
     Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
Example #3
 def put_structured_out(self, sout):
   Script.structuredOut.update(sout)
   try:
     with open(self.stroutfile, 'w') as fp:
       json.dump(Script.structuredOut, fp)
   except IOError, err:
     Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
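Examples #2 and #3 show the same Python 2 method (`except IOError, err:` is Python 2 syntax). A rough Python 3 sketch of the pattern, with a simplified stand-in for the Script class; the constructor and file name here are illustrative, not Ambari's API:

import json

class Script(object):
  # Class-level dict shared by every instance, mirroring Script.structuredOut above.
  structuredOut = {}

  def __init__(self, stroutfile):
    self.stroutfile = stroutfile

  def put_structured_out(self, sout):
    Script.structuredOut.update(sout)
    try:
      with open(self.stroutfile, 'w') as fp:
        json.dump(Script.structuredOut, fp)
    except IOError:
      Script.structuredOut.update({"errMsg": "Unable to write to " + self.stroutfile})

# Usage: merge a status dict into the shared structured output and persist it.
Script("structured-out.json").put_structured_out({"exit_code": 0})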
Example #4
  def update_definitions(self, heartbeat):
    """
    Updates the persisted alert definitions JSON.
    :param heartbeat:
    :return:
    """
    if 'alertDefinitionCommands' not in heartbeat:
      logger.warning("There are no alert definition commands in the heartbeat; unable to update definitions")
      return

    # prune out things we don't want to store
    alert_definitions = []
    for command in heartbeat['alertDefinitionCommands']:
      command_copy = command.copy()

      # no need to store these since we always use the in-memory cached values
      if 'configurations' in command_copy:
        del command_copy['configurations']

      alert_definitions.append(command_copy)

    # write out the new definitions
    with open(os.path.join(self.cachedir, self.FILENAME), 'w') as f:
      json.dump(alert_definitions, f, indent=2)

    # reschedule only the jobs that have changed
    self.reschedule()
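A compact sketch of the prune-and-persist step in isolation; the heartbeat payload, cache directory, and file name below are illustrative placeholders, not values taken from the agent code:

import json
import os

def prune_and_persist(heartbeat, cache_dir, filename="definitions.json"):
  # Drop the bulky 'configurations' block from each command before persisting,
  # keeping only what needs to survive an agent restart.
  definitions = []
  for command in heartbeat.get('alertDefinitionCommands', []):
    command_copy = command.copy()
    command_copy.pop('configurations', None)
    definitions.append(command_copy)

  with open(os.path.join(cache_dir, filename), 'w') as f:
    json.dump(definitions, f, indent=2)

heartbeat = {'alertDefinitionCommands': [
    {'clusterName': 'c1', 'alertDefinitions': [], 'configurations': {'large': 'blob'}}]}
prune_and_persist(heartbeat, '.')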
Example #5
  def delete_storm_local_data(self, env):
    """
    Deletes Storm data from local directories. This will create a marker file
    with JSON data representing the upgrade stack and request/stage ID. This
    will prevent multiple Storm components on the same host from removing
    the local directories more than once.
    :return:
    """
    import params

    Logger.info('Clearing Storm data from local directories...')

    storm_local_directory = params.local_dir
    if storm_local_directory is None:
      raise Fail("The storm local directory specified by storm-site/storm.local.dir must be specified")

    request_id = default("/requestId", None)
    stage_id = default("/stageId", None)
    stack_version = params.version
    stack_name = params.stack_name

    json_map = {}
    json_map["requestId"] = request_id
    json_map["stageId"] = stage_id
    json_map["stackVersion"] = stack_version
    json_map["stackName"] = stack_name

    temp_directory = params.tmp_dir
    upgrade_file = os.path.join(temp_directory, "storm-upgrade-{0}.json".format(stack_version))

    if os.path.exists(upgrade_file):
      try:
        with open(upgrade_file) as file_pointer:
          existing_json_map = json.load(file_pointer)

        if cmp(json_map, existing_json_map) == 0:
          Logger.info("The storm upgrade has already removed the local directories for {0}-{1} for request {2} and stage {3}".format(
            stack_name, stack_version, request_id, stage_id))

          # nothing else to do here for this as it appears to have already been
          # removed by another component being upgraded
          return

      except:
        Logger.error("The upgrade file {0} appears to be corrupt; removing...".format(upgrade_file))
        File(upgrade_file, action="delete")
    else:
      # delete the upgrade file since it does not match
      File(upgrade_file, action="delete")

    # delete from local directory
    Directory(storm_local_directory, action="delete", recursive=True)

    # recreate storm local directory
    Directory(storm_local_directory, mode=0755, owner = params.storm_user,
      group = params.user_group, recursive = True)

    # the file doesn't exist, so create it
    with open(upgrade_file, 'w') as file_pointer:
      json.dump(json_map, file_pointer, indent=2)
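The cmp(json_map, existing_json_map) == 0 test is Python 2 only; in Python 3 a plain == between the two dicts gives the same result. A trimmed sketch of the marker-file idempotency check, with illustrative paths and keys:

import json
import os

def already_done(marker_file, json_map):
  """Return True if an identical marker file already exists."""
  if not os.path.exists(marker_file):
    return False
  try:
    with open(marker_file) as fp:
      return json.load(fp) == json_map  # dict equality replaces cmp()
  except ValueError:
    # Corrupt marker file: remove it and treat the work as not yet done.
    os.remove(marker_file)
    return False

json_map = {"requestId": 1, "stageId": 2, "stackVersion": "2.3", "stackName": "HDP"}
if not already_done("storm-upgrade-2.3.json", json_map):
  # ... clear and recreate the local directory here ...
  with open("storm-upgrade-2.3.json", 'w') as fp:
    json.dump(json_map, fp, indent=2)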
Example #6
def new_cached_exec(key, file_path, kinit_path, temp_dir, exec_user,
                    keytab_file, principal, hostname):
    """
  Entry point of an actual execution - triggered when timeout on the cache expired or on fresh execution
  """
    now = datetime.now()
    temp_kinit_cache_fd, temp_kinit_cache_filename = mkstemp(dir=temp_dir)
    command = "%s -c %s -kt %s %s" % \
              (kinit_path, temp_kinit_cache_filename, keytab_file,
               principal.replace("_HOST", hostname))

    os.close(temp_kinit_cache_fd)

    try:
        # Ensure the proper user owns this file
        File(temp_kinit_cache_filename, owner=exec_user, mode=0600)

        # Execute the kinit
        Execute(command, user=exec_user)

        with open(file_path, 'w+') as cache_file:
            result = {key: {"last_successful_execution": str(now)}}
            json.dump(result, cache_file)
    finally:
        File(temp_kinit_cache_filename, action='delete')
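The example uses Ambari's File and Execute resources. A standard-library sketch of the same temporary-credential-cache hygiene (create with mkstemp, close the descriptor, always delete in finally); the command invocation and permissions handling are simplified assumptions:

import json
import os
import subprocess
from datetime import datetime
from tempfile import mkstemp

def cached_kinit(key, cache_file_path, kinit_path, keytab_file, principal, hostname, temp_dir="/tmp"):
  fd, ccache = mkstemp(dir=temp_dir)
  os.close(fd)  # only the path is needed; kinit writes the credential cache itself
  os.chmod(ccache, 0o600)  # stand-in for File(..., mode=0600) in the original
  try:
    subprocess.check_call([kinit_path, "-c", ccache, "-kt", keytab_file,
                           principal.replace("_HOST", hostname)])
    with open(cache_file_path, 'w') as cache_file:
      json.dump({key: {"last_successful_execution": str(datetime.now())}}, cache_file)
  finally:
    os.remove(ccache)  # never leave the temporary credential cache behind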
Example #7
  def persist_cache(self):
    # ensure that our cache directory exists
    if not os.path.exists(self.cluster_cache_dir):
      os.makedirs(self.cluster_cache_dir)

    with self.__file_lock:
      with open(self.__current_cache_json_file, 'w') as f:
        json.dump(self, f, indent=2)

      if self.hash is not None:
        with open(self.__current_cache_hash_file, 'w') as fp:
          fp.write(self.hash)
Example #8
  def persist_cache(self, cache_hash):
    # ensure that our cache directory exists
    if not os.path.exists(self.cluster_cache_dir):
      os.makedirs(self.cluster_cache_dir)

    with self.__file_lock:
      with open(self.__current_cache_json_file, 'w') as f:
        json.dump(self, f, indent=2)

      if self.hash is not None:
        with open(self.__current_cache_hash_file, 'w') as fp:
          fp.write(cache_hash)

    # if all of above are successful finally set the hash
    self.hash = cache_hash
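Examples #7 and #8 pass self straight to json.dump, which only works because the cache object is itself serializable (in Ambari it is a dict subclass). A minimal Python 3 sketch of that arrangement; the class name, file names, and lock are illustrative:

import json
import os
import threading

class ClusterCache(dict):
  def __init__(self, cluster_cache_dir):
    super(ClusterCache, self).__init__()
    self.cluster_cache_dir = cluster_cache_dir
    self.hash = None
    self.__file_lock = threading.RLock()

  def persist_cache(self, cache_hash):
    os.makedirs(self.cluster_cache_dir, exist_ok=True)
    with self.__file_lock:
      with open(os.path.join(self.cluster_cache_dir, "cache.json"), 'w') as f:
        json.dump(self, f, indent=2)  # a dict subclass serializes directly
      with open(os.path.join(self.cluster_cache_dir, "cache.hash"), 'w') as fp:
        fp.write(cache_hash)
    # Record the hash only after both files were written successfully.
    self.hash = cache_hash

cache = ClusterCache("cluster_cache")
cache["configurations"] = {"zoo.cfg": {"tickTime": "2000"}}
cache.persist_cache("abc123")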
Example #9
def new_cached_exec(key, file_path, kinit_path, exec_user, keytab_file,
                    principal, hostname):
    """
  Entry point of an actual execution - triggered when timeout on the cache expired or on fresh execution
  """
    now = datetime.now()
    _, temp_kinit_cache_file = mkstemp()
    command = "su -s /bin/bash - %s -c '%s -c %s -kt %s %s'" % \
              (exec_user, kinit_path, temp_kinit_cache_file, keytab_file,
               principal.replace("_HOST", hostname))

    try:
        Execute(command)

        with open(file_path, 'w+') as cache_file:
            result = {key: {"last_successful_execution": str(now)}}
            json.dump(result, cache_file)
    finally:
        os.remove(temp_kinit_cache_file)
Example #10
def new_cached_exec(key, file_path, kinit_path, temp_dir, exec_user, keytab_file, principal, hostname):
  """
  Entry point of an actual execution - triggered when timeout on the cache expired or on fresh execution
  """
  now = datetime.now()
  temp_kinit_cache_fd, temp_kinit_cache_filename = mkstemp(dir=temp_dir)
  command = "%s -c %s -kt %s %s" % \
            (kinit_path, temp_kinit_cache_filename, keytab_file,
             principal.replace("_HOST", hostname))

  os.close(temp_kinit_cache_fd)

  try:
    Execute(command, user=exec_user)

    with open(file_path, 'w+') as cache_file:
      result = {key: {"last_successful_execution": str(now)}}
      json.dump(result, cache_file)
  finally:
    File(temp_kinit_cache_filename, action='delete')
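These examples only show the write side of the cache. The docstrings mention a timeout, so a caller presumably reads the file back before deciding whether to run kinit again. A speculative sketch of that read side; the field name follows the write side above, while the TTL value and parsing are assumptions:

import json
import os
from datetime import datetime, timedelta

def needs_refresh(cache_file_path, key, ttl=timedelta(minutes=30)):
  """True when no successful execution newer than the TTL is recorded."""
  if not os.path.isfile(cache_file_path):
    return True
  try:
    with open(cache_file_path) as cache_file:
      record = json.load(cache_file)
    last_run = datetime.fromisoformat(record[key]["last_successful_execution"])
  except (OSError, ValueError, KeyError):
    return True  # unreadable or malformed cache: re-run to be safe
  return datetime.now() - last_run > ttl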
Example #11
    def delete_storm_local_data(self, env):
        """
    Deletes Storm data from local directories. This will create a marker file
    with JSON data representing the upgrade stack and request/stage ID. This
    will prevent multiple Storm components on the same host from removing
    the local directories more than once.
    :return:
    """
        import params

        Logger.info('Clearing Storm data from local directories...')

        storm_local_directory = params.local_dir
        if storm_local_directory is None:
            raise Fail(
                "The storm local directory specified by storm-site/storm.local.dir must be specified"
            )

        request_id = default("/requestId", None)
        stage_id = default("/stageId", None)
        stack_version = params.version
        stack_name = params.stack_name

        json_map = {}
        json_map["requestId"] = request_id
        json_map["stageId"] = stage_id
        json_map["stackVersion"] = stack_version
        json_map["stackName"] = stack_name

        temp_directory = params.tmp_dir
        upgrade_file = os.path.join(
            temp_directory, "storm-upgrade-{0}.json".format(stack_version))

        if os.path.exists(upgrade_file):
            try:
                with open(upgrade_file) as file_pointer:
                    existing_json_map = json.load(file_pointer)

                if cmp(json_map, existing_json_map) == 0:
                    Logger.info(
                        "The storm upgrade has already removed the local directories for {0}-{1} for request {2} and stage {3}"
                        .format(stack_name, stack_version, request_id,
                                stage_id))

                    # nothing else to do here for this as it appears to have already been
                    # removed by another component being upgraded
                    return

            except:
                Logger.error(
                    "The upgrade file {0} appears to be corrupt; removing...".
                    format(upgrade_file))
                File(upgrade_file, action="delete")
        else:
            # delete the upgrade file since it does not match
            File(upgrade_file, action="delete")

        # delete from local directory
        Directory(storm_local_directory, action="delete", recursive=True)

        # recreate storm local directory
        Directory(storm_local_directory,
                  mode=0755,
                  owner=params.storm_user,
                  group=params.user_group,
                  recursive=True)

        # the file doesn't exist, so create it
        with open(upgrade_file, 'w') as file_pointer:
            json.dump(json_map, file_pointer, indent=2)
Example #12
def write_config(config, cfg_type, tag):
  file_name = cfg_type + "_" + tag
  if os.path.isfile(file_name):
    os.remove(file_name)
  json.dump(config, open(file_name, 'w'))
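json.dump(config, open(file_name, 'w')) leaves closing the file handle to the garbage collector; the other examples use a with block, which flushes and closes deterministically. A sketch of the same function written that way (behavior otherwise unchanged; the sample arguments are made up):

import json
import os

def write_config(config, cfg_type, tag):
  file_name = cfg_type + "_" + tag
  # The explicit remove is kept from the original, although mode 'w' already
  # truncates an existing file.
  if os.path.isfile(file_name):
    os.remove(file_name)
  with open(file_name, 'w') as f:
    json.dump(config, f)

write_config({"dfs.replication": "3"}, "hdfs-site", "version1")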
Example #13
                      "request {2} and direction {3}. Nothing else to do.".format(stack_name, stack_version, request_id, upgrade_direction))

          # Nothing else to do here for this as it appears to have already been
          # removed by another component being upgraded
          return
        else:
          Logger.info("The marker file differs from the new value. Will proceed to delete Storm local dir, "
                      "and generate new file. Current marker file: {0}".format(str(existing_json_map)))
      except Exception, e:
        Logger.error("The marker file {0} appears to be corrupt; removing it. Error: {1}".format(marker_file, str(e)))
        File(marker_file, action="delete")
    else:
      Logger.info('The marker file {0} does not exist; will attempt to delete local Storm directory if it exists.'.format(marker_file))

    # Delete from local directory
    if os.path.isdir(storm_local_directory):
      Logger.info("Deleting storm local directory, {0}".format(storm_local_directory))
      Directory(storm_local_directory, action="delete", recursive=True)

    # Recreate storm local directory
    Logger.info("Recreating storm local directory, {0}".format(storm_local_directory))
    Directory(storm_local_directory, mode=0755, owner=params.storm_user,
      group=params.user_group, recursive=True)

    # The file doesn't exist, so create it
    Logger.info("Saving marker file to {0} with contents: {1}".format(marker_file, str(json_map)))
    with open(marker_file, 'w') as file_pointer:
      json.dump(json_map, file_pointer, indent=2)

if __name__ == "__main__":
  StormUpgrade().execute()
Example #14
    """
    logger.info("Updating cached configurations for cluster {0}".format(cluster_name))

    self.__cache_lock.acquire()
    try:
      self.__configurations[cluster_name] = configuration
    except Exception, exception :
      logger.exception("Unable to update configurations for cluster {0}".format(cluster_name))
    finally:
      self.__cache_lock.release()


    self.__file_lock.acquire()
    try:
      with open(self.__config_json_file, 'w') as f:
        json.dump(self.__configurations, f, indent=2)
    except Exception, exception :
      logger.exception("Unable to update configurations for cluster {0}".format(cluster_name))
    finally:
      self.__file_lock.release()


  def get_configuration_value(self, cluster_name, key):
    """
    Gets a value from the cluster configuration map for the given cluster and
    key. The key is expected to be of the form 'foo-bar/baz' or
    'foo-bar/bar-baz/foobarbaz' where every / denotes a new mapping
    :param key:  a lookup key, like 'foo-bar/baz'
    :return: the value, or None if not found
    """
    self.__cache_lock.acquire()
Example #15
 def write_file(self, filename, tags):
     runDir = self.findRunDir()
     conf_file = open(os.path.join(runDir, filename), 'w')
     json.dump(tags, conf_file)
     conf_file.close()
Example #16
            Logger.info(
                'The marker file {0} does not exist; will attempt to delete local Storm directory if it exists.'
                .format(marker_file))

        # Delete from local directory
        if os.path.isdir(storm_local_directory):
            Logger.info("Deleting storm local directory, {0}".format(
                storm_local_directory))
            Directory(storm_local_directory,
                      action="delete",
                      create_parents=True)

        # Recreate storm local directory
        Logger.info("Recreating storm local directory, {0}".format(
            storm_local_directory))
        Directory(storm_local_directory,
                  mode=0755,
                  owner=params.storm_user,
                  group=params.user_group,
                  create_parents=True)

        # The file doesn't exist, so create it
        Logger.info("Saving marker file to {0} with contents: {1}".format(
            marker_file, str(json_map)))
        with open(marker_file, 'w') as file_pointer:
            json.dump(json_map, file_pointer, indent=2)


if __name__ == "__main__":
    StormUpgrade().execute()
Example #17
    def delete_storm_local_data(self, env):
        """
        Deletes Storm data from local directories. This will create a marker file
        with JSON data representing the upgrade stack and request/stage ID. This
        will prevent multiple Storm components on the same host from removing
        the local directories more than once.
        :return:
        """
        import params

        Logger.info('Clearing Storm data from local directories...')

        storm_local_directory = params.local_dir
        if storm_local_directory is None:
            raise Fail(
                "The storm local directory specified by storm-site/storm.local.dir must be specified"
            )

        request_id = default("/requestId", None)

        stack_name = params.stack_name
        stack_version = params.version
        upgrade_direction = params.upgrade_direction

        json_map = {}
        json_map["requestId"] = request_id
        json_map["stackName"] = stack_name
        json_map["stackVersion"] = stack_version
        json_map["direction"] = upgrade_direction

        temp_directory = params.tmp_dir
        marker_file = os.path.join(
            temp_directory, "storm-upgrade-{0}.json".format(stack_version))
        Logger.info("Marker file for upgrade/downgrade of Storm, {0}".format(
            marker_file))

        if os.path.exists(marker_file):
            Logger.info("The marker file exists.")
            try:
                with open(marker_file) as file_pointer:
                    existing_json_map = json.load(file_pointer)

                if cmp(json_map, existing_json_map) == 0:
                    Logger.info(
                        "The storm upgrade has already removed the local directories for {0}-{1} for "
                        "request {2} and direction {3}. Nothing else to do.".
                        format(stack_name, stack_version, request_id,
                               upgrade_direction))

                    # Nothing else to do here for this as it appears to have already been
                    # removed by another component being upgraded
                    return
                else:
                    Logger.info(
                        "The marker file differs from the new value. Will proceed to delete Storm local dir, "
                        "and generate new file. Current marker file: {0}".
                        format(str(existing_json_map)))
            except Exception as e:
                Logger.error(
                    "The marker file {0} appears to be corrupt; removing it. Error: {1}"
                    .format(marker_file, str(e)))
                File(marker_file, action="delete")
        else:
            Logger.info(
                'The marker file {0} does not exist; will attempt to delete local Storm directory if it exists.'
                .format(marker_file))

        # Delete from local directory
        if os.path.isdir(storm_local_directory):
            Logger.info("Deleting storm local directory, {0}".format(
                storm_local_directory))
            Directory(storm_local_directory,
                      action="delete",
                      create_parents=True)

        # Recreate storm local directory
        Logger.info("Recreating storm local directory, {0}".format(
            storm_local_directory))
        Directory(storm_local_directory,
                  mode=0755,
                  owner=params.storm_user,
                  group=params.user_group,
                  create_parents=True)

        # The file doesn't exist, so create it
        Logger.info("Saving marker file to {0} with contents: {1}".format(
            marker_file, str(json_map)))
        with open(marker_file, 'w') as file_pointer:
            json.dump(json_map, file_pointer, indent=2)
Example #18
def write_mapping(hostmapping):
  if os.path.isfile(Options.MR_MAPPING_FILE):
    os.remove(Options.MR_MAPPING_FILE)
  json.dump(hostmapping, open(Options.MR_MAPPING_FILE, 'w'))
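A round-trip sketch that adds the reading side to the same pattern; Options.MR_MAPPING_FILE is replaced by an illustrative local path, and the sample mapping is made up:

import json
import os

MR_MAPPING_FILE = "mr_mapping"  # placeholder for Options.MR_MAPPING_FILE

def write_mapping(hostmapping):
  if os.path.isfile(MR_MAPPING_FILE):
    os.remove(MR_MAPPING_FILE)
  with open(MR_MAPPING_FILE, 'w') as f:
    json.dump(hostmapping, f)

def read_mapping():
  with open(MR_MAPPING_FILE) as f:
    return json.load(f)

write_mapping({"TASKTRACKER": ["host1.example.com", "host2.example.com"]})
assert read_mapping() == {"TASKTRACKER": ["host1.example.com", "host2.example.com"]}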