Example #1
  def WriteHeader(self):
    """Writes the header to the output."""
    # Start by finding out which fields are to be used.
    self.fields = []

    if self._filter:
      self.fields = self._filter.fields
      self.separator = self._filter.separator
    else:
      self.separator = u','

    if not self.fields:
      # TODO: Evaluate which fields should be included by default.
      self.fields = [
          'datetime', 'timestamp_desc', 'source', 'source_long',
          'message', 'parser', 'display_name', 'tag', 'store_number',
          'store_index']

    if self.store:
      # Build the hostname lookup and map each store number to its
      # preprocessing information for later lookups.
      self._hostnames = helper.BuildHostDict(self.store)
      self._preprocesses = {}
      for info in self.store.GetStorageInformation():
        if hasattr(info, 'store_range'):
          # The store range is inclusive (see the other examples), hence + 1.
          for store_number in range(
              info.store_range[0], info.store_range[1] + 1):
            self._preprocesses[store_number] = info

    self.filehandle.WriteLine('{0:s}\n'.format(
        self.separator.join(self.fields)))
Example #2
    def Start(self):
        """Writes the header to the output."""
        # Build the hostname and per-store preprocessing lookup dicts.
        self._hostnames = {}
        if self.store:
            self._hostnames = helper.BuildHostDict(self.store)
            self._preprocesses = {}
            for info in self.store.GetStorageInformation():
                if hasattr(info, 'store_range'):
                    # The store range is inclusive, hence the + 1.
                    for store_number in range(info.store_range[0],
                                              info.store_range[1] + 1):
                        self._preprocesses[store_number] = info

        self.filehandle.WriteLine(u'Time|Source|Host|User|Description\n')
Example #3
    def Start(self):
        """Writes the header to the output."""
        # Build the hostname and per-store preprocessing lookup dicts.
        self._hostnames = {}
        if self.store:
            self._hostnames = helper.BuildHostDict(self.store)
            self._preprocesses = {}
            for info in self.store.GetStorageInformation():
                if hasattr(info, 'store_range'):
                    # The store range is inclusive, hence the + 1.
                    for store_number in range(info.store_range[0],
                                              info.store_range[1] + 1):
                        self._preprocesses[store_number] = info

        self.filehandle.WriteLine(
            u'date,time,timezone,MACB,source,sourcetype,type,user,host,short,desc,'
            u'version,filename,inode,notes,format,extra\n')
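The methods above all expand a store range into a per-store lookup dict. A minimal, self-contained sketch of that step (using hypothetical store numbers and a namedtuple stand-in for the storage information objects) shows why the upper bound needs the + 1 when the range is inclusive:

import collections

# Hypothetical stand-in for the storage information objects used above.
StorageInfo = collections.namedtuple('StorageInfo', ['store_range'])

infos = [StorageInfo(store_range=(1, 3)), StorageInfo(store_range=(4, 5))]

preprocesses = {}
for info in infos:
    # The store range is treated as inclusive, hence the + 1 on the upper bound.
    for store_number in range(info.store_range[0], info.store_range[1] + 1):
        preprocesses[store_number] = info

print(sorted(preprocesses))  # [1, 2, 3, 4, 5]; without + 1, stores 3 and 5 are skipped.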
Example #4
    def WriteHeader(self):
        """Writes the header to the output."""
        if self.store:
            # Build the hostname lookup and map each store number to its
            # preprocessing information for later lookups.
            self._hostnames = helper.BuildHostDict(self.store)
            self._preprocesses = {}
            for info in self.store.GetStorageInformation():
                if hasattr(info, 'store_range'):
                    # The store range is inclusive, hence the + 1.
                    for store_number in range(info.store_range[0],
                                              info.store_range[1] + 1):
                        self._preprocesses[store_number] = info

        mapping = {
            self._doc_type: {
                u'_timestamp': {
                    u'enabled': True,
                    u'path': 'datetime',
                    u'format': 'date_time_no_millis'
                },
            }
        }
        # Check if the mappings exist (only create if not there).
        try:
            old_mapping_index = self._elastic_db.get_mapping(self._index_name)
            old_mapping = old_mapping_index.get(self._index_name, {})
            if self._doc_type not in old_mapping:
                self._elastic_db.put_mapping(self._index_name,
                                             self._doc_type,
                                             mapping=mapping)
        except (pyelasticsearch.ElasticHttpNotFoundError,
                pyelasticsearch.exceptions.ElasticHttpError):
            try:
                self._elastic_db.create_index(self._index_name,
                                              settings={'mappings': mapping})
            except pyelasticsearch.IndexAlreadyExistsError:
                raise RuntimeError(u'Unable to create the index')
        except requests.exceptions.ConnectionError as exception:
            logging.error(
                u'Unable to proceed, cannot connect to ElasticSearch backend '
                u'with error: {0:s}.\nPlease verify connection.'.format(
                    exception))
            raise RuntimeError(u'Unable to connect to ElasticSearch backend.')

        # pylint: disable=unexpected-keyword-arg
        self._elastic_db.health(wait_for_status='yellow')

        sys.stdout.write('Inserting data')
        sys.stdout.flush()
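For context, a minimal sketch of building the pyelasticsearch client that the snippet above uses as self._elastic_db, and of indexing a single document once the header has been written; the URL, index name, document type and document contents are hypothetical:

import pyelasticsearch

# Hypothetical connection parameters; adjust to match the actual backend.
elastic_db = pyelasticsearch.ElasticSearch(u'http://127.0.0.1:9200')
index_name = u'plaso_index'
doc_type = u'event_object'

# Wait until the cluster is at least partially available, then index a
# single made-up document into the prepared index.
# pylint: disable=unexpected-keyword-arg
elastic_db.health(wait_for_status=u'yellow')
elastic_db.index(
    index_name, doc_type,
    {u'datetime': u'2014-06-01T12:00:00', u'message': u'example event'})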