Example #1
    def __init__(self,
                 host,
                 token,
                 tlp,
                 confidence,
                 tags,
                 provider,
                 group,
                 ssl,
                 rcache,
                 ignore_cidr,
                 include_hp_tags=False):
        StreamHandler.__init__(self)
        self.tlp = tlp
        self.confidence = confidence
        self.tags = tags
        self.provider = provider
        self.group = group
        self.include_hp_tags = include_hp_tags
        self.cache = rcache
        self.ignore_cidr_list = ignore_cidr
        self.url = host + "/indicators"

        logging.debug(
            'Initializing Client instance with: {0}, {1}, {2}'.format(
                token, host, ssl))
        self.session = requests.Session()
        self.session.headers.update({'Authorization': 'Token token=' + token})
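The constructor above only prepares the authenticated requests session and the indicator URL; delivery would happen in the handler's emit(). A minimal sketch of such an emit(), assuming a hypothetical JSON payload (the real indicator schema depends on the server this client targets):

    def emit(self, record):
        # Hypothetical companion to the __init__ above: build an indicator
        # from the formatted record and POST it through the prepared session.
        payload = {
            'indicator': self.format(record),
            'tlp': self.tlp,
            'confidence': self.confidence,
            'tags': self.tags,
            'provider': self.provider,
            'group': self.group,
        }
        try:
            self.session.post(self.url, json=payload, timeout=5)
        except requests.RequestException:
            self.handleError(record)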
Example #2
    def __init__(self):
        StreamHandler.__init__(self)

        self._addon = xbmcaddon.Addon()
        addon_id = self._addon.getAddonInfo("id")
        formatter = Formatter("[{}] %(message)s".format(addon_id))
        self.setFormatter(formatter)
Example #3
    def __init__(self, cfg):

        StreamHandler.__init__(self)

        self.PUBLISH_PERIOD = cfg.TELEMETRY_PUBLISH_PERIOD
        self._last_publish = time.time()
        self._telem_q = queue.Queue()
        self._step_inputs = cfg.TELEMETRY_DEFAULT_INPUTS.split(',')
        self._step_types = cfg.TELEMETRY_DEFAULT_TYPES.split(',')
        self._total_updates = 0
        self._donkey_name = os.environ.get('DONKEY_NAME',
                                           cfg.TELEMETRY_DONKEY_NAME)
        self._mqtt_broker = os.environ.get(
            'DONKEY_MQTT_BROKER',
            cfg.TELEMETRY_MQTT_BROKER_HOST)  # 'iot.eclipse.org'
        self._topic = cfg.TELEMETRY_MQTT_TOPIC_TEMPLATE % self._donkey_name
        self._use_json_format = cfg.TELEMETRY_MQTT_JSON_ENABLE
        self._mqtt_client = MQTTClient()
        self._mqtt_client.connect(self._mqtt_broker,
                                  cfg.TELEMETRY_MQTT_BROKER_PORT)
        self._mqtt_client.loop_start()
        self._on = True
        if cfg.TELEMETRY_LOGGING_ENABLE:
            self.setLevel(logging.getLevelName(cfg.TELEMETRY_LOGGING_LEVEL))
            self.setFormatter(logging.Formatter(cfg.TELEMETRY_LOGGING_FORMAT))
            logger.addHandler(self)
Example #4
 def __init__(self, topic="app_log"):
     if kafka_producer is None:
         raise ValueError('kafka_producer not provided to kafka logger.')
     
     StreamHandler.__init__(self)
     self.kafka_producer = kafka_producer
     self.topic = topic
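Note that kafka_producer is not a parameter of this __init__; the snippet evidently expects a module-level producer to exist before the handler is constructed. A minimal sketch of that assumption using the kafka-python client (the broker address is illustrative):

from kafka import KafkaProducer

# Hypothetical module-level producer assumed by the handler above (kafka-python).
kafka_producer = KafkaProducer(bootstrap_servers='localhost:9092')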
Example #5
    def __init__(self, broker, topic):
        StreamHandler.__init__(self)
        self.broker = broker
        self.topic = topic

        # Kafka Broker Configuration
        self.kafka_broker = KafkaSend(broker)
Example #6
 def __init__(self,
              filename='file_.log',
              mode='a',
              encoding=None,
              delay=False):
     """
     Open the specified file and use it as the stream for logging.
     """
     from iohandler import IOFile
     self.fph = IOFile.IOFileHandler()
     self.filename = filename
     filename = os.fspath(filename)
     self.baseFilename = os.path.abspath(filename)
     fn = os.path.basename(self.filename)
     self._file_object = self.fph.open(file_name=fn,
                                       file_path=os.path.dirname(filename),
                                       create_file_if_none=True,
                                       mode='ab+')
     self.mode = mode
     self.encoding = encoding
     self.delay = delay
     if delay:
         # We don't open the stream, but we still need to call the
         # Handler constructor to set level, formatter, lock etc.
         Handler.__init__(self)
         self.stream = None
     else:
         StreamHandler.__init__(self, self._open())
Example #7
 def __init__(self, topic='ferris.logs'):
     StreamHandler.__init__(self)
     environment = ApplicationConfigurator().get('ferris.env')
     broker_url = f"{environment['KAFKA_BOOTSTRAP_SERVER']}:{environment['KAFKA_PORT']}"
     self.topic = topic
     # Kafka Broker Configuration
     self.kafka_broker = KafkaConfig(broker_url)
Example #8
 def __init__(self, process_id=None):
     StreamHandler.__init__(self)
     self.setFormatter(self._get_formatter())
     if process_id is None:
         self.process_id = os.environ.get('PROCESS_ID', 'xxxxxxxx')
     else:
         self.process_id = process_id
Example #9
    def __init__(self,
                 strm=None,
                 upd_level=None,
                 ansi_mode=None,
                 over_handler=None):
        ''' Initialise the `UpdHandler`.

        Parameters:
        * `strm`: the output stream, default `sys.stderr`.
        * `upd_level`: the magic logging level which updates the status line
          via `Upd`. Default: `STATUS`.
        * `ansi_mode`: if `None`, set from `strm.isatty()`.
          A true value causes the handler to colour certain logging levels
          using ANSI terminal sequences.
        '''
        if strm is None:
            strm = sys.stderr
        if upd_level is None:
            upd_level = STATUS
        if ansi_mode is None:
            ansi_mode = strm.isatty()
        StreamHandler.__init__(self, strm)
        self.upd = Upd(strm)
        self.upd_level = upd_level
        self.ansi_mode = ansi_mode
        self.over_handler = over_handler
        self.__lock = Lock()
Example #10
 def __init__(self):
     """
     Parameters
     ----------
     None
     """
     StreamHandler.__init__(self)
Example #11
 def __init__(self, filename, mode="a"):
     filename = os.path.abspath(filename)
     StreamHandler.__init__(self, open(filename, mode))
     self.baseFilename = filename
     self.mode = mode
     self._wr = weakref.ref(self, _remove_from_reopenable)
     _reopenable_handlers.append(self._wr)
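This example (and the identical example #12 below) registers each handler in a module-level list of weak references so that open files can be reopened later, for instance after external log rotation. Those helpers are not part of the snippet; a minimal sketch of what they presumably look like:

# Hypothetical module-level bookkeeping assumed by the handler above.
_reopenable_handlers = []

def _remove_from_reopenable(wr):
    # Weakref callback: drop the dead reference once a handler is garbage-collected.
    try:
        _reopenable_handlers.remove(wr)
    except ValueError:
        pass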
Example #12
 def __init__(self, filename, mode="a"):
     filename = os.path.abspath(filename)
     StreamHandler.__init__(self, open(filename, mode))
     self.baseFilename = filename
     self.mode = mode
     self._wr = weakref.ref(self, _remove_from_reopenable)
     _reopenable_handlers.append(self._wr)
Example #13
    def __init__(self, **kwargs):
        StreamHandler.__init__(self)
        if 'log_name' not in kwargs.keys():
            raise ValueError("Missing log_name value'")
        log_name = kwargs['log_name']
        if not isinstance(log_name, str):
            raise ValueError("Invalid value: log_name must be of type string")

        self.parse_json = False
        if 'parse_json' in kwargs.keys():
            parse_json = kwargs['parse_json']
            if not isinstance(parse_json, bool):
                raise ValueError(
                    "Invalid value: parse_json must be of type bool")
            self.parse_json = parse_json
        self.add_hostname = False
        if 'add_hostname' in kwargs.keys():
            add_hostname = kwargs['add_hostname']
            if not isinstance(add_hostname, bool):
                raise ValueError(
                    "Invalid value: add_hostname must be of type bool")
            self.add_hostname = add_hostname
            self.hostname = socket.gethostname()

        self.logger = logging.Client().logger(log_name)
Example #14
    def __init__(self, cfg, default_inputs=None, default_types=None):

        StreamHandler.__init__(self)

        self.PUBLISH_PERIOD = cfg.TELEMETRY_PUBLISH_PERIOD
        self._last_publish = time.time()
        self._telem_q = queue.Queue()
        self._default_inputs = default_inputs or []
        self._default_types = default_types or []
        self._total_updates = 0
        self._donkey_name = os.environ.get('DONKEY_NAME',
                                           cfg.TELEMETRY_DONKEY_NAME)
        self._mqtt_broker = os.environ.get(
            'DONKEY_MQTT_BROKER',
            cfg.TELEMETRY_MQTT_BROKER_HOST)  # 'iot.eclipse.org'
        self._topic = cfg.TELEMETRY_MQTT_TOPIC_TEMPLATE % self._donkey_name
        self._use_json_format = cfg.TELEMETRY_MQTT_JSON_ENABLE
        self._mqtt_client = MQTTClient()
        self._mqtt_client.connect(self._mqtt_broker,
                                  cfg.TELEMETRY_MQTT_BROKER_PORT)
        self._mqtt_client.loop_start()
        self._on = True
        print(
            f"Telemetry MQTT server connected (publishing: {', '.join(self._default_inputs)}"
        )
Example #15
 def __init__(self, client):
     StreamHandler.__init__(self)
     if type(client) not in CLIENT_TYPES:
         raise InvalidClientTypeException(
             "Invalid LoggerClient type (%s). Must be one of %s" %
             (type(client), CLIENT_TYPES))
     self._client = client
Example #16
 def __init__(self, gLog, attris, time=True):
     StreamHandler.__init__(self)
     self.gLog = gLog
     self.attris = _default_attri if attris is None else attris
     self.nattri = len(self.attris)
     if time:
         self.nattri += 1
     self.time = time
Example #17
 def __init__(self):
     StreamHandler.__init__(self)
     formatter = Formatter(
         '%(asctime)s.%(msecs)03d:%(levelname)-8s%(lineno)-4d: %(message)s',
     )
     formatter.datefmt = '%H:%M:%S'
     self.setFormatter(formatter)
     self.setLevel(DEBUG)
Example #18
 def __init__(self, host, port, tag, std_output_type=StdOutputType.Stdout):
     StreamHandler.__init__(self)
     self.host = host
     self.port = port
     self.tag = tag
     self.std_output_type = std_output_type
     self.orig_stdout = sys.__stdout__
     self.orig_stderr = sys.__stderr__
Example #19
 def __init__(self, config):
     StreamHandler.__init__(self)
     with open(config, 'r', encoding='utf-8') as c:
         self.config = json.load(c)
     self.token = self.config['access_token']
     self.chat_id = self.config['chat_id']
     # To use this bot you must first obtain the chat_id that the bot will send log messages to;
     # the appropriate method for that is provided below.
     # Proxy support is optional for your environment: enter <your proxy here> if you have a proxy.
     self.url = 'https://botapi.tamtam.chat/'
Example #20
    def __init__(self, workflow_uuid, publisher, stream=None):
        """
        Initialize the handler.

        If stream is not specified, sys.stderr is used.
        """
        StreamHandler.__init__(self, stream)
        self.workflow_uuid = workflow_uuid
        self.publisher = publisher
Example #21
 def __init__(self, mdk, get_session):
     """
     :param mdk: A ``mdk.MDK`` instance.
     :param get_session: Unary callable that returns the current MDK Session, or
         ``None``, in which case a default Session will be used.
     """
     StreamHandler.__init__(self)
     self._default_session = mdk.session()
     self._get_session = get_session
Example #22
File: logging.py Project: datawire/mdk
 def __init__(self, mdk, get_session):
     """
     :param mdk: A ``mdk.MDK`` instance.
     :param get_session: Unary callable that returns the current MDK Session, or
         ``None``, in which case a default Session will be used.
     """
     StreamHandler.__init__(self)
     self._default_session = mdk.session()
     self._get_session = get_session
Example #23
 def __init__(self, f, mode='a', encoding=None, delay=False):
     """
     Open the specified file and use it as the stream for logging.
     """
     # Issue #27493: add support for Path objects to be passed in
     self.f = f
     self.mode = mode
     self.encoding = encoding
     self.delay = delay
     StreamHandler.__init__(self, f)
Example #24
    def __init__(self,
                 bot_token: str,
                 chat_ids: dict,
                 project_name: str,
                 use_proxy: bool = False,
                 request_kwargs: dict = None):

        StreamHandler.__init__(self)
        self.telegram_broker = TelegramLog(bot_token, chat_ids, project_name,
                                           use_proxy, request_kwargs)
Example #25
    def __init__(self):
        StreamHandler.__init__(self)

        with open('keys/influx_settings.json') as json_file:
            data = json.load(json_file)

            self.client = DataFrameClient(host=data.get("host"),
                                          port=data.get("port"),
                                          username=data.get("username"),
                                          password=data.get("password"))
            self.client.switch_database(data.get("database"))
Example #26
 def __init__(self, path, filename, mode='a', encoding=None, delay=False):
     if not Path(path).exists():
         Path(path).mkdir()
     self.baseFilename = Path(path).joinpath(filename)
     self.mode = mode
     self.encoding = encoding
     self.delay = delay
     if delay:
         Handler.__init__(self)
         self.stream = None
     else:
         StreamHandler.__init__(self, self._open())
Example #27
 def __init__(self):
     StreamHandler.__init__(self)
     self._fluent_host = os.getenv("LOG_FLUENT_HOST", None)
     self._fluent_port = os.getenv("LOG_FLUENT_PORT", None)
     self._fluent_app = os.getenv("LOG_FLUENT_APP", None)
     self._fluent_tag = os.getenv("LOG_FLUENT_TAG", None)
     if self._fluent_host:
         self._fluentd = FluentPilot(host=self._fluent_host,
                                     port=self._fluent_port,
                                     app=self._fluent_app,
                                     tag=self._fluent_tag)
     else:
         self._fluentd = None
Example #28
 def __init__(self, path, filename, mode='a', encoding=None, delay=False):
     filename = os.fspath(filename)
     if not os.path.exists(path):
         os.mkdir(path)
     self.baseFilename = os.path.join(path, filename)
     self.mode = mode
     self.encoding = encoding
     self.delay = delay
     if delay:
         Handler.__init__(self)
         self.stream = None
     else:
         StreamHandler.__init__(self, self._open())
Example #29
    def __init__(self,
                 stream_name: str,
                 partition_key: str,
                 *,
                 chunk_size: int = DEFAULT_CHUNK_SIZE,
                 encoder: str = 'utf-8',
                 workers: int = 1,
                 **boto_session_kwargs):
        """
        :param stream_name: Name of the Kinesis stream
        :type stream_name: str
        :param partition_key: Kinesis partition key used to group data by shards
        :type partition_key: str
        :param chunk_size: the size of a chunk of records used as the rotation threshold (default 524288)
        :type chunk_size: int
        :param encoder: the encoder to be used for log records (default 'utf-8')
        :type encoder: str
        :param workers: the number of background workers that rotate log records (default 1)
        :type workers: int
        :param boto_session_kwargs: additional keyword arguments for the AWS Kinesis Resource
        :type boto_session_kwargs: boto3 resource keyword arguments
        """

        args_validation = (
            ValidationRule(stream_name, is_non_empty_string,
                           empty_str_err('stream_name')),
            ValidationRule(chunk_size, is_positive_int,
                           bad_integer_err('chunk_size')),
            ValidationRule(encoder, is_non_empty_string,
                           empty_str_err('encoder')),
            ValidationRule(workers, is_positive_int,
                           bad_integer_err('workers')),
        )

        for rule in args_validation:
            assert rule.func(rule.arg), rule.message

        self.stream = KinesisStream(stream_name,
                                    partition_key,
                                    chunk_size=chunk_size,
                                    encoder=encoder,
                                    workers=workers,
                                    **boto_session_kwargs)

        # Make sure we gracefully clear the buffers and upload the missing parts before exiting
        signal.signal(signal.SIGTERM, self._teardown)
        signal.signal(signal.SIGINT, self._teardown)
        signal.signal(signal.SIGQUIT, self._teardown)
        atexit.register(self.close)

        StreamHandler.__init__(self, self.stream)
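_teardown, registered above for SIGTERM/SIGINT/SIGQUIT, is not included in the snippet (example #32 below relies on the same callback). A minimal sketch, assuming it only needs to flush buffered records and close the underlying stream before the process exits:

    def _teardown(self, signum, frame):
        # Hypothetical signal handler: closing the handler flushes any buffered
        # records and finalizes the pending upload before the process terminates.
        self.close()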
Example #30
    def __init__(self):
        regex = os.getenv('PM_LOGEXCLUDE', '')

        if regex:
            try:
                rex = re.compile(regex)
                print "Using %s to filter logging" % regex
                self.rex = rex
            except:
                print "Error while compiling except regex %s" % regex
                self.rex = None
        else:
            self.rex = None

        StreamHandler.__init__(self)
Example #31
 def __init__(self):
     StreamHandler.__init__(self)
     self.results = {
         "name": "Specification name",
         "status": "passed",
         "location": "filename.xml",
         "elements": [{
             "keyword": "Scenario",
             "name": "Checking IDS specifications",
             "status": "passed",
             "steps": []
         }],
     }
Example #32
    def __init__(self, key: str, bucket: str, *, chunk_size: int = DEFAULT_CHUNK_SIZE,
                 time_rotation: int = DEFAULT_ROTATION_TIME_SECS, max_file_size_bytes: int = MAX_FILE_SIZE_BYTES,
                 encoder: str = 'utf-8',
                 max_threads: int = 1, compress: bool = False, **boto_session_kwargs):
        """

        :param key: The path of the S3 object
        :type key: str
        :param bucket: The id of the S3 bucket
        :type bucket: str
        :param chunk_size: size of a chunk in the multipart upload in bytes (default 5MB)
        :type chunk_size: int
        :param time_rotation: Interval in seconds to rotate the file by (default 12 hours)
        :type time_rotation: int
        :param max_file_size_bytes: maximum file size in bytes before rotation (default 100MB)
        :type max_file_size_bytes: int
        :param encoder: default utf-8
        :type encoder: str
        :param max_threads: the number of threads the handler runs for file and chunk rotation tasks,
               only useful when emitting lots of records
        :type max_threads: int
        :param compress: whether to save a compressed, gz-suffixed file
        :type compress: bool
        """

        args_validation = (
            ValidationRule(time_rotation, is_positive_int, bad_integer_err('time_rotation')),
            ValidationRule(max_file_size_bytes, is_positive_int, bad_integer_err('max_file_size_bytes')),
            ValidationRule(encoder, is_non_empty_string, empty_str_err('encoder')),
            ValidationRule(max_threads, is_positive_int, bad_integer_err('thread_count')),
        )

        for rule in args_validation:
            assert rule.func(rule.arg), rule.message

        self.bucket = bucket
        self.stream = S3Stream(self.bucket, key, chunk_size=chunk_size, max_file_log_time=time_rotation,
                               max_file_size_bytes=max_file_size_bytes, encoder=encoder, workers=max_threads,
                               compress=compress, **boto_session_kwargs)

        # Make sure we gracefully clear the buffers and upload the missing parts before exiting
        signal.signal(signal.SIGTERM, self._teardown)
        signal.signal(signal.SIGINT, self._teardown)
        signal.signal(signal.SIGQUIT, self._teardown)
        atexit.register(self.close)

        StreamHandler.__init__(self, self.stream)
Example #33
 def __init__(self, stream):
     StreamHandler.__init__(self, AnsiToWin32(stream).stream)
Example #34
 def __init__(self):
     self.buffer = StringIO()
     StreamHandler.__init__(self, self.buffer)
     self.setLevel(logging.DEBUG2)
     fmt = '%(asctime)-15s %(levelname)-8s %(message)s'
     self.setFormatter(logging.Formatter(fmt))
Example #35
 def __init__(self, filename, mode="a"):
     filename = os.path.abspath(filename)
     StreamHandler.__init__(self, open(filename, mode))
     self.baseFilename = filename
     self.mode = mode
Example #36
 def __init__(self, task, *args, **kwargs):
     """Initialize the handler with the task we are handling."""
     StreamHandler.__init__(self, *args, **kwargs)
     self.task = task
     self.clear()
Example #37
 def __init__(self):
     StreamHandler.__init__(self, stream=sys.stdout)
     self.setFormatter(logging.Formatter("%(asctime)s %(levelname)-5s %(message)s"))
Example #38
File: log.py Project: regit/nufw
 def __init__(self):
     StreamHandler.__init__(self)
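All of the examples above share the same pattern: the subclass performs its own setup and then delegates to StreamHandler.__init__ so that level, formatter and lock handling keep working. A minimal, self-contained sketch of how any such handler is wired into the logging tree (the class and logger names are illustrative, not taken from the examples):

import logging
from logging import StreamHandler

class EchoHandler(StreamHandler):
    # Illustrative subclass following the same pattern as the examples above.
    def __init__(self):
        StreamHandler.__init__(self)
        self.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))

logger = logging.getLogger('demo')
logger.setLevel(logging.INFO)
logger.addHandler(EchoHandler())
logger.info('custom handler attached')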