def parse(line):
    '''Extract a Point from a metrics log line, or return None if the line
    does not carry a data point (or the payload cannot be decoded).'''
    found = re.match(r'^INFO METRIC: (.*)$', line)
    if not found:
        return None
    try:
        fields = json.loads(found.group(1))
        return Point(
            metric_type=fields.get('type'),
            metric=fields.get('metric'),
            value=fields.get('value'),
            tags=fields.get('tags'))
    except Exception as exc:  # pylint: disable=broad-except
        get_logger().warning('Error parsing metric: %s', exc)
        return None
def __init__(self, metric, tags=None, log_interval=DEFAULT_LOG_INTERVAL):
    """Set up accumulator state for `metric`.

    The running value starts at 0, `tags` defaults to an empty dict, and
    the creation time is recorded so the caller can throttle logging to
    roughly one emission per `log_interval`.
    """
    self.metric = metric
    self.value = 0
    self.tags = {} if not tags else tags
    self.log_interval = log_interval
    self.logger = get_logger()
    self.last_log_time = time.time()
import datetime import logging import re from jsonschema import RefResolver import singer.metadata from singer.logger import get_logger from singer.utils import (strftime, strptime_to_utc) LOGGER = get_logger() NO_INTEGER_DATETIME_PARSING = "no-integer-datetime-parsing" UNIX_SECONDS_INTEGER_DATETIME_PARSING = "unix-seconds-integer-datetime-parsing" UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING = "unix-milliseconds-integer-datetime-parsing" VALID_DATETIME_FORMATS = [ NO_INTEGER_DATETIME_PARSING, UNIX_SECONDS_INTEGER_DATETIME_PARSING, UNIX_MILLISECONDS_INTEGER_DATETIME_PARSING, ] def string_to_datetime(value): try: return strftime(strptime_to_utc(value)) except Exception as ex: LOGGER.warning("%s, (%s)", ex, value) return None def unix_milliseconds_to_datetime(value):
import json
import os

from singer import logger, metadata
from singer.catalog import Catalog

from tap_gbfs.streams import STREAM_OBJECTS

LOGGER = logger.get_logger()


def _get_abs_path(path):
    """Resolve `path` relative to the directory containing this module."""
    here = os.path.dirname(os.path.realpath(__file__))
    return os.path.join(here, path)


# Load schemas from schemas folder
def _load_schemas():
    """Read every file under the schemas folder into a dict keyed by the
    file name with '.json' stripped out."""
    schemas = {}
    schemas_dir = _get_abs_path("schemas")
    for entry in os.listdir(schemas_dir):
        stream_name = entry.replace(".json", "")
        with open(os.path.join(schemas_dir, entry)) as handle:
            schemas[stream_name] = json.load(handle)
    return schemas


def do_discover(client):
    raw_schemas = _load_schemas()
def __init__(self, metric, tags):
    """Record the metric name and tags; timing has not started yet,
    so start_time is left as None."""
    self.metric = metric
    self.tags = {} if not tags else tags
    self.logger = get_logger()
    self.start_time = None