Code Example #1
File: lpp.py  Project: robertcv/DICE-BigData-Traffic
    def __init__(self, conf):
        """
        Initialize Kafka producers and web scraper classes. Also load station
        and route data.

        Args:
            conf (dict): This dict contains all configurations.

        """
        self.conf = conf['lpp']  # global lpp settings
        self.conf_lj = conf['location']  # lng and lat boundaries of Ljubljana
        self.conf_s = conf['scraper'].copy()  # scraper settings
        self.conf_s['timeout'] = 1
        self.conf_si = self.conf_s.copy()  # settings for ignoring status code
        self.conf_si['ignore_status_code'] = True
        self.w_scraper = scraper.Scraper(self.conf_s)
        # Some combinations of route - station do not have arrival time data,
        # which means that we get a response code different from 200. When this
        # happens we do not want to interrupt the program, because this is not
        # really an error. The solution is to create a web scraper that ignores
        # status code errors.
        self.w_scraper_ignore = scraper.Scraper(self.conf_si)
        self.day = date_time.today_timestamp()

        self.live_producer = kafka_producer.Producer(
            conf['kafka_host'], self.conf['live']['kafka_topic'])
        self.station_producer = kafka_producer.Producer(
            conf['kafka_host'], self.conf['station']['kafka_topic'])
        self.static_producer = kafka_producer.Producer(
            conf['kafka_host'], self.conf['static']['kafka_topic'])

        self.stations_data_file = \
            conf['data_dir'] + self.conf['station']['data_file']
        self.stations_data = None

        self.routes_data_file = \
            conf['data_dir'] + self.conf['route']['data_file']
        self.routes_data = None

        self.routes_on_stations_data_file = \
            conf['data_dir'] + self.conf['routes_on_station']['data_file']
        self.routes_on_stations_data = None
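For orientation, here is a minimal sketch of the conf dict this constructor dereferences, reconstructed from the key accesses above. All concrete values are placeholders, and the key names inside 'location' and 'scraper' are not shown in this snippet, so they are left empty:

conf = {
    'kafka_host': 'localhost:9092',   # placeholder broker address
    'data_dir': '/tmp/data/',         # placeholder; paths are concatenated, so keep the trailing slash
    'location': {},                   # lng/lat boundaries of Ljubljana (keys not visible in this snippet)
    'scraper': {},                    # base scraper settings; 'timeout' is overwritten to 1 above
    'lpp': {
        'live': {'kafka_topic': 'lpp_live'},
        'station': {'kafka_topic': 'lpp_station', 'data_file': 'lpp_stations.json'},
        'static': {'kafka_topic': 'lpp_static'},
        'route': {'data_file': 'lpp_routes.json'},
        'routes_on_station': {'data_file': 'lpp_routes_on_stations.json'},
    },
}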
Code Example #2
    def __init__(self, conf):
        """
        Initialize Kafka producer and web scraper classes.

        Args:
            conf (dict): This dict contains all configurations.

        """
        self.conf = conf['pollution']
        self.producer = kafka_producer.Producer(conf['kafka_host'],
                                                self.conf['kafka_topic'])
        self.w_scraper = scraper.Scraper(conf['scraper'])
Code Example #3
    def __init__(self, conf):
        """
        Initialize Kafka producer and Elasticsearch connection.

        Args:
            conf (dict): This dict contains all configurations.

        """
        self.conf = conf['inductive_loops']
        self.producer = kafka_producer.Producer(conf['kafka_host'],
                                                self.conf['kafka_topic'])
        self.ess = es_search.EsSearch(self.conf['es_host'],
                                      self.conf['es_port'],
                                      self.conf['es_index'])
        self.img_dir = conf['data_dir'] + self.conf['img_dir']
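Analogously, the shape of conf this constructor expects can be inferred from the attribute accesses; hosts, ports, and names below are placeholders, not values from the project:

conf = {
    'kafka_host': 'localhost:9092',           # placeholder broker address
    'data_dir': '/tmp/data/',                 # placeholder; trailing slash matters for concatenation
    'inductive_loops': {
        'kafka_topic': 'inductive_loops',     # placeholder topic name
        'es_host': 'localhost',               # placeholder Elasticsearch host
        'es_port': 9200,                      # placeholder Elasticsearch port
        'es_index': 'inductive_loops_index',  # placeholder index name
        'img_dir': 'inductive_loops_img/',    # placeholder image subdirectory
    },
}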
Code Example #4
    def __init__(self, conf):
        """
        Initialize Kafka producer and web scraper classes. Also load counters
        data.

        Args:
            conf (dict): This dict contains all configurations.

        """
        self.conf = conf['counters']
        self.conf_lj = conf['location']
        self.producer = kafka_producer.Producer(conf['kafka_host'],
                                                self.conf['kafka_topic'])
        self.w_scraper = scraper.Scraper(conf['scraper'])
        self.counters_data = None
        self.img_dir = conf['data_dir'] + self.conf['img_dir']
Code Example #5
    def __init__(self, conf):
        """
        Initialize Kafka producer and web scraper classes. Also load Bluetooth
        sensor data.

        Args:
            conf (dict): This dict contains all configurations.

        """
        self.conf = conf['bt_sensors']
        self.producer = kafka_producer.Producer(conf['kafka_host'],
                                                self.conf['kafka_topic'])

        self.w_scraper = scraper.Scraper(conf['scraper'],
                                         auth=(self.conf['timon_username'],
                                               self.conf['timon_password']),
                                         verify=self.conf['timon_crt_file'])

        self.not_lj = self.conf['not_lj']

        self.img_dir = conf['data_dir'] + self.conf['img_dir']
        self.sensors_data_file = conf['data_dir'] + self.conf['data_file']
        self.sensors_data = None
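For this scraper the conf must additionally carry Timon credentials and a certificate path, which are passed through to the scraper as HTTP auth and TLS verification arguments. A hedged sketch with placeholder values:

conf = {
    'kafka_host': 'localhost:9092',             # placeholder broker address
    'data_dir': '/tmp/data/',                   # placeholder data directory
    'scraper': {},                              # base scraper settings (keys not shown here)
    'bt_sensors': {
        'kafka_topic': 'bt_sensors',            # placeholder topic name
        'timon_username': 'user',               # placeholder credentials
        'timon_password': 'secret',
        'timon_crt_file': '/path/to/timon.crt', # certificate used for TLS verification
        'not_lj': [],                           # presumably sensors outside Ljubljana; placeholder value
        'img_dir': 'bt_sensors_img/',           # placeholder image subdirectory
        'data_file': 'bt_sensors.json',         # placeholder sensors data file
    },
}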
Code Example #6
    def setUp(self):
        self.mock_kafka = mock.Mock()
        kafka_producer.KafkaProducer = self.mock_kafka
        self.kafka = kafka_producer.Producer('127.0.0.1:9092', 'topic')
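This fixture monkey-patches kafka_producer.KafkaProducer with a mock before constructing Producer, so tests run without a live Kafka broker. Wrapped in a complete TestCase it might look like the sketch below; the imports and the assertion about Producer's internals (that it instantiates KafkaProducer exactly once) are assumptions, not taken from the project:

import unittest
from unittest import mock

import kafka_producer

class ProducerTest(unittest.TestCase):
    def setUp(self):
        # Replace the real KafkaProducer class with a mock before
        # constructing Producer, so no Kafka broker is required.
        self.mock_kafka = mock.Mock()
        kafka_producer.KafkaProducer = self.mock_kafka
        self.kafka = kafka_producer.Producer('127.0.0.1:9092', 'topic')

    def test_producer_created(self):
        # Assumed behavior: Producer.__init__ instantiates KafkaProducer
        # once, so the mocked class records exactly one call.
        self.mock_kafka.assert_called_once()

if __name__ == '__main__':
    unittest.main()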