def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            super()._create_datasource(headers)

            self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

            ### Renaming the columns so that they are not confused with GreenwichMeta
            self.df.rename(index=str, columns={'baycount': 'baycount_2',
                                               'baytype': 'baytype_2'},
                           inplace=True)

            loc = Location('latitude', 'longitude')
            self.create_datasource(dataframe=self.df, sensor_tag='lotcode', attribute_tag=['baycount_2', 'baytype_2'],
                                   unit_value=[], bespoke_unit_tag=[], description=[], bespoke_sub_theme=[],
                                   location_tag=loc, sensor_prefix='smart_parking_2_',
                                   api_timestamp_tag='run_time_stamp',
                                   is_dependent=True)
            self.importer_status.status = Status.success(__class__.__name__)

        except Exception as e:
            self.importer_status.status = Status.failure(__class__.__name__, e.__str__(), traceback.format_exc())
Example 2
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            if not headers:
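                ### HEADERS uses single quotes, so convert them to double quotes before parsing as JSON.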
                headers = json.loads(self.HEADERS.replace("'", '"'))

            super()._create_datasource(headers)
            self.df = self.create_dataframe(object_separator=None)
            self.df['api_timestamp_tag'] = datetime.now().timestamp()
            loc = Location('latitudine', 'longitudine')
            self.create_datasource(dataframe=self.df,
                                   sensor_tag='dev_eui',
                                   attribute_tag=['dev_eui'],
                                   unit_value=[],
                                   bespoke_unit_tag=[],
                                   description=['descrizione'],
                                   bespoke_sub_theme=[],
                                   location_tag=loc,
                                   sensor_prefix='',
                                   api_timestamp_tag='api_timestamp_tag',
                                   is_dependent=True)
            self.importer_status.status = Status.success(__class__.__name__)
        except Exception as e:
            self.importer_status.status = Status.failure(
                __class__.__name__, e.__str__(), traceback.format_exc())
Example 3
    def _create_datasource(self,
                           headers=json.loads(HEADERS_K.replace("'", '"'))):
        super()._create_datasource(headers)

        self.df = self.create_dataframe(object_separator=None)

        self.df['latitude'] = 0.
        self.df['longitude'] = 0.

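        ### Look up each sensor's registered location and copy its coordinates into the dataframe.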
        for i in range(0, len(self.df)):
            lid = Sensor.get_by_name_in(
                [self.df.parkingSpotSensorCode.iloc[i]])[0].l_id
            loc = location.Location.get_by_id_in([lid])[0]
            self.df.at[i, 'latitude'] = loc.lat
            self.df.at[i, 'longitude'] = loc.lon

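        ### Convert the record datetime to an integer (epoch nanoseconds) timestamp.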
        self.df['api_timestamp_tag'] = pd.to_datetime(self.df['datetime'])
        self.df['api_timestamp_tag'] = self.df['api_timestamp_tag'].astype(int)
        loc = Location('latitude', 'longitude')

        self.create_datasource(dataframe=self.df,
                               sensor_tag='parkingSpotSensorCode',
                               attribute_tag=['state'],
                               unit_value=[],
                               bespoke_unit_tag=[],
                               description=['No Description'],
                               bespoke_sub_theme=[],
                               location_tag=loc,
                               sensor_prefix='',
                               api_timestamp_tag='api_timestamp_tag')
Example 4
    def _create_datasource(self, headers=None):
        super()._create_datasource(headers)
        self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

        ### Hardcoding the location
        self.df['latitude'] = 51.484216
        self.df['longitude'] = 0.002162
        self.df['description'] = 'residential house energy consumption'
        
        ### Faking a sensor id since the api returns data only from one sensor
        ### Need to modify it when the api starts sourcing data from more sensors
        self.df['tag'] = 0

        ### The attribute names are too long to use for name+hash table names.
        self.df.rename(index=str, columns={'power_wm_sid_761573_wholehouse': 'power_sid_761573',
                                           'light_avg_lux_sid_400191_e_room': 'avg_lux_sid_400191',
                                           'temp_avg_degc_sid_400191_e_room': 'temp_sid_400191'},
                       inplace=True)

        loc = Location('latitude', 'longitude')

        self.create_datasource(dataframe=self.df, sensor_tag='tag', attribute_tag=['power_sid_761573',
                                                                                   'avg_lux_sid_400191',
                                                                                   'temp_sid_400191'],
                               unit_value=[4, 5, 6], bespoke_sub_theme=[3, 3, 1],
                               bespoke_unit_tag=[4, 5, 6],
                               location_tag=loc,
                               description=['description', 'description', 'description'],
                               api_timestamp_tag='time',
                               sensor_prefix='',
                               is_dependent=True)
Example 5
    def _create_datasource(self, headers=None):
        super()._create_datasource(headers)
        columns = [
            'device_title', 'device_eui', 'device_description', 'driver_type',
            'code', 'max_tilt', 'temperature', 'dimmer_perc', 'dimmer_read',
            'dimmer_default', 'dimmer_set', 'datetime', 'do2',
            'firmware_version', 'tilt_angle', 'connected_device',
            'energy_delivered', 'di4', 'di5', 'energy_consumed', 'do1', 'di1',
            'di2', 'di3', 'family_id', 'lat', 'lng', 'device_id'
        ]
        self.df = self.create_dataframe(object_separator='device_title')
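        ### Keep only the street-light ('Lampione') devices and restrict the dataframe to the expected columns.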
        self.df = self.df[self.df['device_title'] == 'Lampione']
        self.df = self.df[columns]
        loc = Location('lat', 'lng')
        self.create_datasource(dataframe=self.df,
                               sensor_tag='device_id',
                               attribute_tag=[
                                   'temperature', 'dimmer_perc', 'dimmer_read',
                                   'dimmer_default', 'dimmer_set', 'do2',
                                   'tilt_angle', 'connected_device',
                                   'energy_delivered', 'di4', 'di5',
                                   'energy_consumed', 'do1', 'di1', 'di2',
                                   'di3'
                               ],
                               unit_value=[],
                               bespoke_unit_tag=[],
                               description=['No Description'],
                               bespoke_sub_theme=[],
                               location_tag=loc,
                               sensor_prefix='Lampione_',
                               api_timestamp_tag='datetime')
Example 6
    def _create_datasource(self,
                           headers=json.loads(HEADERS_KM.replace("'", '"'))):
        super()._create_datasource(headers)

        self.df = self.create_dataframe(object_separator=None)
        self.df['api_timestamp_tag'] = datetime.now().timestamp()

        ### The response contains null coordinates; fill them with the SHCS02001 coordinates.
        self.df['latitude'].fillna(value=self.df.iloc[0]['latitude'],
                                   inplace=True)
        self.df['longitude'].fillna(value=self.df.iloc[0]['longitude'],
                                    inplace=True)

        loc = Location('latitude', 'longitude')
        self.create_datasource(
            dataframe=self.df,
            sensor_tag='code',
            attribute_tag=['parkingSpotType', 'positionOnMap'],
            unit_value=[],
            bespoke_unit_tag=[],
            description=['description'],
            bespoke_sub_theme=[],
            location_tag=loc,
            sensor_prefix='',
            api_timestamp_tag='api_timestamp_tag',
            is_dependent=True)
Example 7
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            super()._create_datasource(headers)
            self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

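            ### Resolve coordinates for the already-registered 'smart_parking_2_' sensors and attach them to the dataframe.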
            names = self.df['lotcode'].tolist()
            name_set = set()
            location_sensor = {}
            sensor_location = {}
            latitude = []
            longitude = []

            for s in names:
                name_set.add('smart_parking_2_' + str(s))

            sensors = Sensor.get_by_name_in(name_set)
            loc_ids = []
            for s in sensors:
                loc_ids.append(s.l_id)
                location_sensor[s.l_id] = s
            locations = location.Location.get_by_id_in(loc_ids)

            for loc in locations:
                if loc.id in location_sensor:
                    _sensor = location_sensor[loc.id]
                    sensor_location[_sensor.name] = loc

            for s in names:
                _s = 'smart_parking_2_' + str(s)
                if _s in sensor_location:
                    latitude.append(sensor_location[_s].lat)
                    longitude.append(sensor_location[_s].lon)

            self.df['latitude'] = latitude
            self.df['longitude'] = longitude

            self.df.rename(index=str, columns={'free': 'free_2',
                                               'isoffline': 'isoffline_2',
                                               'occupied': 'occupied_2'},
                           inplace=True)

            loc = Location('latitude', 'longitude')
            self.create_datasource(dataframe=self.df, sensor_tag='lotcode',
                                   attribute_tag=['free_2', 'isoffline_2', 'occupied_2'],
                                   unit_value=[], bespoke_unit_tag=[], description=[], bespoke_sub_theme=[],
                                   location_tag=loc,
                                   sensor_prefix='smart_parking_2_', api_timestamp_tag='run_time_stamp',
                                   is_dependent=True)

            self.importer_status.status = Status.success(__class__.__name__)

        except Exception as e:
            self.importer_status.status = Status.failure(__class__.__name__, e.__str__(), traceback.format_exc())
Example 8
    def _create_datasource(self, headers=None):
        super()._create_datasource(headers)
        self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

        loc = Location('latitude', 'longitude')
        self.create_datasource(dataframe=self.df, sensor_tag='lotcode', attribute_tag=['baycount', 'baytype'],
                               unit_value=[], bespoke_unit_tag=[], description=[], bespoke_sub_theme=[],
                               location_tag=loc, sensor_prefix='smart_parking_', api_timestamp_tag='run_time_stamp',
                               is_dependent=True)
Example 9
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            super()._create_datasource(headers)
            self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

            ### Hardcoding the location. As there is no information on the location of the sensor,
            ### the centroid coordinates of Greenwich are used.
            self.df['latitude'] = 51.482877
            self.df['longitude'] = -0.007516
            self.df['description'] = 'siemens energy'

            ### Faking a sensor id since the api returns data only from one sensor
            ### Need to modify it when the api starts sourcing data from more sensors
            self.df['tag'] = 0

            ### The _create_datasource method currently fails if the dataframe contains null values,
            ### e.g.  displayFieldName   date_time       b1_heat_value   b1_flow_value
            ###       0                  1521480600000   20              null
            ### The only way to import is to drop the nulls, but this drops the whole row.
            ### We could substitute a default value such as -999, but a more flexible approach is needed.
            ### For now (for illustrative purposes) we just drop them.
            self.df.dropna(inplace=True)
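            ### A possible alternative (a sketch, not what this importer does): keep the rows and
            ### replace nulls with a sentinel value instead of dropping them, e.g.
            ###     self.df.fillna(value=-999, inplace=True)
            ### where -999 is an arbitrary placeholder, not a value defined by the API.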

            loc = Location('latitude', 'longitude')

            self.create_datasource(dataframe=self.df, sensor_tag='tag', attribute_tag=['b1_heat_value',
                                                                                       'b1_flow_value',
                                                                                       'b1_temp_out_value',
                                                                                       'b1_temp_back_value',
                                                                                       'b2_heat_value',
                                                                                       'b2_flow_value',
                                                                                       'b2_temp_out_value',
                                                                                       'b2_temp_back_value',
                                                                                       'b3_heat_value',
                                                                                       'b3_flow_value',
                                                                                       'b3_temp_out_value',
                                                                                       'b3_temp_back_value'],
                                   unit_value=[], bespoke_sub_theme=[],
                                   bespoke_unit_tag=[],
                                   location_tag=loc,
                                   description=[],
                                   api_timestamp_tag='run_time_stamp',
                                   sensor_prefix='',
                                   is_dependent=True)

            self.importer_status.status = Status.success(__class__.__name__)

        except Exception as e:
            self.importer_status.status = Status.failure(__class__.__name__, e.__str__(), traceback.format_exc())
Example 10
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            if not headers:
                headers = json.loads(self.HEADERS.replace("'", '"'))

            super()._create_datasource(headers)

            self.df = self.create_dataframe(object_separator=None)
            names = self.df['dev_eui'].tolist()
            name_set = set()
            location_sensor = {}

            for s in names:
                name_set.add(str(s))

            sensors = Sensor.get_by_name_in(name_set)
            loc_ids = []
            for s in sensors:
                loc_ids.append(s.l_id)
                location_sensor[s.l_id] = s
            locations = location.Location.get_by_id_in(loc_ids)

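            ### Pair each sensor name with a location; this relies on get_by_id_in returning locations in the same order as loc_ids.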
            d = dict(zip([n.name for n in sensors], locations))

            self.df['latitude'] = self.df['dev_eui'].apply(
                lambda x: d.get(x).lat)
            self.df['longitude'] = self.df['dev_eui'].apply(
                lambda x: d.get(x).lon)
            self.df['api_timestamp_tag'] = pd.to_datetime(self.df['data'])
            self.df['api_timestamp_tag'] = self.df['api_timestamp_tag'].astype(
                int)

            loc = Location('latitude', 'longitude')

            self.create_datasource(
                dataframe=self.df,
                sensor_tag='dev_eui',
                attribute_tag=['pressione', 'temperatura', 'umidita'],
                unit_value=[],
                bespoke_unit_tag=[],
                description=['No Description'],
                bespoke_sub_theme=[],
                location_tag=loc,
                sensor_prefix='',
                api_timestamp_tag='api_timestamp_tag')
            self.importer_status.status = Status.success(__class__.__name__)
        except Exception as e:
            self.importer_status.status = Status.failure(
                __class__.__name__, e.__str__(), traceback.format_exc())
Example 11
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            if not headers:
                headers = json.loads(self.HEADERS.replace("'", '"'))
            super()._create_datasource(headers)

            data = self.load_dataset(headers)
            df = pd.DataFrame(
                columns=['plate', 'rentalState', 'date', 'duration'])
            index = 0

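            ### Flatten the nested vehicles/statuses payload into one row per status, computing each rental duration in seconds.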
            for plate in data[0]['vehicles']:
                for s in plate['statuses']:
                    df.at[index, 'plate'] = plate['plate']
                    df.at[index, 'rentalState'] = s['rentalState']
                    df.at[index, 'date'] = s['dateFrom']
                    df.at[index, 'duration'] = np.abs((datetime.strptime(s['dateTill'], '%Y-%m-%dT%H:%M:%SZ') - \
                                                       datetime.strptime(s['dateFrom'],
                                                                         '%Y-%m-%dT%H:%M:%SZ')).total_seconds())
                    index = index + 1

            df['latitude'] = 45.443384
            df['longitude'] = 9.221501
            loc = Location('latitude', 'longitude')

            df['api_timestamp_tag'] = pd.to_datetime(df['date'])
            df['api_timestamp_tag'] = df['api_timestamp_tag'].astype(int)

            self.create_datasource(dataframe=df,
                                   sensor_tag='plate',
                                   attribute_tag=['rentalState', 'duration'],
                                   unit_value=[7, 8],
                                   bespoke_unit_tag=[],
                                   description=[
                                       'Information on activities '
                                       'related to two e-cars used by '
                                       'the inhabitants of a '
                                       'condominium located in viale '
                                       'Bacchiglione'
                                   ],
                                   bespoke_sub_theme=[2, 2],
                                   location_tag=loc,
                                   sensor_prefix='',
                                   api_timestamp_tag='api_timestamp_tag')
            self.importer_status.status = Status.success(__class__.__name__)
        except Exception as e:
            self.importer_status.status = Status.failure(
                __class__.__name__, e.__str__(), traceback.format_exc())
Example 12
    def _create_datasource(self, headers=None):
        super()._create_datasource(headers)
        self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

        ### Renaming the columns so that they are not confused with GreenwichMeta
        self.df.rename(index=str, columns={'baycount': 'baycount_2',
                                           'baytype': 'baytype_2'},
                       inplace=True)

        loc = Location('latitude', 'longitude')
        self.create_datasource(dataframe=self.df, sensor_tag='lotcode', attribute_tag=['baycount_2', 'baytype_2'],
                               unit_value=[], bespoke_unit_tag=[], description=[], bespoke_sub_theme=[],
                               location_tag=loc, sensor_prefix='smart_parking_2_', api_timestamp_tag='run_time_stamp',
                               is_dependent=True)
Example 13
    def _create_datasource(self, headers=None):
        super()._create_datasource(headers)
        self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

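        ### Look up the previously registered 'smart_parking_2_' sensors and copy their stored coordinates into the dataframe.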
        names = self.df['lotcode'].tolist()
        name_set = set()
        location_sensor = {}
        sensor_location = {}
        latitude = []
        longitude = []

        for s in names:
            name_set.add('smart_parking_2_' + str(s))

        sensors = Sensor.get_by_name_in(name_set)
        loc_ids = []
        for s in sensors:
            loc_ids.append(s.l_id)
            location_sensor[s.l_id] = s
        locations = location.Location.get_by_id_in(loc_ids)

        for loc in locations:
            if loc.id in location_sensor:
                _sensor = location_sensor[loc.id]
                sensor_location[_sensor.name] = loc
       
        for s in names:
            _s = 'smart_parking_2_' + str(s)
            if _s in sensor_location:
                latitude.append(sensor_location[_s].lat)
                longitude.append(sensor_location[_s].lon)

        self.df['latitude'] = latitude
        self.df['longitude'] = longitude

        ### Renaming the columns so that they are not confused with GreenwichOCC
        self.df.rename(index=str, columns={'free': 'free_2',
                                           'isoffline': 'isoffline_2',
                                           'occupied': 'occupied_2'},
                       inplace=True)

        loc = Location('latitude', 'longitude')
        self.create_datasource(dataframe=self.df, sensor_tag='lotcode', attribute_tag=['free_2', 'isoffline_2', 'occupied_2'],
                               unit_value=[], bespoke_unit_tag=[], description=[], bespoke_sub_theme=[], location_tag=loc,
                               sensor_prefix='smart_parking_2_', api_timestamp_tag='run_time_stamp', is_dependent=True)
Example 14
    def _create_datasource(self,
                           headers=json.loads(HEADERS_SMM.replace("'", '"'))):
        super()._create_datasource(headers)

        self.df = self.create_dataframe(object_separator=None)
        self.df['api_timestamp_tag'] = datetime.now().timestamp()
        loc = Location('latitudine', 'longitudine')
        self.create_datasource(dataframe=self.df,
                               sensor_tag='dev_eui',
                               attribute_tag=['dev_eui'],
                               unit_value=[],
                               bespoke_unit_tag=[],
                               description=['descrizione'],
                               bespoke_sub_theme=[],
                               location_tag=loc,
                               sensor_prefix='',
                               api_timestamp_tag='api_timestamp_tag',
                               is_dependent=True)
Example 15
    def _create_datasource(self,
                           headers=json.loads(HEADERS_SM.replace("'", '"'))):
        super()._create_datasource(headers)

        self.df = self.create_dataframe(object_separator=None)
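        ### Attach latitude/longitude from the sensors' stored locations, keyed by dev_eui.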
        names = self.df['dev_eui'].tolist()
        name_set = set()
        location_sensor = {}
        sensor_location = {}
        sensor_name_location = {}

        latitude = []
        longitude = []

        for s in names:
            name_set.add(str(s))

        sensors = Sensor.get_by_name_in(name_set)
        loc_ids = []
        for s in sensors:
            loc_ids.append(s.l_id)
            location_sensor[s.l_id] = s
        locations = location.Location.get_by_id_in(loc_ids)

        d = dict(zip([n.name for n in sensors], locations))

        self.df['latitude'] = self.df['dev_eui'].apply(lambda x: d.get(x).lat)
        self.df['longitude'] = self.df['dev_eui'].apply(lambda x: d.get(x).lon)
        self.df['api_timestamp_tag'] = pd.to_datetime(self.df['data'])
        self.df['api_timestamp_tag'] = self.df['api_timestamp_tag'].astype(int)

        loc = Location('latitude', 'longitude')

        self.create_datasource(
            dataframe=self.df,
            sensor_tag='dev_eui',
            attribute_tag=['pressione', 'temperatura', 'umidita'],
            unit_value=[],
            bespoke_unit_tag=[],
            description=['No Description'],
            bespoke_sub_theme=[],
            location_tag=loc,
            sensor_prefix='',
            api_timestamp_tag='api_timestamp_tag')
Example 16
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            super()._create_datasource(headers)
            self.df = self.create_dataframe(ignore_object_tags=['fieldAliases', 'fields'])

            ### Hardcoding the location
            self.df['latitude'] = 51.484216
            self.df['longitude'] = 0.002162
            self.df['description'] = 'residential house energy consumption'

            ### Faking a sensor id since the api returns data only from one sensor
            ### Need to modify it when the api starts sourcing data from more sensors
            self.df['tag'] = 0

            ### The attribute names are too long to use for name+hash table names.
            self.df.rename(index=str, columns={'power_wm_sid_761573_wholehouse': 'power_sid_761573',
                                               'light_avg_lux_sid_400191_e_room': 'avg_lux_sid_400191',
                                               'temp_avg_degc_sid_400191_e_room': 'temp_sid_400191'},
                           inplace=True)

            loc = Location('latitude', 'longitude')

            self.create_datasource(dataframe=self.df, sensor_tag='tag', attribute_tag=['power_sid_761573',
                                                                                       'avg_lux_sid_400191',
                                                                                       'temp_sid_400191'],
                                   unit_value=[4, 5, 6], bespoke_sub_theme=[3, 3, 1],
                                   bespoke_unit_tag=[4, 5, 6],
                                   location_tag=loc,
                                   description=['description', 'description', 'description'],
                                   api_timestamp_tag='time',
                                   sensor_prefix='',
                                   is_dependent=True)

            self.importer_status.status = Status.success(__class__.__name__)

        except Exception as e:
            self.importer_status.status = Status.failure(__class__.__name__, e.__str__(), traceback.format_exc())
Example 17
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            super()._create_datasource(headers)
            columns = [
                'device_title', 'device_eui', 'device_description',
                'driver_type', 'code', 'max_tilt', 'temperature',
                'dimmer_perc', 'dimmer_read', 'dimmer_default', 'dimmer_set',
                'datetime', 'do2', 'firmware_version', 'tilt_angle',
                'connected_device', 'energy_delivered', 'di4', 'di5',
                'energy_consumed', 'do1', 'di1', 'di2', 'di3', 'family_id',
                'lat', 'lng', 'device_id'
            ]
            self.df = self.create_dataframe(object_separator='device_title')
            self.df = self.df[self.df['device_title'] == 'Lampione']
            self.df = self.df[columns]
            loc = Location('lat', 'lng')
            self.create_datasource(dataframe=self.df,
                                   sensor_tag='device_id',
                                   attribute_tag=[
                                       'temperature', 'dimmer_perc',
                                       'dimmer_read', 'dimmer_default',
                                       'dimmer_set', 'do2', 'tilt_angle',
                                       'connected_device', 'energy_delivered',
                                       'di4', 'di5', 'energy_consumed', 'do1',
                                       'di1', 'di2', 'di3'
                                   ],
                                   unit_value=[],
                                   bespoke_unit_tag=[],
                                   description=['No Description'],
                                   bespoke_sub_theme=[],
                                   location_tag=loc,
                                   sensor_prefix='Lampione_',
                                   api_timestamp_tag='datetime')
            self.importer_status.status = Status.success(__class__.__name__)
        except Exception as e:
            self.importer_status.status = Status.failure(
                __class__.__name__, e.__str__(), traceback.format_exc())
Example 18
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            if not headers:
                headers = json.loads(self.HEADERS.replace("'", '"'))

            super()._create_datasource(headers)
            self.df = self.create_dataframe(object_separator=None)

            self.df['latitude'] = 0.
            self.df['longitude'] = 0.

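            ### Fill in each row's coordinates from the sensor's stored location.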
            for i in range(0, len(self.df)):
                lid = Sensor.get_by_name_in(
                    [self.df.parkingSpotSensorCode.iloc[i]])[0].l_id
                loc = location.Location.get_by_id_in([lid])[0]
                self.df.at[i, 'latitude'] = loc.lat
                self.df.at[i, 'longitude'] = loc.lon

            self.df['api_timestamp_tag'] = pd.to_datetime(self.df['datetime'])
            self.df['api_timestamp_tag'] = self.df['api_timestamp_tag'].astype(
                int)
            loc = Location('latitude', 'longitude')

            self.create_datasource(dataframe=self.df,
                                   sensor_tag='parkingSpotSensorCode',
                                   attribute_tag=['state'],
                                   unit_value=[],
                                   bespoke_unit_tag=[],
                                   description=['No Description'],
                                   bespoke_sub_theme=[],
                                   location_tag=loc,
                                   sensor_prefix='',
                                   api_timestamp_tag='api_timestamp_tag')
            self.importer_status.status = Status.success(__class__.__name__)
        except Exception as e:
            self.importer_status.status = Status.failure(
                __class__.__name__, e.__str__(), traceback.format_exc())
Example 19
    def _create_datasource(self, headers: Union[str, None] = None) -> None:
        """
        Create DataSource
        :param headers: Request Headers
        """
        try:
            if not headers:
                headers = json.loads(self.HEADERS.replace("'", '"'))

            super()._create_datasource(headers)

            self.df = self.create_dataframe(object_separator=None)
            self.df['api_timestamp_tag'] = datetime.now().timestamp()

            ### The response contains null coordinates; fill them with the SHCS02001 coordinates.
            self.df['latitude'].fillna(value=self.df.iloc[0]['latitude'],
                                       inplace=True)
            self.df['longitude'].fillna(value=self.df.iloc[0]['longitude'],
                                        inplace=True)

            loc = Location('latitude', 'longitude')
            self.create_datasource(
                dataframe=self.df,
                sensor_tag='code',
                attribute_tag=['parkingSpotType', 'positionOnMap'],
                unit_value=[],
                bespoke_unit_tag=[],
                description=['description'],
                bespoke_sub_theme=[],
                location_tag=loc,
                sensor_prefix='',
                api_timestamp_tag='api_timestamp_tag',
                is_dependent=True)
            self.importer_status.status = Status.success(__class__.__name__)
        except Exception as e:
            self.importer_status.status = Status.failure(
                __class__.__name__, e.__str__(), traceback.format_exc())