Example #1
0
def remove_streams():
    """Delete every configured stream (observations first, then the
    stream itself) for each sensor, then delete the sensor's group.

    Best-effort: a failure on one stream is logged and the remaining
    streams are still processed.
    """
    import FeralDecoder_SensorCloudConfig as SensorCloudConfig
    import FeralDecoder_Sensors as SensorConfig

    logger = logging.getLogger("Streams")

    # One API client for the whole run (was re-created per sensor).
    api_instance = sensor_cloud.DefaultApi()

    for sensor_id in SensorConfig.sensors:
        sensor_prefix = SensorConfig.sensors[sensor_id][kc.prefix]

        # Fully-qualified sensor id, e.g. "<prefix>.<sensor_id>".
        sensor_id_prefix = "%s.%s" % (sensor_prefix, sensor_id)

        for stream in SensorCloudConfig.streams:
            stream_id = sensor_id_prefix + "." + stream
            try:
                # Observations must be removed before the stream itself.
                api_instance.observations_delete(stream_id)
                api_instance.streams_id_delete(stream_id)
                logger.info("Deleted stream: %s", stream_id)
            except Exception:
                # Log with traceback and continue with the next stream.
                logger.exception("Error removing stream: %s", stream_id)

        api_instance.groups_id_delete(sensor_id_prefix)
def run():
    """Create a SensorCloud location for every configured sensor that
    does not already have one; existing locations are left untouched."""
    import FeralDecoder_SensorCloudConfig as SensorCloudConfig
    import FeralDecoder_Sensors as SensorConfig

    logger = logging.getLogger("Locations")

    for sensor_id in SensorConfig.sensors:
        prefix = SensorConfig.sensors[sensor_id][kc.prefix]
        group = prefix
        # Location id is the fully-qualified sensor id "<prefix>.<id>".
        location = "%s.%s" % (prefix, sensor_id)

        api_instance = sensor_cloud.DefaultApi()
        existing = api_instance.locations_get(id=location)

        # Anything in .embedded means the location is already there.
        if existing.embedded is not None:
            logger.info("Location: %s already exists, skipping." % location)
            continue

        # Register the location with a placeholder (0, 0, 0) point.
        body = sensor_cloud.LocationPost(
            id=location,
            organisationid=SensorCloudConfig.organisation,
            description=location,
            groupids=[group],
            geo_json={"type": "Point", "coordinates": [0, 0, 0]})
        api_instance.locations_id_put(location, body)
        logger.info("Created location: %s" % location)
Example #3
0
def add_streams():
    """For every configured sensor, ensure its per-sensor group exists
    and create any configured streams that are missing."""
    import FeralDecoder_SensorCloudConfig as SensorCloudConfig
    import FeralDecoder_Sensors as SensorConfig

    logger = logging.getLogger("Streams")

    for sensor_id in SensorConfig.sensors:
        prefix = SensorConfig.sensors[sensor_id][kc.prefix]
        group = prefix
        # Fully-qualified sensor id doubles as group id and location id.
        sensor_group = "%s.%s" % (prefix, sensor_id)
        location = sensor_group

        api_instance = sensor_cloud.DefaultApi()

        # Create the per-sensor group when it is missing.
        if api_instance.groups_get(id=sensor_group).embedded is None:
            logger.info("Creating group " + sensor_group)
            group_body = sensor_cloud.GroupPost(
                id=sensor_group,
                name=sensor_id,
                organisationid=SensorCloudConfig.organisation,
                description=sensor_group,
                groupids=[group])
            api_instance.groups_id_put(sensor_group, group_body)
        else:
            logger.info("Group: %s already exists, skipping." %
                        sensor_group)

        for stream in SensorCloudConfig.streams:
            stream_id = "%s.%s" % (sensor_group, stream)

            # Attribute-style access to the stream's config dict.
            cfg = Namespace(**SensorCloudConfig.streams[stream])

            # Skip streams that are already registered.
            if api_instance.streams_get(id=stream_id).embedded is not None:
                logger.info("Stream: %s already exists, skipping." % stream_id)
                continue

            stream_body = sensor_cloud.StreamPost(
                stream_id,
                locationid=location,
                organisationid=cfg.organisation,
                sample_period=cfg.samplePeriod,
                reporting_period=cfg.reportingPeriod,
                groupids=[group, sensor_group],
                stream_metadata=sensor_cloud.StreamMetadata(
                    type=".ScalarStreamMetaData",
                    observed_property=cfg.observedProperty,
                    unit_of_measure=cfg.unitOfMeasure,
                    interpolation_type=cfg.interpolationType,
                ),
                resulttype="scalarvalue")
            api_instance.streams_id_put(stream_id, stream_body)
            logger.info("Created stream: %s" % stream_id)
Example #4
0
def run():
    """Manual smoke test: post one observation (a NaN value) to the
    hard-coded 'jcu.test' stream and dump the API response.

    NOTE(review): this is debugging scaffold, Python 2 only (print
    statements). Credentials and alternate API calls are left commented
    out; confirm configuration before running outside a dev environment.
    """
    import VaisalaSender_SensorCloudConfig

    # Echo the configured password — presumably populated by the import
    # above; verify before sharing console output.
    print sensor_cloud.configuration.password

    # sensor_cloud.configuration.username = '******'
    # sensor_cloud.configuration.password = '******'

    # sensor_cloud.configuration.username = '******'
    # sensor_cloud.configuration.password = '******'

    api_instance = sensor_cloud.DefaultApi()
    streamid = 'jcu.test'  # str |
    # Show local vs UTC timestamps for comparison with the posted value.
    print datetime.now().isoformat()
    print datetime.utcnow().isoformat() + "Z"
    # NaN payload — exercises how the service handles non-finite values.
    values = {"v": float('nan')}
    values2 = {"v": 28}
    results = [
        UnivariateResult(t=datetime.utcnow().isoformat() + "Z", v=values)
    ]
    # results = [UnivariateResult(t="2015-11-12T00:00:00.000Z", v=values)]

    body = sensor_cloud.ObservationsPost(results=results)  # ObservationsPost |

    # stream_body = sensor_cloud.StreamPost()
    # Verbose logging so the underlying HTTP traffic is visible.
    FORMAT = '%(asctime)-15s %(levelname)-7s %(name)s %(filename)s:%(funcName)s:%(lineno)d - %(message)s'
    logging.basicConfig(format=FORMAT, level=logging.DEBUG)

    # hdlr = logging.StreamHandler(sys.stderr)
    # hdlr.setFormatter(logging.Formatter(FORMAT))
    # hdlr.setLevel(logging.NOTSET)
    # logging.root.addHandler(hdlr)

    pprint(body)
    # 'if True' replaces a commented-out try block — kept as-is.
    if True:
        # try:
        # Upload observations for a stream
        api_response = api_instance.observations_post(streamid, body)
        # api_response = api_instance.locations_get(id="coen.vaisalia.1")
        # api_response = api_instance.streams_id_get("coen.vaisala.1.temperature", recursive=True)
        # api_instance.streams_id_put("jcu.test.2", )
        print type(api_response)
        print dir(api_response)
        print api_response.attribute_map
        pprint(api_response)
def run():
    """Ensure the parent (prefix-level) group and the per-sensor group
    exist on SensorCloud for every configured sensor."""
    import FeralDecoder_SensorCloudConfig as SensorCloudConfig
    import FeralDecoder_Sensors as SensorConfig

    logger = logging.getLogger("Groups")

    # One API client for the whole run (was re-created per sensor).
    api_instance = sensor_cloud.DefaultApi()

    # Parent groups already verified/created during this run.
    parents = {}

    for sensor_id in SensorConfig.sensors:
        parent_group = SensorConfig.sensors[sensor_id][kc.prefix]

        # Per-sensor group id: "<prefix>.<sensor_id>".
        group = "%s.%s" % (parent_group, sensor_id)

        # Check/create each shared parent group only once per run.
        if parent_group not in parents:
            data = api_instance.groups_get(id=parent_group)
            if data.embedded is None:
                body = sensor_cloud.GroupPost(
                    id=parent_group,
                    name=parent_group.capitalize(),
                    organisationid=SensorCloudConfig.organisation)
                api_instance.groups_id_put(parent_group, body)
                logger.info("Created Parent Group: %s",
                            parent_group.capitalize())

            parents[parent_group] = True

        data = api_instance.groups_get(id=group)

        if data.embedded is None:
            body = sensor_cloud.GroupPost(
                id=group,
                name=sensor_id,
                organisationid=SensorCloudConfig.organisation,
                groupids=[parent_group])
            api_instance.groups_id_put(group, body)
            logger.info("Created Group: %s", group)
        else:
            # Fixed copy/paste bug: this message previously said "Location".
            logger.info("Group: %s already exists, skipping.", group)
def run():
    import VaisalaSender_SensorCloudConfig as SensorCloudConfig

    for stream in SensorCloudConfig.streams:
        stream_id = SensorCloudConfig.sensor_id_prefix + "." + stream
        # pprint(SensorCloudConfig.streams[stream])
        stream_data = Namespace(**SensorCloudConfig.streams[stream])
        print stream
        print stream_data.interpolationType
        body = sensor_cloud.StreamPost(
            stream_id,
            locationid=stream_data.location,
            organisationid=stream_data.organisation,
            sample_period=stream_data.samplePeriod,
            reporting_period=stream_data.reportingPeriod,
            stream_metadata=sensor_cloud.StreamMetadata(
                type=".ScalarStreamMetaData",
                observed_property=stream_data.observedProperty,
                unit_of_measure=stream_data.unitOfMeasure,
                interpolation_type=stream_data.interpolationType,
            ),
            resulttype="scalarvalue")
        api_instance = sensor_cloud.DefaultApi()
        api_instance.streams_id_put(stream_id, body)
Example #7
0
    def upload(self, entry, unpacked, count):
        """Post the first *count* decoded results to SensorCloud.

        For each result, one observation is posted per measured quantity
        (temperature, humidity, pressure, lux, battery voltage), each to
        its own "<prefix>.<devEUI>.<quantity>" stream.
        """
        node_id = entry["devEUI"]
        sensor_prefix = "%s.%s" % (sensors[node_id][kc.prefix], node_id)

        api_instance = sensor_cloud.DefaultApi()
        results = unpacked['results'][:count]

        # (stream suffix, result key) pairs — posted in this fixed order,
        # matching the original hand-unrolled call sequence.
        fields = (
            (fc.Temperature, 'temperature'),
            (fc.Humidity, 'humidity'),
            (fc.Pressure, 'pressure'),
            (fc.Lux, 'lux'),
            (fc.BatteryVoltage, 'battery'),
        )

        for result in results:
            for suffix, key in fields:
                self.post_observation(api_instance,
                                      "%s.%s" % (sensor_prefix, suffix),
                                      result[key], result['time_'])