Example #1
def publishBirths():
    print("Publishing Birth")

    # Create the NBIRTH payload
    payload = sparkplug.getNodeBirthPayload()

    # Add the Node Controls
    addMetric(payload, "Node Control/Next Server", None, MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Rebirth", None, MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Reboot", None, MetricDataType.Boolean, False)

    # Set up the device Parameters
    p = subprocess.Popen('uname -a', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    unameOutput = p.stdout.read().decode().strip()
    p.wait()
    p = subprocess.Popen('cat /proc/cpuinfo | grep Hardware', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    hardwareOutput = p.stdout.read().decode().strip()
    p.wait()
    p = subprocess.Popen('cat /proc/cpuinfo | grep Revision', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    revisionOutput = p.stdout.read().decode().strip()
    p.wait()
    p = subprocess.Popen('cat /proc/cpuinfo | grep Serial', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    serialOutput = p.stdout.read().decode().strip()
    p.wait()
    addMetric(payload, "Parameters/sw_version", None, MetricDataType.String, unameOutput)
    addMetric(payload, "Parameters/hw_version", None, MetricDataType.String, hardwareOutput)
    addMetric(payload, "Parameters/hw_revision", None, MetricDataType.String, revisionOutput)
    addMetric(payload, "Parameters/hw_serial", None, MetricDataType.String, serialOutput)

    # Publish the NBIRTH certificate
    byteArray = bytearray(payload.SerializeToString())
    client.publish("spBv1.0/" + myGroupId + "/NBIRTH/" + myNodeName, byteArray, 0, False)

    # Set up the DBIRTH with the input metrics
    payload = sparkplug.getDeviceBirthPayload()

    addMetric(payload, "Inputs/a", None, MetricDataType.Boolean, pibrella.input.a.read())
    addMetric(payload, "Inputs/b", None, MetricDataType.Boolean, pibrella.input.b.read())
    addMetric(payload, "Inputs/c", None, MetricDataType.Boolean, pibrella.input.c.read())
    addMetric(payload, "Inputs/d", None, MetricDataType.Boolean, pibrella.input.d.read())

    # Set up the output states on first run so Ignition and MQTT Engine are aware of them
    addMetric(payload, "Outputs/e", None, MetricDataType.Boolean, pibrella.output.e.read())
    addMetric(payload, "Outputs/f", None, MetricDataType.Boolean, pibrella.output.f.read())
    addMetric(payload, "Outputs/g", None, MetricDataType.Boolean, pibrella.output.g.read())
    addMetric(payload, "Outputs/h", None, MetricDataType.Boolean, pibrella.output.h.read())
    addMetric(payload, "Outputs/LEDs/green", None, MetricDataType.Boolean, pibrella.light.green.read())
    addMetric(payload, "Outputs/LEDs/red", None, MetricDataType.Boolean, pibrella.light.red.read())
    addMetric(payload, "Outputs/LEDs/yellow", None, MetricDataType.Boolean, pibrella.light.yellow.read())
    addMetric(payload, "button", None, MetricDataType.Boolean, pibrella.button.read())
    addMetric(payload, "buzzer_fail", None, MetricDataType.Boolean, 0)
    addMetric(payload, "buzzer_success", None, MetricDataType.Boolean, 0)

    # Publish the initial data with the DBIRTH certificate
    totalByteArray = bytearray(payload.SerializeToString())
    client.publish("spBv1.0/" + myGroupId + "/DBIRTH/" + myNodeName + "/" + mySubNodeName, totalByteArray, 0, False)
Example #2
def publishNodeBirth():
  
    payload = sparkplug.getNodeBirthPayload()

    addMetric(payload, "Node Control/Next Server", AliasMap.Next_Server, MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Rebirth", AliasMap.Rebirth, MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Reboot", AliasMap.Reboot, MetricDataType.Boolean, False)
    return bytearray(payload.SerializeToString())
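Unlike the other examples, this variant serializes the NBIRTH payload and returns it instead of publishing, keeping payload construction separate from transport. A hypothetical call site:

# Hypothetical call site: the caller owns the MQTT client and topic naming
nbirth = publishNodeBirth()
client.publish("spBv1.0/" + myGroupId + "/NBIRTH/" + myNodeName, nbirth, 0, False)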
Example #3
def publishNodeBirth():
    print( "Publishing Node Birth")

    # Create the node birth payload
    payload = sparkplug.getNodeBirthPayload()

    # Set up the Node Controls
    addMetric(payload, "Node Control/Next Server", AliasMap.Next_Server, MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Rebirth", AliasMap.Rebirth, MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Reboot", AliasMap.Reboot, MetricDataType.Boolean, False)

    # Add some regular node metrics
    addMetric(payload, "Node Metric0", AliasMap.Node_Metric0, MetricDataType.String, "hello node")
    addMetric(payload, "Node Metric1", AliasMap.Node_Metric1, MetricDataType.Boolean, True)
    addNullMetric(payload, "Node Metric3", AliasMap.Node_Metric3, MetricDataType.Int32)

    # Create a DataSet with two rows (0,1,2 and 3,4,5), columns Int8s, Int16s,
    # Int32s of types Int8, Int16, and Int32, and add it to the payload
    columns = ["Int8s", "Int16s", "Int32s"]
    types = [DataSetDataType.Int8, DataSetDataType.Int16, DataSetDataType.Int32]
    dataset = initDatasetMetric(payload, "DataSet", AliasMap.Dataset, columns, types)
    for start in (0, 3):
        row = dataset.rows.add()
        for value in range(start, start + 3):
            element = row.elements.add()
            element.int_value = value

    # Add a metric with a custom property
    metric = addMetric(payload, "Node Metric2", AliasMap.Node_Metric2, MetricDataType.Int16, 13)
    metric.properties.keys.extend(["engUnit"])
    propertyValue = metric.properties.values.add()
    propertyValue.type = ParameterDataType.String
    propertyValue.string_value = "MyCustomUnits"

    # Create the UDT definition value which includes two UDT members and a single parameter and add it to the payload
    template = initTemplateMetric(payload, "_types_/Custom_Motor", None, None)    # No alias for Template definitions
    templateParameter = template.parameters.add()
    templateParameter.name = "Index"
    templateParameter.type = ParameterDataType.String
    templateParameter.string_value = "0"
    addMetric(template, "RPMs", None, MetricDataType.Int32, 0)    # No alias in UDT members
    addMetric(template, "AMPs", None, MetricDataType.Int32, 0)    # No alias in UDT members

    # Publish the node birth certificate
    byteArray = bytearray(payload.SerializeToString())
    client.publish("spBv1.0/" + myGroupId + "/NBIRTH/" + myNodeName, byteArray, 0, False)
Example #4
    def publishBirth(self, *args):
        self.logger.debug("Publishing Node Birth")

        # Create the node birth payload
        payload = sparkplug.getNodeBirthPayload()

        # Add all metrics to payload, including custom properties
        for metric in self.metrics:
            m = sparkplug.addMetric(payload, metric.name, metric.alias,
                                    metric.datatype, metric.initial)

            super().add_properties_to_metric(m, metric.property_list)

        byteArray = bytearray(payload.SerializeToString())
        self.client.publish(
            "spBv1.0/" + self.group_id + "/NBIRTH/" + self.node_id, byteArray,
            0, False)
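This class-based variant iterates over pre-registered metric descriptions instead of hard-coding each addMetric call. Their shape is not shown above; a hypothetical definition consistent with the attributes used (name, alias, datatype, initial, property_list):

# Hypothetical metric description, inferred from the attributes used above
from dataclasses import dataclass, field

@dataclass
class MetricSpec:
    name: str
    alias: int
    datatype: int             # a MetricDataType value
    initial: object           # initial value reported in the birth certificate
    property_list: list = field(default_factory=list)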
Example #5
def publishNodeBirth():
    print("Publishing Node Birth")

    # Create the node birth payload
    payload = sparkplug.getNodeBirthPayload()

    # Set up the Node Controls
    addMetric(payload, "Node Control/Next Server", AliasMap.Next_Server,
              MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Rebirth", AliasMap.Rebirth,
              MetricDataType.Boolean, False)
    addMetric(payload, "Node Control/Reboot", AliasMap.Reboot,
              MetricDataType.Boolean, False)

    # Publish the node birth certificate
    byteArray = bytearray(payload.SerializeToString())
    client.publish("spBv1.0/" + myGroupId + "/NBIRTH/" + myNodeName, byteArray,
                   0, False)
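Host applications can request new birth certificates at runtime by writing to Node Control/Rebirth over NCMD. A handling sketch, assuming the Tahu-generated sparkplug_b_pb2 module and a client subscribed to the node's NCMD topic:

# Rebirth handling sketch; assumes subscription to
# "spBv1.0/" + myGroupId + "/NCMD/" + myNodeName + "/#"
import sparkplug_b_pb2

def on_message(client, userdata, msg):
    inbound = sparkplug_b_pb2.Payload()
    inbound.ParseFromString(msg.payload)
    for metric in inbound.metrics:
        if metric.name == "Node Control/Rebirth" and metric.boolean_value:
            publishNodeBirth()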
Example #6
    def __initPayload(self):
        # Depends on the external sparkplug library
        self.__payload = sparkplug.getNodeBirthPayload()
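This fragment only builds and caches the payload. A hypothetical companion method, assuming the class also holds a connected paho-mqtt client and its Sparkplug group and node identifiers:

    # Hypothetical companion method; __client, __group_id and __node_id are
    # assumed attributes of the same class
    def publishNodeBirth(self):
        self.__initPayload()
        topic = "spBv1.0/" + self.__group_id + "/NBIRTH/" + self.__node_id
        self.__client.publish(topic,
                              bytearray(self.__payload.SerializeToString()),
                              0, False)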