class Jevois(Device):
    """Prometheus exporter for a JeVois smart camera.

    Reads "Normal" serstyle messages from the camera's serial port and updates
    the class-level metrics, which are shared by all instances and
    distinguished through the (device_id, device_type, station) labels.
    """

    # option name -> monitored by default ("objects" holds the space-separated object list)
    options = {"objects": "", "objectidentified": True, "objectlocation": True, "objectsize": False}

    objectLocationX = Gauge("object_location_x", "Identified object's x position", ["device_id", "device_type", "station"])
    objectLocationY = Gauge("object_location_y", "Identified object's y position", ["device_id", "device_type", "station"])
    objectLocationZ = Gauge("object_location_z", "Identified object's Z position", ["device_id", "device_type", "station"])
    objectSize = Gauge("object_size", "Identified object's size", ["device_id", "device_type", "station"])

    def connect(self):
        """Open the camera's serial port and set up the per-device metrics.

        Raises:
            Exception: with the underlying error message; the original
                exception is kept as ``__cause__`` for debugging.
        """
        try:
            self.serial = serial.Serial(self.port, 115200, timeout=0)
            self.__initialize()
        except Exception as e:
            # Chain the cause so the original traceback is not lost.
            raise Exception(str(e)) from e

    def __initialize(self):
        """Build the object list and the per-port identification Enum metric."""
        if self.isEnabled("objectidentified"):
            # .get() avoids a KeyError when the "objects" option is missing entirely.
            if self.section.get("objects") is not None:
                self.objects = [" "]  # " " is the "nothing identified" state
                # remove extension part and add to the objects list
                for obj in self.section["objects"].split():
                    self.objects.append(obj.split(".")[0])
                self.objectIdentified = Enum("object_id_" + self.port, "Object Identified", ["device_id", "device_type", "station"], states=self.objects)
            else:
                raise Exception("The \"objects\" list is necessary for monitoring identified objects")

    def fetch(self):
        """Read one serial message and update all enabled metrics.

        Message layout (Normal serstyle): token 0 is serstyle+dimension
        ("N1"/"N2"/"N3"), token 1 the object id; 1D lines carry 4 fields,
        2D lines 6 and 3D lines 8.
        """
        lbl = {"device_id": self.id, "device_type": self.type, "station": self.host}
        line = self.serial.readline().rstrip().decode()
        tok = line.split()
        # in case of no identified object (empty message) or malformed line
        # (as a message with Normal serstyle has 6 fields) skip fetching
        # NOTE(review): this threshold also drops valid 4-field 1D messages
        # before the dimension checks below can run — confirm 1D is unused.
        if len(tok) < 6:
            if self.isEnabled("objectidentified"):
                self.objectIdentified.labels(**lbl).state(" ")
            Jevois.objectLocationX.labels(**lbl).set(0)
            Jevois.objectLocationY.labels(**lbl).set(0)
            Jevois.objectLocationZ.labels(**lbl).set(0)
            Jevois.objectSize.labels(**lbl).set(0)
            self.serial.flushInput()
            return

        serstyle = tok[0][0]
        dimension = tok[0][1]
        # If the serstyle is not Normal (thus it is not supported by the module)
        if serstyle != "N":
            raise Exception("Unsupported serstyle (" + serstyle + ")")
        if dimension == "1" and len(tok) != 4:
            raise Exception("Malformed line (expected 4 fields but received " + str(len(tok)) + ")")
        if dimension == "2" and len(tok) != 6:
            raise Exception("Malformed line (expected 6 fields but received " + str(len(tok)) + ")")
        if dimension == "3" and len(tok) != 8:
            raise Exception("Malformed line (expected 8 fields but received " + str(len(tok)) + ")")

        if self.isEnabled("objectidentified"):
            if len(self.objects) > 1:
                obj = tok[1].split(".")[0]
                if obj in self.objects:
                    self.objectIdentified.labels(**lbl).state(obj)
                else:
                    # Unknown object: fall back to the "nothing identified" state.
                    self.objectIdentified.labels(**lbl).state(" ")
            else:
                raise Exception("The \"objects\" list exists but is empty")

        if self.isEnabled("objectlocation"):
            Jevois.objectLocationX.labels(**lbl).set(float(tok[2]))
            if int(dimension) > 1:
                Jevois.objectLocationY.labels(**lbl).set(float(tok[3]))
            if int(dimension) == 3:
                Jevois.objectLocationZ.labels(**lbl).set(float(tok[4]))

        if self.isEnabled("objectsize"):
            # Size is length (1D), area (2D) or volume (3D) of the bounding extent.
            if dimension == "1":
                Jevois.objectSize.labels(**lbl).set(float(tok[3]))
            elif dimension == "2":
                Jevois.objectSize.labels(**lbl).set(abs(float(tok[4]) * float(tok[5])))
            elif dimension == "3":
                Jevois.objectSize.labels(**lbl).set(abs(float(tok[5]) * float(tok[6]) * float(tok[7])))
        self.serial.flushInput()

    def disconnect(self):
        """Close the serial connection."""
        self.serial.close()
class Dobot(Device):
    """Prometheus exporter for a Dobot Magician robotic arm.

    Class-level metrics are shared by every instance and distinguished
    through the (device_id, device_type, station) labels.  The ``options``
    map says which values are polled; ``__initialize`` additionally
    precomputes one ``self.Get*`` flag per grouped dTypeX API call so a
    call is made only when at least one of its values is enabled.
    """

    # Shared label set for every metric below.
    _LABELS = ["device_id", "device_type", "station"]

    # option name -> polled by default
    options = {"devicesn": True, "devicename": True, "deviceversion": True, "devicetime": False, "queueindex": False,
               "posex": True, "posey": True, "posez": True, "poser": True, "anglebase": True, "anglereararm": True, "angleforearm": True,
               "angleendeffector": True, "alarmsstate": True, "homex": False, "homey": False, "homez": False, "homer": False,
               "endeffectorx": False, "endeffectory": False, "endeffectorz": False, "laserstatus": False, "suctioncupstatus": False, "gripperstatus": False,
               "jogbasevelocity": False, "jogreararmvelocity": False, "jogforearmvelocity": False, "jogendeffectorvelocity": False,
               "jogbaseacceleration": False, "jogreararmacceleration": False, "jogforearmacceleration": False, "jogendeffectoracceleration": False,
               "jogaxisxvelocity": False, "jogaxisyvelocity": False, "jogaxiszvelocity": False, "jogaxisrvelocity": False,
               "jogaxisxacceleration": False, "jogaxisyacceleration": False, "jogaxiszacceleration": False, "jogaxisracceleration": False,
               "jogvelocityratio": False, "jogaccelerationratio": False, "ptpbasevelocity": False, "ptpreararmvelocity": False,
               "ptpforearmvelocity": False, "ptpendeffectorvelocity": False, "ptpbaseacceleration": False, "ptpreararmacceleration": False,
               "ptpforearmacceleration": False, "ptpendeffectoracceleration": False, "ptpaxisxyzvelocity": False,
               "ptpaxisrvelocity": False, "ptpaxisxyzacceleration": False, "ptpaxisracceleration": False, "ptpvelocityratio": False,
               "ptpaccelerationratio": False, "liftingheight": False, "heightlimit": False,
               "cpvelocity": False, "cpacceleration": False, "arcxyzvelocity": False, "arcrvelocity": False,
               "arcxyzacceleration": False, "arcracceleration": False, "anglestaticerrrear": False,
               "anglestaticerrfront": False, "anglecoefrear": False, "anglecoeffront": False, "slidingrailstatus": False,
               "slidingrailpose": False, "slidingrailjogvelocity": False, "slidingrailjogacceleration": False,
               "slidingrailptpvelocity": False, "slidingrailptpacceleration": False, "wifimodulestatus": False,
               "wificonnectionstatus": False, "wifissid": False, "wifipassword": False, "wifiipaddress": False,
               "wifinetmask": False, "wifigateway": False, "wifidns": False}

    deviceInfo = Info("dobot_magician", "General information about monitored Dobot Magician device", _LABELS)
    wifiInfo = Info("wifi", "Information regarding the device's wifi connection", _LABELS)
    deviceTime = Gauge("device_time", "Device's clock/time", _LABELS)
    queueIndex = Gauge("queue_index", "Current index in command queue", _LABELS)
    poseX = Gauge("pose_x", "Real-time cartesian coordinate of device's X axis", _LABELS)
    poseY = Gauge("pose_y", "Real-time cartesian coordinate of device's Y axis", _LABELS)
    poseZ = Gauge("pose_z", "Real-time cartesian coordinate of device's Z axis", _LABELS)
    poseR = Gauge("pose_r", "Real-time cartesian coordinate of device's R axis", _LABELS)
    angleBase = Gauge("angle_base", "Base joint angle", _LABELS)
    angleRearArm = Gauge("angle_rear_arm", "Rear arm joint angle", _LABELS)
    angleForearm = Gauge("angle_forearm", "Forearm joint angle", _LABELS)
    angleEndEffector = Gauge("angle_end_effector", "End effector joint angle", _LABELS)
    alarmsState = Enum("alarms_state", "Device alarms state", _LABELS, states=dTypeX.alarmStates)
    homeX = Gauge("home_x", "Home position for X axis", _LABELS)
    homeY = Gauge("home_y", "Home position for Y axis", _LABELS)
    homeZ = Gauge("home_z", "Home position for Z axis", _LABELS)
    homeR = Gauge("home_r", "Home position for R axis", _LABELS)
    endEffectorX = Gauge("end_effector_x", "X-axis offset of end effector", _LABELS)
    endEffectorY = Gauge("end_effector_y", "Y-axis offset of end effector", _LABELS)
    endEffectorZ = Gauge("end_effector_z", "Z-axis offset of end effector", _LABELS)
    laserStatus = Enum("laser_status", "Status (enabled/disabled) of laser", _LABELS, states=["enabled", "disabled"])
    suctionCupStatus = Enum("suction_cup_status", "Status (enabled/disabled) of suction cup", _LABELS, states=["enabled", "disabled"])
    gripperStatus = Enum("gripper_status", "Status (enabled/disabled) of gripper", _LABELS, states=["enabled", "disabled"])
    jogBaseVelocity = Gauge("jog_base_velocity", "Velocity (°/s) of base joint in jogging mode", _LABELS)
    jogRearArmVelocity = Gauge("jog_rear_arm_velocity", "Velocity (°/s) of rear arm joint in jogging mode", _LABELS)
    jogForearmVelocity = Gauge("jog_forearm_velocity", "Velocity (°/s) of forearm joint in jogging mode", _LABELS)
    jogEndEffectorVelocity = Gauge("jog_end_effector_velocity", "Velocity (°/s) of end effector joint in jogging mode", _LABELS)
    jogBaseAcceleration = Gauge("jog_base_acceleration", "Acceleration (°/s^2) of base joint in jogging mode", _LABELS)
    jogRearArmAcceleration = Gauge("jog_rear_arm_acceleration", "Acceleration (°/s^2) of rear arm joint in jogging mode", _LABELS)
    jogForearmAcceleration = Gauge("jog_forearm_acceleration", "Acceleration (°/s^2) of forearm joint in jogging mode", _LABELS)
    jogEndEffectorAcceleration = Gauge("jog_end_effector_acceleration", "Acceleration (°/s^2) of end effector joint in jogging mode", _LABELS)
    jogAxisXVelocity = Gauge("jog_axis_x_velocity", "Velocity (mm/s) of device's X axis (cartesian coordinate) in jogging mode", _LABELS)
    jogAxisYVelocity = Gauge("jog_axis_y_velocity", "Velocity (mm/s) of device's Y axis (cartesian coordinate) in jogging mode", _LABELS)
    jogAxisZVelocity = Gauge("jog_axis_z_velocity", "Velocity (mm/s) of device's Z axis (cartesian coordinate) in jogging mode", _LABELS)
    jogAxisRVelocity = Gauge("jog_axis_r_velocity", "Velocity (mm/s) of device's R axis (cartesian coordinate) in jogging mode", _LABELS)
    jogAxisXAcceleration = Gauge("jog_axis_x_acceleration", "Acceleration (mm/s^2) of device's X axis (cartesian coordinate) in jogging mode", _LABELS)
    jogAxisYAcceleration = Gauge("jog_axis_y_acceleration", "Acceleration (mm/s^2) of device's Y axis (cartesian coordinate) in jogging mode", _LABELS)
    jogAxisZAcceleration = Gauge("jog_axis_z_acceleration", "Acceleration (mm/s^2) of device's Z axis (cartesian coordinate) in jogging mode", _LABELS)
    jogAxisRAcceleration = Gauge("jog_axis_r_acceleration", "Acceleration (mm/s^2) of device's R axis (cartesian coordinate) in jogging mode", _LABELS)
    jogVelocityRatio = Gauge("jog_velocity_ratio", "Velocity ratio of all axis (joint and cartesian coordinate system) in jogging mode", _LABELS)
    jogAccelerationRatio = Gauge("jog_acceleration_ratio", "Acceleration ratio of all axis (joint and cartesian coordinate system) in jogging mode", _LABELS)
    ptpBaseVelocity = Gauge("ptp_base_velocity", "Velocity (°/s) of base joint in point to point mode", _LABELS)
    ptpRearArmVelocity = Gauge("ptp_rear_arm_velocity", "Velocity (°/s) of rear arm joint in point to point mode", _LABELS)
    ptpForearmVelocity = Gauge("ptp_forearm_velocity", "Velocity (°/s) of forearm joint in point to point mode", _LABELS)
    ptpEndEffectorVelocity = Gauge("ptp_end_effector_velocity", "Velocity (°/s) of end effector joint in point to point mode", _LABELS)
    ptpBaseAcceleration = Gauge("ptp_base_acceleration", "Acceleration (°/s^2) of base joint in point to point mode", _LABELS)
    ptpRearArmAcceleration = Gauge("ptp_rear_arm_acceleration", "Acceleration (°/s^2) of rear arm joint in point to point mode", _LABELS)
    ptpForearmAcceleration = Gauge("ptp_forearm_acceleration", "Acceleration (°/s^2) of forearm joint in point to point mode", _LABELS)
    ptpEndEffectorAcceleration = Gauge("ptp_end_effector_acceleration", "Acceleration (°/s^2) of end effector joint in point to point mode", _LABELS)
    ptpAxisXYZVelocity = Gauge("ptp_axis_xyz_velocity", "Velocity (mm/s) of device's X, Y, Z axis (cartesian coordinate) in point to point mode", _LABELS)
    ptpAxisRVelocity = Gauge("ptp_axis_r_velocity", "Velocity (mm/s) of device's R axis (cartesian coordinate) in point to point mode", _LABELS)
    # NOTE(review): "ptp_axis_x_y_z_acceleration" is inconsistent with
    # "ptp_axis_xyz_velocity" above; kept as-is for dashboard compatibility.
    ptpAxisXYZAcceleration = Gauge("ptp_axis_x_y_z_acceleration", "Acceleration (mm/s^2) of device's X, Y, Z axis (cartesian coordinate) in point to point mode", _LABELS)
    ptpAxisRAcceleration = Gauge("ptp_axis_r_acceleration", "Acceleration (mm/s^2) of device's R axis (cartesian coordinate) in point to point mode", _LABELS)
    ptpVelocityRatio = Gauge("ptp_velocity_ratio", "Velocity ratio of all axis (joint and cartesian coordinate system) in point to point mode", _LABELS)
    ptpAccelerationRatio = Gauge("ptp_acceleration_ratio", "Acceleration ratio of all axis (joint and cartesian coordinate system) in point to point mode", _LABELS)
    liftingHeight = Gauge("lifting_height", "Lifting height in jump mode", _LABELS)
    heightLimit = Gauge("height_limit", "Max lifting height in jump mode", _LABELS)
    cpVelocity = Gauge("cp_velocity", "Velocity (mm/s) in cp mode", _LABELS)
    cpAcceleration = Gauge("cp_acceleration", "Acceleration (mm/s^2) in cp mode", _LABELS)
    arcXYZVelocity = Gauge("arc_x_y_z_velocity", "Velocity (mm/s) of X, Y, Z axis in arc mode", _LABELS)
    arcRVelocity = Gauge("arc_r_velocity", "Velocity (mm/s) of R axis in arc mode", _LABELS)
    arcXYZAcceleration = Gauge("arc_x_y_z_acceleration", "Acceleration (mm/s^2) of X, Y, Z axis in arc mode", _LABELS)
    arcRAcceleration = Gauge("arc_r_acceleration", "Acceleration (mm/s^2) of R axis in arc mode", _LABELS)
    angleStaticErrRear = Gauge("angle_static_err_rear", "Rear arm angle sensor static error", _LABELS)
    # FIX: metric name was "arc_static_err_front" (copy-paste from the arc
    # metrics) although it holds the forearm ANGLE sensor static error.
    angleStaticErrFront = Gauge("angle_static_err_front", "Forearm angle sensor static error", _LABELS)
    angleCoefRear = Gauge("angle_coef_rear", "Rear arm angle sensor linearization parameter", _LABELS)
    angleCoefFront = Gauge("angle_coef_front", "Forearm angle sensor linearization parameter", _LABELS)
    slidingRailStatus = Enum("sliding_rail_status", "Sliding rail's status (enabled/disabled)", _LABELS, states=["enabled", "disabled"])
    slidingRailPose = Gauge("sliding_rail_pose", "Sliding rail's real-time pose in mm", _LABELS)
    slidingRailJogVelocity = Gauge("sliding_rail_jog_velocity", "Velocity (mm/s) of sliding rail in jogging mode", _LABELS)
    slidingRailJogAcceleration = Gauge("sliding_rail_jog_acceleration", "Acceleration (mm/s^2) of sliding rail in jogging mode", _LABELS)
    slidingRailPtpVelocity = Gauge("sliding_rail_ptp_velocity", "Velocity (mm/s) of sliding rail in point to point mode", _LABELS)
    slidingRailPtpAcceleration = Gauge("sliding_rail_ptp_acceleration", "Acceleration (mm/s^2) of sliding rail in point to point mode", _LABELS)
    wifiModuleStatus = Enum("wifi_module_status", "Wifi module status (enabled/disabled)", _LABELS, states=["enabled", "disabled"])
    wifiConnectionStatus = Enum("wifi_connection_status", "Wifi connection status (connected/not connected)", _LABELS, states=["enabled", "disabled"])

    def connect(self):
        """Connect to the device on self.port and run first-time setup.

        Raises:
            Exception: with a human-readable reason; the original exception
                (if any) is chained as ``__cause__``.
        """
        stateInfo = {1: "Not Found", 2: "Occupied"}
        try:
            self.api, state = dTypeX.ConnectDobotX(self.port)
            if state[0] == dTypeX.DobotConnect.DobotConnect_NoError:
                self.__initialize()
            else:
                # .get() keeps an unexpected status code from raising a bare KeyError.
                raise Exception(stateInfo.get(state[0], "Unknown connection error (" + str(state[0]) + ")"))
        except Exception as e:
            raise Exception(str(e)) from e

    def __initialize(self):
        """Publish the static Info metrics and precompute the Get* call flags."""
        lbl = {"device_id": self.id, "device_type": self.type, "station": self.host}

        enabledDeviceInfo = {}
        if self.isEnabled("devicesn"):
            enabledDeviceInfo["serial"] = dTypeX.GetDeviceSN(self.api)[0]
        if self.isEnabled("devicename"):
            enabledDeviceInfo["name"] = dTypeX.GetDeviceName(self.api)[0]
        if self.isEnabled("deviceversion"):
            enabledDeviceInfo["version"] = ".".join(map(str, dTypeX.GetDeviceVersion(self.api)))
        if len(enabledDeviceInfo) > 0:
            Dobot.deviceInfo.labels(**lbl).info(enabledDeviceInfo)

        enabledWifiInfo = {}
        if self.isEnabled("wifissid"):
            enabledWifiInfo["ssid"] = dTypeX.GetWIFISSID(self.api)[0]
        if self.isEnabled("wifipassword"):
            enabledWifiInfo["password"] = dTypeX.GetWIFIPassword(self.api)[0]
        if self.isEnabled("wifiipaddress"):
            # First element of GetWIFIIPAddress is skipped — presumably a
            # status/count field; confirm against the dTypeX wrapper.
            enabledWifiInfo["ip_address"] = ".".join(map(str, dTypeX.GetWIFIIPAddress(self.api)[1:]))
        if self.isEnabled("wifinetmask"):
            enabledWifiInfo["netmask"] = ".".join(map(str, dTypeX.GetWIFINetmask(self.api)))
        if self.isEnabled("wifigateway"):
            enabledWifiInfo["gateway"] = ".".join(map(str, dTypeX.GetWIFIGateway(self.api)))
        if self.isEnabled("wifidns"):
            enabledWifiInfo["dns"] = ".".join(map(str, dTypeX.GetWIFIDNS(self.api)))
        if len(enabledWifiInfo) > 0:
            Dobot.wifiInfo.labels(**lbl).info(enabledWifiInfo)

        # One flag per grouped dTypeX call: true when any of its values is enabled.
        # (GetJOGGointParams keeps its historical "Goint" typo for compatibility.)
        self.GetPose = self.isCallEnabled(["posex", "posey", "posez", "poser", "anglebase", "anglereararm", "angleforearm", "angleendeffector"])
        self.GetHomeParams = self.isCallEnabled(["homex", "homey", "homez", "homer"])
        self.GetEndEffectorParams = self.isCallEnabled(["endeffectorx", "endeffectory", "endeffectorz"])
        self.GetJOGGointParams = self.isCallEnabled(["jogbasevelocity", "jogreararmvelocity", "jogforearmvelocity", "jogendeffectorvelocity",
                                                     "jogbaseacceleration", "jogreararmacceleration", "jogforearmacceleration", "jogendeffectoracceleration"])
        self.GetJOGCoordinateParams = self.isCallEnabled(["jogaxisxvelocity", "jogaxisyvelocity", "jogaxiszvelocity", "jogaxisrvelocity",
                                                          "jogaxisxacceleration", "jogaxisyacceleration", "jogaxiszacceleration", "jogaxisracceleration"])
        self.GetJOGCommonParams = self.isCallEnabled(["jogvelocityratio", "jogaccelerationratio"])
        self.GetPTPJointParams = self.isCallEnabled(["ptpbasevelocity", "ptpreararmvelocity", "ptpforearmvelocity", "ptpendeffectorvelocity",
                                                     "ptpbaseacceleration", "ptpreararmacceleration", "ptpforearmacceleration", "ptpendeffectoracceleration"])
        self.GetPTPCoordinateParams = self.isCallEnabled(["ptpaxisxyzvelocity", "ptpaxisrvelocity", "ptpaxisxyzacceleration", "ptpaxisracceleration"])
        self.GetPTPCommonParams = self.isCallEnabled(["ptpvelocityratio", "ptpaccelerationratio"])
        self.GetPTPJumpParams = self.isCallEnabled(["liftingheight", "heightlimit"])
        self.GetCPParams = self.isCallEnabled(["cpvelocity", "cpacceleration"])
        self.GetARCParams = self.isCallEnabled(["arcxyzvelocity", "arcrvelocity", "arcxyzacceleration", "arcracceleration"])
        self.GetAngleSensorStaticError = self.isCallEnabled(["anglestaticerrrear", "anglestaticerrfront"])
        self.GetAngleSensorCoef = self.isCallEnabled(["anglecoefrear", "anglecoeffront"])
        self.GetJOGLParams = self.isCallEnabled(["slidingrailjogvelocity", "slidingrailjogacceleration"])
        self.GetPTPLParams = self.isCallEnabled(["slidingrailptpvelocity", "slidingrailptpacceleration"])

    def fetch(self):
        """Poll every enabled value from the device and update the metrics."""
        lbl = {"device_id": self.id, "device_type": self.type, "station": self.host}

        if self.isEnabled("devicetime"):
            Dobot.deviceTime.labels(**lbl).set(dTypeX.GetDeviceTime(self.api)[0])
        if self.isEnabled("queueindex"):
            Dobot.queueIndex.labels(**lbl).set(dTypeX.GetQueuedCmdCurrentIndex(self.api)[0])

        if self.GetPose:
            pose = dTypeX.GetPose(self.api)
            if self.isEnabled("posex"):
                Dobot.poseX.labels(**lbl).set(pose[0])
            if self.isEnabled("posey"):
                Dobot.poseY.labels(**lbl).set(pose[1])
            if self.isEnabled("posez"):
                Dobot.poseZ.labels(**lbl).set(pose[2])
            if self.isEnabled("poser"):
                Dobot.poseR.labels(**lbl).set(pose[3])
            if self.isEnabled("anglebase"):
                Dobot.angleBase.labels(**lbl).set(pose[4])
            if self.isEnabled("anglereararm"):
                Dobot.angleRearArm.labels(**lbl).set(pose[5])
            if self.isEnabled("angleforearm"):
                Dobot.angleForearm.labels(**lbl).set(pose[6])
            if self.isEnabled("angleendeffector"):
                Dobot.angleEndEffector.labels(**lbl).set(pose[7])

        if self.isEnabled("alarmsstate"):
            alarmsList = dTypeX.GetAlarmsStateX(self.api)
            if len(alarmsList) == 0:
                # " " is the "no active alarm" state.
                Dobot.alarmsState.labels(**lbl).state(" ")
            else:
                for a in alarmsList:
                    Dobot.alarmsState.labels(**lbl).state(a)

        if self.GetHomeParams:
            home = dTypeX.GetHOMEParams(self.api)
            if self.isEnabled("homex"):
                Dobot.homeX.labels(**lbl).set(home[0])
            if self.isEnabled("homey"):
                Dobot.homeY.labels(**lbl).set(home[1])
            if self.isEnabled("homez"):
                Dobot.homeZ.labels(**lbl).set(home[2])
            if self.isEnabled("homer"):
                Dobot.homeR.labels(**lbl).set(home[3])

        if self.GetEndEffectorParams:
            endEffector = dTypeX.GetEndEffectorParams(self.api)
            if self.isEnabled("endeffectorx"):
                Dobot.endEffectorX.labels(**lbl).set(endEffector[0])
            if self.isEnabled("endeffectory"):
                Dobot.endEffectorY.labels(**lbl).set(endEffector[1])
            if self.isEnabled("endeffectorz"):
                Dobot.endEffectorZ.labels(**lbl).set(endEffector[2])

        if self.isEnabled("laserstatus"):
            status = "enabled" if bool(dTypeX.GetEndEffectorLaser(self.api)[0]) else "disabled"
            Dobot.laserStatus.labels(**lbl).state(status)
        if self.isEnabled("suctioncupstatus"):
            status = "enabled" if bool(dTypeX.GetEndEffectorSuctionCup(self.api)[0]) else "disabled"
            Dobot.suctionCupStatus.labels(**lbl).state(status)
        if self.isEnabled("gripperstatus"):
            status = "enabled" if bool(dTypeX.GetEndEffectorGripper(self.api)[0]) else "disabled"
            Dobot.gripperStatus.labels(**lbl).state(status)

        if self.GetJOGGointParams:
            jogJoints = dTypeX.GetJOGJointParams(self.api)
            if self.isEnabled("jogbasevelocity"):
                Dobot.jogBaseVelocity.labels(**lbl).set(jogJoints[0])
            if self.isEnabled("jogreararmvelocity"):
                Dobot.jogRearArmVelocity.labels(**lbl).set(jogJoints[1])
            if self.isEnabled("jogforearmvelocity"):
                Dobot.jogForearmVelocity.labels(**lbl).set(jogJoints[2])
            if self.isEnabled("jogendeffectorvelocity"):
                Dobot.jogEndEffectorVelocity.labels(**lbl).set(jogJoints[3])
            if self.isEnabled("jogbaseacceleration"):
                Dobot.jogBaseAcceleration.labels(**lbl).set(jogJoints[4])
            if self.isEnabled("jogreararmacceleration"):
                Dobot.jogRearArmAcceleration.labels(**lbl).set(jogJoints[5])
            if self.isEnabled("jogforearmacceleration"):
                Dobot.jogForearmAcceleration.labels(**lbl).set(jogJoints[6])
            if self.isEnabled("jogendeffectoracceleration"):
                Dobot.jogEndEffectorAcceleration.labels(**lbl).set(jogJoints[7])

        if self.GetJOGCoordinateParams:
            jogCoords = dTypeX.GetJOGCoordinateParams(self.api)
            if self.isEnabled("jogaxisxvelocity"):
                Dobot.jogAxisXVelocity.labels(**lbl).set(jogCoords[0])
            if self.isEnabled("jogaxisyvelocity"):
                Dobot.jogAxisYVelocity.labels(**lbl).set(jogCoords[1])
            if self.isEnabled("jogaxiszvelocity"):
                Dobot.jogAxisZVelocity.labels(**lbl).set(jogCoords[2])
            if self.isEnabled("jogaxisrvelocity"):
                Dobot.jogAxisRVelocity.labels(**lbl).set(jogCoords[3])
            if self.isEnabled("jogaxisxacceleration"):
                Dobot.jogAxisXAcceleration.labels(**lbl).set(jogCoords[4])
            if self.isEnabled("jogaxisyacceleration"):
                Dobot.jogAxisYAcceleration.labels(**lbl).set(jogCoords[5])
            if self.isEnabled("jogaxiszacceleration"):
                Dobot.jogAxisZAcceleration.labels(**lbl).set(jogCoords[6])
            if self.isEnabled("jogaxisracceleration"):
                Dobot.jogAxisRAcceleration.labels(**lbl).set(jogCoords[7])

        if self.GetJOGCommonParams:
            jogCommon = dTypeX.GetJOGCommonParams(self.api)
            if self.isEnabled("jogvelocityratio"):
                Dobot.jogVelocityRatio.labels(**lbl).set(jogCommon[0])
            if self.isEnabled("jogaccelerationratio"):
                Dobot.jogAccelerationRatio.labels(**lbl).set(jogCommon[1])

        if self.GetPTPJointParams:
            ptpJoints = dTypeX.GetPTPJointParams(self.api)
            if self.isEnabled("ptpbasevelocity"):
                Dobot.ptpBaseVelocity.labels(**lbl).set(ptpJoints[0])
            if self.isEnabled("ptpreararmvelocity"):
                Dobot.ptpRearArmVelocity.labels(**lbl).set(ptpJoints[1])
            if self.isEnabled("ptpforearmvelocity"):
                Dobot.ptpForearmVelocity.labels(**lbl).set(ptpJoints[2])
            if self.isEnabled("ptpendeffectorvelocity"):
                Dobot.ptpEndEffectorVelocity.labels(**lbl).set(ptpJoints[3])
            if self.isEnabled("ptpbaseacceleration"):
                Dobot.ptpBaseAcceleration.labels(**lbl).set(ptpJoints[4])
            if self.isEnabled("ptpreararmacceleration"):
                Dobot.ptpRearArmAcceleration.labels(**lbl).set(ptpJoints[5])
            if self.isEnabled("ptpforearmacceleration"):
                Dobot.ptpForearmAcceleration.labels(**lbl).set(ptpJoints[6])
            if self.isEnabled("ptpendeffectoracceleration"):
                Dobot.ptpEndEffectorAcceleration.labels(**lbl).set(ptpJoints[7])

        if self.GetPTPCoordinateParams:
            ptpCoords = dTypeX.GetPTPCoordinateParams(self.api)
            if self.isEnabled("ptpaxisxyzvelocity"):
                Dobot.ptpAxisXYZVelocity.labels(**lbl).set(ptpCoords[0])
            if self.isEnabled("ptpaxisrvelocity"):
                Dobot.ptpAxisRVelocity.labels(**lbl).set(ptpCoords[1])
            if self.isEnabled("ptpaxisxyzacceleration"):
                Dobot.ptpAxisXYZAcceleration.labels(**lbl).set(ptpCoords[2])
            if self.isEnabled("ptpaxisracceleration"):
                Dobot.ptpAxisRAcceleration.labels(**lbl).set(ptpCoords[3])

        if self.GetPTPCommonParams:
            ptpCommon = dTypeX.GetPTPCommonParams(self.api)
            if self.isEnabled("ptpvelocityratio"):
                Dobot.ptpVelocityRatio.labels(**lbl).set(ptpCommon[0])
            if self.isEnabled("ptpaccelerationratio"):
                Dobot.ptpAccelerationRatio.labels(**lbl).set(ptpCommon[1])

        if self.GetPTPJumpParams:
            ptpJump = dTypeX.GetPTPJumpParams(self.api)
            if self.isEnabled("liftingheight"):
                Dobot.liftingHeight.labels(**lbl).set(ptpJump[0])
            if self.isEnabled("heightlimit"):
                Dobot.heightLimit.labels(**lbl).set(ptpJump[1])

        if self.GetCPParams:
            cp = dTypeX.GetCPParams(self.api)
            if self.isEnabled("cpvelocity"):
                Dobot.cpVelocity.labels(**lbl).set(cp[0])
            if self.isEnabled("cpacceleration"):
                Dobot.cpAcceleration.labels(**lbl).set(cp[1])

        if self.GetARCParams:
            arc = dTypeX.GetARCParams(self.api)
            if self.isEnabled("arcxyzvelocity"):
                Dobot.arcXYZVelocity.labels(**lbl).set(arc[0])
            if self.isEnabled("arcrvelocity"):
                Dobot.arcRVelocity.labels(**lbl).set(arc[1])
            if self.isEnabled("arcxyzacceleration"):
                Dobot.arcXYZAcceleration.labels(**lbl).set(arc[2])
            if self.isEnabled("arcracceleration"):
                Dobot.arcRAcceleration.labels(**lbl).set(arc[3])

        if self.GetAngleSensorStaticError:
            angleStaticErr = dTypeX.GetAngleSensorStaticError(self.api)
            if self.isEnabled("anglestaticerrrear"):
                Dobot.angleStaticErrRear.labels(**lbl).set(angleStaticErr[0])
            if self.isEnabled("anglestaticerrfront"):
                Dobot.angleStaticErrFront.labels(**lbl).set(angleStaticErr[1])

        if self.GetAngleSensorCoef:
            angleCoef = dTypeX.GetAngleSensorCoef(self.api)
            if self.isEnabled("anglecoefrear"):
                Dobot.angleCoefRear.labels(**lbl).set(angleCoef[0])
            if self.isEnabled("anglecoeffront"):
                Dobot.angleCoefFront.labels(**lbl).set(angleCoef[1])

        if self.isEnabled("slidingrailstatus"):
            status = "enabled" if bool(dTypeX.GetDeviceWithL(self.api)[0]) else "disabled"
            Dobot.slidingRailStatus.labels(**lbl).state(status)
        if self.isEnabled("slidingrailpose"):
            Dobot.slidingRailPose.labels(**lbl).set(dTypeX.GetPoseL(self.api)[0])

        if self.GetJOGLParams:
            jogRail = dTypeX.GetJOGLParams(self.api)
            if self.isEnabled("slidingrailjogvelocity"):
                Dobot.slidingRailJogVelocity.labels(**lbl).set(jogRail[0])
            if self.isEnabled("slidingrailjogacceleration"):
                Dobot.slidingRailJogAcceleration.labels(**lbl).set(jogRail[1])

        if self.GetPTPLParams:
            ptpRail = dTypeX.GetPTPLParams(self.api)
            if self.isEnabled("slidingrailptpvelocity"):
                Dobot.slidingRailPtpVelocity.labels(**lbl).set(ptpRail[0])
            if self.isEnabled("slidingrailptpacceleration"):
                Dobot.slidingRailPtpAcceleration.labels(**lbl).set(ptpRail[1])

        if self.isEnabled("wifimodulestatus"):
            status = "enabled" if bool(dTypeX.GetWIFIConfigMode(self.api)[0]) else "disabled"
            Dobot.wifiModuleStatus.labels(**lbl).state(status)
        if self.isEnabled("wificonnectionstatus"):
            status = "enabled" if bool(dTypeX.GetWIFIConnectStatus(self.api)[0]) else "disabled"
            Dobot.wifiConnectionStatus.labels(**lbl).state(status)

    def disconnect(self):
        """Release the connection to the device."""
        dTypeX.DisconnectDobotX(self.api)
# --- Synology DSM exporter: create the metrics, then poll the NAS forever ---
api = SynologyDSM(url, port, usr, password, timeout=60)
start_http_server(9999)
set_static_info(api)

temp_gauge = Gauge(metric("temperature"), "Temperature")
uptime_gauge = Gauge(metric("uptime"), "Uptime")
# FIX: help text previously read "DSM version" (copy-paste from the static info).
cpu_gauge = Gauge(metric("cpu_load"), "CPU load")
memory_used_gauge = Gauge(metric("memory_used"), "Total memory used")
memory_total_gauge = Gauge(metric("memory_total"), "Total memory")
network_up_gauge = Gauge(metric("network_up"), "Network up")
network_down_gauge = Gauge(metric("network_down"), "Network down")
volume_status_enum = Enum(metric("volume_status"), "Status of volume", labelnames=["Volume_ID"], states=["normal"])
volume_size_gauge = Gauge(metric("volume_size"), "Size of volume", ["Volume_ID"])
volume_size_used_gauge = Gauge(metric("volume_size_used"), "Used size of volume", ["Volume_ID"])
s_status_enum = Enum(metric("disk_smart_status"), "Smart status about disk", labelnames=["Disk_ID", "Disk_name"], states=["normal"])
status_enum = Enum(metric("disk_status"), "Status about disk", labelnames=["Disk_ID", "Disk_name"], states=["normal"])
# FIX: metric name was metric("disk_status"), reusing the name of status_enum
# above even though this Info holds the disk *name*.
disk_name_info = Info(metric("disk_name"), "Name of disk", ["Disk_ID", "Disk_name"])
disk_temp_gauge = Gauge(metric("disk_temp"), "Temperature of disk", ["Disk_ID", "Disk_name"])

while True:
    # Refresh each data source before pushing values into the gauges.
    api.utilisation.update()
    api.information.update()
    api.storage.update()
    api.share.update()
    # api.update(with_information=True)
    general_info(api, temp_gauge, uptime_gauge, cpu_gauge)
def test_enum(self):
    """A labelled Enum exports one gauge sample per state, 1.0 on the active one."""
    enum_metric = Enum('ee', 'An enum', ['a', 'b'],
                       registry=self.registry, states=['foo', 'bar'])
    enum_metric.labels('c', 'd').state('bar')
    expected = (b'# HELP ee An enum\n'
                b'# TYPE ee gauge\n'
                b'ee{a="c",b="d",ee="foo"} 0.0\n'
                b'ee{a="c",b="d",ee="bar"} 1.0\n')
    self.assertEqual(expected, generate_latest(self.registry))
# --- Capacity-scrape instrumentation ---

FAILED_CAPACITY_QUERIES = Counter(
    "packet_capacity_query_failures",
    # NOTE(review): help text mentions "spot market prices" on a capacity
    # counter — looks copy/pasted from FAILED_SPOT_QUERIES below; confirm intent.
    "Total number of failures to fetch spot market prices",
)
SUCCESSFUL_CAPACITY_SCRAPES = Counter(
    "packet_capacity_scrapes_total",
    "Total number of capacity scrapes",
)
CAPACITY_REQUEST_TIME = Histogram(
    "packet_capacity_query_duration",
    "Time spent requesting capacity data",
)
# Per-(plan, facility) capacity level.
CAPACITY = Enum(
    "packet_capacity",
    "Current plan capacity by facility.",
    ["plan", "facility"],
    states=["unavailable", "limited", "normal"],
)

# --- Spot-market scrape instrumentation ---

FAILED_SPOT_QUERIES = Counter(
    "packet_spot_market_price_query_failures",
    "Total number of failures to fetch spot market prices",
)
SUCCESSFUL_SPOT_SCRAPES = Counter(
    "packet_spot_market_price_scrapes_total",
    "Total number of spot market price scrapes",
)
SPOT_REQUEST_TIME = Histogram(
    "packet_spot_market_price_query_duration",
    "Time spent requesting spot market prices",
    # (fragment) the bucket tuple continues past this view.
    buckets=(
class BroadcastWebsocketStats:
    """Prometheus metrics for one local->remote websocket broadcast connection.

    Each instance owns a private CollectorRegistry so its serialized output
    contains only this connection's series.
    """

    def __init__(self, local_hostname, remote_hostname):
        self._local_hostname = local_hostname
        self._remote_hostname = remote_hostname
        self._registry = CollectorRegistry()

        # TODO: More robust replacement
        self.name = safe_name(self._local_hostname)
        self.remote_name = safe_name(self._remote_hostname)

        # When the current connection was established; None while disconnected.
        # Read by get_connection_duration().
        self._connection_established_ts = None

        self._messages_received_total = Counter(
            f'awx_{self.remote_name}_messages_received_total',
            'Number of messages received, to be forwarded, by the broadcast websocket system',
            registry=self._registry,
        )
        self._messages_received = Gauge(
            f'awx_{self.remote_name}_messages_received',
            'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
            registry=self._registry,
        )
        self._connection = Enum(
            f'awx_{self.remote_name}_connection',
            'Websocket broadcast connection',
            states=['disconnected', 'connected'],
            registry=self._registry,
        )
        self._connection.state('disconnected')
        self._connection_start = Gauge(
            f'awx_{self.remote_name}_connection_start',
            'Time the connection was established',
            registry=self._registry,
        )
        self._messages_received_per_minute = Gauge(
            f'awx_{self.remote_name}_messages_received_per_minute',
            'Messages received per minute',
            registry=self._registry,
        )
        self._internal_messages_received_per_minute = FixedSlidingWindow()

    def unregister(self):
        # BUG FIX: CollectorRegistry.unregister() takes the collector object,
        # not its metric name — the previous string arguments always raised
        # KeyError inside prometheus_client.
        self._registry.unregister(self._messages_received)
        self._registry.unregister(self._connection)

    def record_message_received(self):
        self._internal_messages_received_per_minute.record()
        self._messages_received.inc()
        self._messages_received_total.inc()

    def record_connection_established(self):
        self._connection.state('connected')
        self._connection_start.set_to_current_time()
        # BUG FIX: remember when we connected so get_connection_duration()
        # works; this attribute was previously never assigned anywhere, so
        # get_connection_duration() raised AttributeError.
        self._connection_established_ts = datetime.datetime.now()
        self._messages_received.set(0)

    def record_connection_lost(self):
        self._connection.state('disconnected')

    def get_connection_duration(self):
        """Seconds since the current connection was established."""
        return (datetime.datetime.now() - self._connection_established_ts).total_seconds()

    def render(self):
        # Fold the sliding-window rate into the exported gauge.
        msgs_per_min = self._internal_messages_received_per_minute.render()
        self._messages_received_per_minute.set(msgs_per_min)

    def serialize(self):
        """Return this connection's metrics in Prometheus text format."""
        self.render()
        registry_data = generate_latest(self._registry).decode('UTF-8')
        return registry_data
def getGardenCallback(self, err, res): if err: print err else: if configuration["influxdb"]["enabled"]: influxDbClient = InfluxDBClient( configuration["influxdb"]["server"], configuration["influxdb"]["port"], configuration["influxdb-username"], configuration["influxdb"]["password"], configuration["influxdb"]["database"]) try: influxDbClient.create_database( configuration["influxdb"]["database"]) except InfluxDBClientError, ex: print "InfluxDBClientError", ex influxDbClient.create_retention_policy( configuration["influxdb"]["policy"], 'INF', 3, default=True) for location in res["locations"]: #print json.dumps(location, indent=2, sort_keys=True) sensorId = location["sensor"]["sensor_identifier"][-4:].lower() flower = {} #flower["sensor_name"] = location["sensor"]["sensor_identifier"] if location["battery"]["gauge_values"][ "current_value"] is not None: flower["battery"] = ( "Battery", int(location["battery"]["gauge_values"] ["current_value"])) if location["air_temperature"]["gauge_values"][ "current_value"] is not None: flower["air_temperature"] = ( "Temperature", float(location["air_temperature"]["gauge_values"] ["current_value"])) flower["air_temperature_status"] = [ "Temperature Status", str(location["air_temperature"] ["instruction_key"]).replace( "air_temperature_", ""), ["good", "too_low", "too_high"] ] if location["fertilizer"]["gauge_values"][ "current_value"] is not None: flower["fertilizer"] = ( "Fertilizer", float(location["fertilizer"]["gauge_values"] ["current_value"])) flower["fertilizer_status"] = [ "Fertilizer Status", str(location["fertilizer"]["instruction_key"]).replace( "fertilizer_", ""), ["good", "too_low", "too_high"] ] if location["light"]["gauge_values"][ "current_value"] is not None: flower["light"] = ("Light", float(location["light"]["gauge_values"] ["current_value"])) flower["light_status"] = [ "Light Status", str(location["light"]["instruction_key"]).replace( "light_", ""), ["good", "too_low", "too_high"] ] if 
location["watering"]["soil_moisture"]["gauge_values"][ "current_value"] is not None: flower["watering"] = ( "Moisture", float(location["watering"]["soil_moisture"] ["gauge_values"]["current_value"])) flower["watering_status"] = [ "Moisture Status", str(location["watering"]["soil_moisture"] ["instruction_key"]).replace("soil_moisture_", ""), ["good", "too_low", "too_high"] ] lastUtc = ("Updated", str(location["last_sample_utc"])) if configuration["mqtt"]["enabled"]: print "Pushing Mqtt", sensorId, ":", configuration["mqtt"][ "prefix"], flower try: broadcastMqtt(configuration["mqtt"]["client"], configuration["mqtt"]["server"], configuration["mqtt"]["port"], configuration["mqtt"]["prefix"], sensorId + "/update", json.dumps(taflowerg)) except Exception, ex: print "Error on mqtt broadcast", ex if configuration["prometheuspush"]["enabled"]: registry = CollectorRegistry() for key in flower.keys(): print "Pushing", sensorId, ":", configuration[ "prometheuspush"][ "prefix"] + '_' + key + '_total', "=", flower[ key] if flower[key][1] is None: continue elif type(flower[key][1]) is str: e = Enum( configuration["prometheuspush"]["prefix"] + '_' + key + '_total', flower[key][0], ['sensorid'], states=flower[key][2], registry=registry) e.labels(sensorid=sensorId).state(flower[key][1]) else: g = Gauge( configuration["prometheuspush"]["prefix"] + '_' + key + '_total', flower[key][0], ['sensorid'], registry=registry) g.labels(sensorid=sensorId).set(flower[key][1]) print "Pushing", sensorId, ":", configuration[ "prometheuspush"][ "prefix"] + '_' + key + '_total', "=", flower[key] try: push_to_gateway( configuration["prometheuspush"]["server"] + ":" + configuration["prometheuspush"]["port"], job=configuration["prometheuspush"]["client"] + "_" + sensorId, registry=registry) except: print "Prometheus not available" if configuration["influxdb"]["enabled"]: influxDbJson = [{ "measurement": configuration["influxdb"]["prefix"], "tags": { "sensor": sensorId, }, "time": lastUtc[1], "fields": 
{} }] for key in flower.keys(): influxDbJson[0]["fields"][key] = flower[key][1] print "Pushing", influxDbJson try: influxDbClient.write_points( influxDbJson, retention_policy=configuration["influxdb"] ["policy"]) except: print "Influxdb not available"
class Command(BaseCommand):
    """Ingest articles cached on local disk (paths queued in redis) into Elasticsearch."""

    help = 'This command is used to ingest data from local disk cache'

    def __init__(self, *args, **kwargs):
        super(Command, self).__init__(*args, **kwargs)
        # Per-(source, category) count of articles ingested today.
        self.source_ingest = Gauge("total_ingest_count",
                                   "Total number of articles ingested",
                                   ['source', 'category'])
        # Whether the ingester is actively processing or idling on an empty queue.
        self.task_state = Enum("article_ingestion_state",
                               "Article ingestion states",
                               states=["running", "waiting"])
        self.now = datetime.now(
            pytz.timezone("Asia/Kolkata")).strftime("%Y-%m-%d")
        self.redis = redis.Redis()
        self.batch = []
        self.sleep_time = 0
        self.classify = RegexClassification()
        self.score = ArticleScore()

    def reset_stats(self):
        """ this method is used to reset stats to zero """
        print("Resetting Stats")
        for metric in self.source_ingest._metrics.keys():
            source, category = metric
            # BUG FIX: a Gauge is not callable — the per-label child must be
            # obtained via .labels() before .set(); the old code raised
            # TypeError on the first call.
            self.source_ingest.labels(source=source, category=category).set(0)

    def add_arguments(self, parser):
        parser.add_argument('--source',
                            '-s',
                            type=str,
                            help='redis source name [Ex: theverge]')
        parser.add_argument(
            '--index',
            '-i',
            type=str,
            default='article',
            help='elastic search index name [default: article]')

    def get_data_from_redis(self, source):
        """ this method returns data from redis """
        return self.redis.lpop(source)

    def parse_date(self, date_str):
        """Parse a date string (or epoch seconds) to UTC; None when unparseable."""
        try:
            dt = parse(date_str)
            return dt.astimezone(tz=pytz.UTC)
        except Exception:
            try:
                ts = int(date_str)
                return datetime.utcfromtimestamp(ts)
            except Exception:
                return None

    def remove_char(self, tag, ch):
        """ this method removes given char from tag """
        # Splitting on ch yields the tag's fragments; otherwise keep it whole.
        new_tag = [tag]
        if ch in tag:
            return tag.split(ch)
        return new_tag

    def remove_special_chars(self, tags):
        """ this method is used to remove special chars from tags """
        # Split compound tags on ";", " & " and " and ", then strip punctuation.
        new_tags = []
        for tag in tags:
            new_tags = new_tags + self.remove_char(tag, ";")
        clean_tags = []
        for tag in new_tags:
            clean_tags = clean_tags + self.remove_char(tag, " & ")
        final_tags = []
        for tag in clean_tags:
            final_tags = final_tags + self.remove_char(tag, " and ")
        final_tags = [
            tag.replace("&", " ").replace(",", "").replace(":", "").replace(
                "'", "").replace("#", "").replace("*", "").replace(
                    "(", "").replace(")", "").replace("@", "").replace(
                        "!", "").replace("-", " ").strip().lower()
            for tag in final_tags
        ]
        return final_tags

    def get_tags(self, tags):
        """ this method will return tag name from tags objects """
        tag_list = []
        for tag in tags:
            tag_list.append(tag["name"])
        return tag_list

    def create_model_obj(self, doc, index):
        """ this method is used to create django article model object """
        title = doc["title"]
        category = doc["category"]
        source = doc["source"]
        source_url = doc["source_url"]
        cover_image = doc["cover_image"]
        blurb = doc["blurb"]
        full_text = doc.get("short_description") or doc.get("full_text", "")
        published_on = self.parse_date(doc["published_on"])
        if not published_on:
            published_on = timezone.now()
        video_data = doc.get("video_data", "")
        images = doc["images"]
        tags = doc["tags"]
        # Fall back to the first video's poster frame as cover image.
        if not cover_image:
            if video_data:
                cover_image = video_data[0].get("video_image", "")
        if title and full_text:
            # De-duplicate on title: only ingest articles we have not seen.
            if not Article.objects.filter(title=title).exists():
                if category == "Uncategorised":
                    # apply regex based category only if article is uncategorised
                    # get category id from regex classfication
                    category_id = self.classify.match(title)
                    category = Category.objects.get(id=category_id)
                else:
                    category = Category.objects.get(name=category)
                source, _ = Source.objects.get_or_create(name=source)
                article_obj = Article.objects.create(title=title,
                                                     source=source,
                                                     category=category,
                                                     source_url=source_url,
                                                     cover_image=cover_image,
                                                     blurb=blurb,
                                                     full_text=full_text,
                                                     published_on=published_on,
                                                     active=True)
                # NOTE(review): single-image articles are skipped here
                # (len(images) > 1) — confirm this is intentional and not an
                # off-by-one for len(images) > 0.
                if len(images) > 1:
                    for img in images:
                        _ = ArticleMedia.objects.create(article=article_obj,
                                                        category="image",
                                                        url=img)
                if len(video_data) > 0:
                    for video_dic in video_data:
                        _ = ArticleMedia.objects.create(
                            article=article_obj,
                            category="video",
                            url=video_dic.get("video_image", ""),
                            video_url=video_dic.get("video_url", ""))
                if len(tags) > 0:
                    tag_objs = []
                    new_tags = self.remove_special_chars(tags)
                    if new_tags:
                        for tag in new_tags:
                            tag_obj = HashTag.objects.filter(name=tag)
                            if tag_obj:
                                tag_objs.append(tag_obj.first())
                            else:
                                tag_obj = HashTag.objects.create(name=tag)
                                tag_objs.append(tag_obj)
                    article_obj.hash_tags.add(*tag_objs)
                # calculate article score
                score = self.score.calculate_score(doc)
                serializer = ArticleSerializer(article_obj)
                json_data = serializer.data
                json_data["article_score"] = score
                if json_data["hash_tags"]:
                    tag_list = self.get_tags(json_data["hash_tags"])
                    json_data["hash_tags"] = tag_list
                self.batch.append(json_data)
                # Flush to Elasticsearch in batches.
                if len(self.batch) == 99:
                    ingest_to_elastic(self.batch, index, index, 'id')
                    self.batch = []
                    print("Ingesting Batch To Elastic...!!!")

    def handle(self, *args, **options):
        """Main loop: pop cached file paths from redis and ingest them until Ctrl-C."""
        if options['source'] is None:
            raise CommandError("Option `--source=...` must be specified.")
        # start prometheus http server for metrics
        start_http_server(8686)
        source = options['source']
        index = options['index']
        create_index(index)
        try:
            while True:
                file_path = self.get_data_from_redis(source)
                if file_path:
                    date = datetime.now(
                        pytz.timezone("Asia/Kolkata")).strftime("%Y-%m-%d")
                    self.task_state.state("running")
                    self.sleep_time = 0
                    if os.path.isfile(file_path):
                        # BUG FIX: read the zlib-compressed pickle in binary
                        # mode and close the handle deterministically; the old
                        # code used text mode and leaked the file object.
                        with open(file_path, 'rb') as cache_file:
                            doc = cPickle.loads(
                                zlib.decompress(cache_file.read()))
                        try:
                            self.create_model_obj(doc, index)
                            if date == self.now:
                                self.source_ingest.labels(
                                    source=doc.get("source", "source"),
                                    category=doc.get("category",
                                                     "category")).inc()
                            else:
                                # A new day started: refresh the date stamp.
                                self.now = datetime.now(
                                    pytz.timezone("Asia/Kolkata")).strftime(
                                        "%Y-%m-%d")
                                # self.reset_stats()
                                self.source_ingest.labels(
                                    source=doc.get("source", "source"),
                                    category=doc.get("category",
                                                     "category")).inc()
                        except Exception as e:
                            print("error in doc read")
                            print(e)
                    else:
                        msg = "Data file not found: {0}".format(file_path)
                        print(msg)
                else:
                    # Queue empty: idle, and flush any partial batch after 60s.
                    self.task_state.state("waiting")
                    print("Sleeping...!!!")
                    time.sleep(10)
                    self.sleep_time += 10
                    if self.sleep_time >= 60:
                        if self.batch:
                            ingest_to_elastic(self.batch, index, index, 'id')
                            print("Ingesting Final Batch...!!!")
                            self.batch = []
                        self.sleep_time = 0
        except KeyboardInterrupt:
            sys.exit(0)
Gauge("active_webhook_connections", "active webhook connections"), "active_requests": Gauge("active_requests", "active requests"), "active_network_queries": Gauge("active_network_queries", "active network queries"), "request_count": Gauge("request", "request/s count", labelnames=('duration', 'bot')), "request_file_count": Gauge('request_file', 'request/s file', labelnames=('duration', )), "response_count": Gauge('response', "response/s", labelnames=("duration", 'type')), 'update_count': Gauge('update', "update/s", labelnames=("duration", 'bot')), "has_custom_certificate": Enum("has_custom_certificate", "has custom certificate", states=['true', 'false'], labelnames=('bot', )), "webhook_max_connections": Gauge("webhook_max_connections", "webhook max connections", labelnames=('bot', )), } BLACKLIST = [ 'token', 'request_bytes', 'request_files_bytes', 'request_max_bytes', 'webhook', 'id', 'rss', 'vm', 'rss_peak', 'vm_peak', 'response_bytes', 'head_update_id', 'tail_update_id', 'pending_update_count', 'buffer_memory' ] REPLACE = { "request_count/sec": "request_count", "update_count/sec": "update_count",
def make_enum(name, documentation, state_cls):
    """Build an Enum metric whose states mirror *state_cls*, on the module registry."""
    states = enum_values(state_cls)
    return Enum(name, documentation, labels, states=states, registry=registry)
    # (fragment) these are the arguments of a Counter(...) call that opens
    # before this view.
    'pup_advisor_inventory_post_success',
    'The total amount of successful inventory posts')
# NOTE(review): "advsior" misspells "advisor" in this exported metric name;
# renaming would break existing alerts/dashboards, so it is flagged only.
inventory_post_failure = Counter('pup_advsior_inventory_post_failure',
                                 'The total amount of failed inventory posts')
validation_time = Summary('validation_processing_seconds',
                          'Time spent validating archive')
inventory_post_time = Summary('inventory_post_processing_seconds',
                              'Time spent posting to inventory')
handle_file_time = Summary('pup_handle_file_time',
                           'Time spent executing handle_file')
extract_facts_time = Summary('pup_extract_facts_time',
                             'Time spent extracting facts')
payload_size = Summary('pup_payload_size',
                       'Size in bytes of processed payloads')
upload_service_version = Info('upload_service_version',
                              'Build commit and date')
produce_queue_size = Gauge("pup_produce_queue_size",
                           "Size of the produce queue")
system_profile_queue_size = Gauge("pup_system_profile_queue_size",
                                  "Size of the system profile queue")
current_archives_size = Gauge("pup_current_archives_size",
                              "Number of archives processing currently")
# Lifecycle state of each named async task.
task_status = Enum("pup_task_stat", "The state of each async task",
                   labelnames=["task_name"],
                   states=["running", "failed", "done"])
# CLI options parsed earlier in the script.
listen_port = args.port
sleep_time = args.frequency
code = args.code
sp = None  # smart-plug handle; assigned elsewhere before the getters run

# Send command and receive reply
# Create a metric to track time spent and requests made.
# Gaugage: it goes up and down, snapshot of state
REQUEST_POWER = Gauge('w125_power_watt', 'DSP-W125 Watt measure')
REQUEST_TEMP = Gauge('w125_temperature', 'DSP-W125 Temperature measure')
REQUEST_TOTAL = Gauge('w125_total', 'DSP-W125 Total energy measure')
REQUEST_STATE = Enum('w125_state', 'DSP-W125 switch status',
                     states=['ON', 'OFF', 'unknown'])

# Sample the plug lazily: the callables run on every Prometheus scrape.
REQUEST_POWER.set_function(lambda: get_power())
REQUEST_TEMP.set_function(lambda: get_temp())
REQUEST_TOTAL.set_function(lambda: get_total())


def get_state():
    """ Get W125 switch state """
    return sp.state


def get_power():
    """ Get W125 power """
    # (fragment) the rest of this function's body is past this view.
    val = sp.current_consumption
import os
from datetime import datetime

from prometheus_client import (Enum, Gauge, Histogram, Summary,
                               start_http_server)

# Health checks for three independent Zcash block explorers. Each Enum flips
# between the states '1' and '0' (the states are strings, set elsewhere).

# ZCHA explorer.
ZCHA_BLOCK_URL = "https://api.zcha.in/v2/mainnet/blocks/"
ZCHA_BLOCK_HEIGHT_PORT = Enum('zcha_block_height_check',
                              'ZCHA Block Height Check',
                              states=['1', '0'])
ZCHA_LAST_BLOCK_CHECK_PORT = Enum('zcha_last_block_check',
                                  'ZCHA LAST BLOCK CHECK',
                                  states=['1', '0'])

# zcashnetwork.info explorer.
ZCASHNETWORKINFO_BLOCK_URL = "https://zcashnetwork.info/api/block/"
ZCASHNETWORKINFO_BLOCK_HEIGHT_PORT = Enum(
    'zcashnetworkinfo_block_height_check',
    'ZCASHNETWORKINFO Block Height Check',
    states=['1', '0'])
ZCASHNETWORKINFO_LAST_BLOCK_CHECK_PORT = Enum(
    'zcashnetworkinfo_last_block_check',
    'ZCASHNETWORKINFO LAST BLOCK CHECK',
    states=['1', '0'])

# chain.so explorer.
CHAINSO_BLOCK_URL = "https://chain.so/api/v2/block/ZEC/"
CHAINSO_BLOCK_HEIGHT_PORT = Enum('chainso_block_height_check',
                                 'CHAINSO Block Height Check',
                                 states=['1', '0'])
CHAINSO_LAST_BLOCK_CHECK_PORT = Enum('chainso_last_block_check',
                                     'CHAINSO LAST BLOCK CHECK',
                                     states=['1', '0'])
import os
from datetime import datetime

from prometheus_client import (Enum, Gauge, Histogram, Summary,
                               start_http_server)

# Exmo Configs
EXMO_URL = "https://api.exmo.com/v1/ticker/"

# Pass/fail checks: each Enum flips between the string states '1' and '0'.
EXMO_SPOT_PRICE_USD_PORT = Enum('exmo_spot_price_usd_check',
                                'Exmo Spot Price USD Check',
                                states=['1', '0'])
EXMO_SPOT_PRICE_BTC_PORT = Enum('exmo_spot_price_btc_check',
                                'Exmo Spot Price BTC Check',
                                states=['1', '0'])
EXMO_TRANSACTION_VOLUME_USD_PORT = Enum(
    'exmo_transaction_volume_usd_check',
    'Exmo Transaction Volume USD Check',
    states=['1', '0'])
EXMO_TRANSACTION_VOLUME_BTC_PORT = Enum(
    'exmo_transaction_volume_btc_check',
    'Exmo Transaction Volume BTC Check',
    states=['1', '0'])

# Distribution of spot-price deviation observations.
EXMO_SPOT_PRICE_MEDIAN_DEVIATION_PORT = Histogram(
    'exmo_spot_price_deviation_check', 'Exmo Spot Price Deviation Check')
EXMO_SUMMARY = Summary('request_latency_seconds', 'Description of summary')

# Deviation and absolute-value gauges for spot prices and volumes.
EXMO_USD_SPOT_PRICE_DEVIATION_GAUGE = Gauge('exmo_usd_spot_price_deviation_gauge',
                                            'exmo_usd_spot_price_deviation_gauge gauge')
EXMO_BTC_SPOT_PRICE_DEVIATION_GAUGE = Gauge('exmo_btc_spot_price_deviation_gauge',
                                            'exmo_btc_spot_price_deviation_gauge gauge')
EXMO_USD_SPOT_PRICE_ABSOLUTE = Gauge(
    'exmo_usd_spot_price_absolute', 'exmo_usd_spot_price_absolute_gauge')
EXMO_BTC_SPOT_PRICE_ABSOLUTE = Gauge(
    'exmo_btc_spot_price_absolute', 'exmo_btc_spot_price_absolute_gauge')
EXMO_USD_TRANSACTION_VOLUME_ABSOLUTE = Gauge(
    'exmo_usd_transaction_volume_absolute',
    'exmo_usd_transaction_volume_absolute_gauge')
EXMO_BTC_TRANSACTION_VOLUME_ABSOLUTE = Gauge(
    'exmo_btc_transaction_volume_absolute',
    'exmo_btc_transaction_volume_absolute_gauge')
import random
import time

from prometheus_client import CollectorRegistry, Enum, Gauge, pushadd_to_gateway

'''
e = Enum('my_task_state', 'Description of enum', states=['starting', 'running', 'stopped'])
e.state('running')
'''

# A private registry so only this job's metrics reach the pushgateway.
registry = CollectorRegistry()
duration = Gauge('my_job_duration_seconds',
                 'Duration of my batch job in seconds', registry=registry)
job_status = Enum('job_status', 'Job completion status',
                  states=['complete', 'failed', 'running'], registry=registry)

try:
    # Time the simulated work and record a random completion status.
    with duration.time():
        time.sleep(random.random())
        job_status.state(random.choice(['complete', 'failed', 'running']))
    # Reached only when the timed block raised nothing: stamp the success time.
    success_timestamp = Gauge('my_job_last_success_seconds',
                              'Last time my batch job successfully finished',
                              registry=registry)
    success_timestamp.set_to_current_time()
finally:
    # Pushed whether the job succeeded or failed.
    pushadd_to_gateway('localhost:9091', job='my-batch-job', registry=registry)
class Monitor(object):
    """Prometheus instrumentation for a Worker: identity info, lifecycle state,
    operation counters and a processing-time histogram."""

    # Metrics are class attributes, so they are registered exactly once and
    # shared by every Monitor instance (distinguished via labels).
    info = Info("worker", "worker information")
    err = Info("worker_error", "worker error")
    counter = Counter(
        "worker_operation",
        "counter for worker operations",
        ["name", "operation", "topic"],
    )
    state = Enum(
        "worker_state",
        "state of worker",
        ["name"],
        states=["starting", "running", "stopped"],
    )
    process_timer = Histogram("process_time_seconds", "Process time (seconds)")

    def __init__(self, worker: "Worker", port: int = 8000) -> None:
        self.worker = worker
        # Every worker begins life in the "starting" state.
        self.state.labels(name=worker.name).state("starting")
        self.port = port

    def expose(self) -> None:
        """Serve the metrics over HTTP on the configured port."""
        start_http_server(self.port)

    def record_worker_info(self) -> None:
        """Publish static identity information about the worker."""
        in_topic = self.worker.source.topic if self.worker.has_input() else None
        out_topic = self.worker.destination.topic if self.worker.has_output() else None
        self.info.info({
            "name": self.worker.name,
            "version": self.worker.version,
            "description": self.worker.description,
            "in-topic": in_topic,
            "out-topic": out_topic,
        })

    def record_start(self) -> None:
        """Mark the worker as running and note the start time."""
        self.info.info({
            "name": self.worker.name,
            "time": str(time.time()),
            "event": "start",
        })
        self.state.labels(name=self.worker.name).state("running")

    def record_finish(self) -> None:
        """Mark the worker as stopped and note the finish time."""
        self.info.info({
            "name": self.worker.name,
            "time": str(time.time()),
            "event": "finish",
        })
        self.state.labels(name=self.worker.name).state("stopped")

    def record_error(self, msg: str) -> None:
        """Publish error details and bump the error counter."""
        self.err.info({
            "name": self.worker.name,
            "time": str(time.time()),
            "event": "error",
            "details": msg,
        })
        self.counter.labels(name=self.worker.name, operation="error", topic=None).inc()

    def record_write(self, topic: str = None) -> None:
        self.counter.labels(name=self.worker.name, operation="write", topic=topic).inc()

    def record_read(self, topic: str = None) -> None:
        self.counter.labels(name=self.worker.name, operation="read", topic=topic).inc()
#h_transaction.observe(1) #h_transaction.observe(0.5) #h_transaction.observe(1.1) #h_transaction.observe(13.2) @h_transaction.time() def f(): pass with h_transaction.labels(method='POST',operation='transaction').time(): pass #from prometheus_client import Enum e = Enum('app_status', 'Application status ', states=['starting', 'running', 'stopped']) e.state('running') # g_transaction_last_seen = Gauge('transaction_last_seen', 'last seen transactions ',['method','operation']) g_enroll_last_seen = Gauge('enroll_last_seen', 'last seen enroll ',['method','operation']) # start the http server to expose the prometheus metrics logging.info("Starting web-server...") start_http_server(metrics_port, "0.0.0.0") logging.info("Server started and listening at 0.0.0.0:{0}".format(metrics_port))
import argparse
import os
import time
from urllib.parse import urlparse

import requests
from requests.exceptions import RequestException
import yaml
from prometheus_client import Counter, Enum, Gauge, Summary
from prometheus_client.exposition import start_http_server

# Create a metric to track time spent and requests made.
REQUEST_TIME = Summary('request_processing_seconds',
                       'Time spent processing request', ['alias'])
REQUEST_CODE = Counter('request_codes', 'Request status code', ['alias', 'code'])
# Per-resource state, labelled by endpoint alias and resource name.
STATE = Enum('resource_state', 'Description of enum', ['alias', 'name'],
             states=['available', 'locked', 'reserved'])
LABELS = Gauge('available_labels', "Available labels", ['alias', 'label', 'state'])


def load_yaml_config(file_name):
    """Parse *file_name* as YAML; returns None when the file is missing."""
    yaml_object = None
    try:
        with open(file_name, 'r') as yaml_file:
            # SafeLoader: never instantiate arbitrary Python objects from config.
            yaml_object = yaml.load(yaml_file, Loader=yaml.SafeLoader)
    except FileNotFoundError:
        print(f"{file_name} does not exist")
    return yaml_object


# (fragment) the body of process_request continues past this view.
def process_request(alias, url, user=None, token=None, verify=True):
# -*- coding : utf-8
import requests
from flask import Response, Flask
from prometheus_client import Enum, CollectorRegistry, generate_latest

app = Flask(__name__)
REGISTRY = CollectorRegistry(auto_describe=False)

# Namenode liveness exported as an Enum on the default registry.
e = Enum('namenode_state', 'namenode status', states=['running', 'stopped'])
e.state('running')


class hbase_exporter(object):
    """Fetches a JMX endpoint once at construction and answers queries about it."""

    def __init__(self, url):
        self.url = url
        # NOTE(review): despite its name, this attribute holds the
        # requests.Response of a single eager GET, not a live session —
        # confirm the naming is intentional before changing it.
        self.session = requests.session().get(self.url)

    def get_averageLoad(self):
        """Return the 'beans' array from the cached JMX document."""
        return self.session.json()['beans']

    def get_RitCount(self):
        """Return the entire cached JMX document."""
        return self.session.json()

    def getHeapUtilization(self):
        # Not implemented yet; implicitly yields None.
        return
import sys
import re
import logging

from sets import Set
from prometheus_client import start_http_server, Enum
import boto3

MONITOR_NAME = "sre-stuck-ebs-volume"
PROJECT = "openshift-monitoring"
VALID_STATES = ["attaching", "attached", "detaching", "detached"]

#clusterid is included to better support future Prometheus federation.
VOLUME_STATE = Enum('ebs_volume_state', 'EBS Volume state',
                    ["vol_name", "clusterid"], states=VALID_STATES)

# A list (implemented as a Set) of all non-deleted volumes.
# After we get a list of all volume IDs from our running instances we will run
# a query against the API for the volumes we know about and prune as needed.
ACTIVE_VOLUMES = Set([])


def normalize_prometheus_label(str):
    """
    Prometheus labels must match /[a-zA-Z_][a-zA-Z0-9_]*/ and so we should
    coerce our data to it.
    Source: https://prometheus.io/docs/concepts/data_model/

    Every invalid character will be made to be an underscore `_`.
    """
    # BUG FIX: the previous pattern r'[^[a-zA-Z_][a-zA-Z0-9_]*]' was a
    # malformed character class (it matched single characters outside
    # "[a-zA-Z_" followed by word characters and a literal "]"), so most
    # invalid characters were left in place. Replace every character outside
    # [a-zA-Z0-9_], then prefix an underscore when the result starts with a
    # digit, which the label grammar also forbids.
    cleaned = re.sub(r'[^a-zA-Z0-9_]', "_", str)
    if cleaned and cleaned[0].isdigit():
        cleaned = "_" + cleaned
    return cleaned
def initialize_prometheus_exporter(ursula, listen_address: str, port: NETWORK_PORT, metrics_prefix: str) -> None:
    """Register the node's Prometheus metrics, schedule periodic collection,
    and serve /metrics and /json_metrics over Twisted on the given address/port."""
    from prometheus_client.twisted import MetricsResource
    from twisted.web.resource import Resource
    from twisted.web.server import Site

    from .json_metrics_export import JSONMetricsResource

    # All metric objects, keyed by the names collect_prometheus_metrics expects.
    node_metrics = {
        "known_nodes_gauge": Gauge(f'{metrics_prefix}_known_nodes',
                                   'Number of currently known nodes'),
        "work_orders_gauge": Gauge(f'{metrics_prefix}_work_orders',
                                   'Number of accepted work orders'),
        "missing_commitments_gauge": Gauge(f'{metrics_prefix}_missing_commitments',
                                           'Currently missed commitments'),
        "learning_status": Enum(f'{metrics_prefix}_node_discovery',
                                'Learning loop status',
                                states=['starting', 'running', 'stopped']),
        "eth_balance_gauge": Gauge(f'{metrics_prefix}_staker_eth_balance',
                                   'Ethereum balance'),
        "token_balance_gauge": Gauge(f'{metrics_prefix}_staker_token_balance',
                                     'NuNit balance'),
        "worker_eth_balance_gauge": Gauge(f'{metrics_prefix}_worker_eth_balance',
                                          'Worker Ethereum balance'),
        "worker_token_balance_gauge": Gauge(f'{metrics_prefix}_worker_token_balance',
                                            'Worker NuNit balance'),
        "requests_counter": Counter(f'{metrics_prefix}_http_failures',
                                    'HTTP Failures',
                                    ['method', 'endpoint']),
        "host_info": Info(f'{metrics_prefix}_host_info', 'Description of info'),
        "active_stake_gauge": Gauge(f'{metrics_prefix}_active_stake', 'Active stake'),
        "owned_tokens_gauge": Gauge(
            f'{metrics_prefix}_owned_tokens',
            'All tokens that belong to the staker, including '
            'locked, unlocked and rewards'),
        "unlocked_tokens_gauge": Gauge(f'{metrics_prefix}_unlocked_tokens',
                                       'Amount of unlocked tokens'),
        "available_refund_gauge": Gauge(f'{metrics_prefix}_available_refund',
                                        'Available refund'),
        "policies_held_gauge": Gauge(f'{metrics_prefix}_policies_held',
                                     'Policies held'),
        "current_period_gauge": Gauge(f'{metrics_prefix}_current_period',
                                      'Current period'),
        "current_eth_block_number": Gauge(f'{metrics_prefix}_current_eth_block_number',
                                          'Current Ethereum block'),
        "substakes_count_gauge": Gauge(f'{metrics_prefix}_substakes_count',
                                       'Substakes count'),
        "current_worker_is_me_gauge": Gauge(f'{metrics_prefix}_current_worker_is_me',
                                            'Current worker is me'),
        "worklock_deposited_eth_gauge": Gauge(f'{metrics_prefix}_worklock_current_deposited_eth',
                                              'Worklock deposited ETH'),
        "worklock_remaining_work_gauge": Gauge(f'{metrics_prefix}_worklock_refund_remaining_work',
                                               'Worklock remaining work'),
        "worklock_refund_completed_work_gauge": Gauge(f'{metrics_prefix}_worklock_refund_completedWork',
                                                      'Worklock completed work'),
    }

    # Blockchain-event collectors for staking, worklock and policy events.
    event_collectors_config = get_staking_event_collectors_config(ursula, metrics_prefix) + \
        get_worklock_event_collectors_config(ursula, metrics_prefix) + \
        get_policy_event_collectors_config(ursula, metrics_prefix)
    event_metrics_collectors = build_event_metrics_collectors(
        ursula, event_collectors_config)

    if not ursula.federated_only:
        # Gauge.set() receives a bool here (exported as 0.0/1.0).
        staking_agent = ContractAgency.get_agent(StakingEscrowAgent,
                                                 registry=ursula.registry)
        node_metrics["current_worker_is_me_gauge"].set(
            staking_agent.get_worker_from_staker(ursula.checksum_address)
            == ursula.worker_address)

    # Scheduling
    metrics_task = task.LoopingCall(
        collect_prometheus_metrics,
        ursula=ursula,
        event_metrics_collectors=event_metrics_collectors,
        node_metrics=node_metrics)
    metrics_task.start(interval=10, now=False)  # TODO: make configurable

    # WSGI Service
    root = Resource()
    root.putChild(b'metrics', MetricsResource())
    root.putChild(b'json_metrics', JSONMetricsResource())
    factory = Site(root)
    reactor.listenTCP(port, factory, interface=listen_address)
from bxgateway.utils.logging.status.summary import Summary
from bxutils import logging
from bxgateway import log_messages
from bxutils.encoding.json_encoder import EnhancedJSONEncoder
from bxgateway.utils.logging.status import summary as summary_status

logger = logging.get_logger(__name__)

STATUS_FILE_NAME = "gateway_status.log"

# Connection types that appear in the status summary.
CONN_TYPES = {
    ConnectionType.RELAY_BLOCK,
    ConnectionType.RELAY_TRANSACTION,
    ConnectionType.REMOTE_BLOCKCHAIN_NODE
}

# Exported gateway state; the Enum states mirror the GatewayStatus values.
gateway_status = Enum("gateway_status", "Gateway's online/offline status",
                      states=[status.value for status in GatewayStatus])


# (fragment) the body of initialize() continues past this view.
def initialize(use_ext: bool, src_ver: str, ip_address: str, continent: str,
               country: str, update_required: bool, account_id: Optional[str],
               quota_level: Optional[int]) -> Diagnostics:
    current_time = _get_current_time()
    summary = Summary(
        gateway_status=GatewayStatus.OFFLINE,
        ip_address=ip_address,
        continent=continent,
        country=country,
        update_required=update_required,
        account_info=summary_status.gateway_status_get_account_info(
            account_id),
from prometheus_client import start_http_server, Gauge, Counter, Enum, Summary, Info

# NOTE(review): a Summary is an unusual metric type for "total number of
# certificates" (a Gauge would be conventional); changing the type would alter
# the exported series, so this is flagged rather than changed.
CERTBOT_CERTS = Summary(
    "certbot_certs",
    "Total number of certificates managed by Lets Encrypt")

# Seconds remaining until each certificate expires.
CERTBOT_CERT_EXPIRY_SECONDS = Gauge(
    "certbot_certs_expiry_seconds",
    "Seconds until certificate expiry",
    labelnames=["name", "domains"],
)

# Per-certificate ACME status, one Enum state per ACME order status.
CERTBOT_CERT = Enum(
    "certbot_cert",
    "Status of certificate per ACME",
    states=[
        "UNKNOWN", "PENDING", "PROCESSING", "VALID", "INVALID", "REVOKED",
        "READY"
    ],
    labelnames=["name", "domains"],
)

CERTBOT_CERT_NAMES = Gauge(
    "certbot_cert_names",
    "Number of SANs (subject alternative names) in addition to the common name",
    labelnames=["name"],
)

# Same ACME status, also exported as free-form Info.
CERTBOT_CERT_STATUS = Info("certbot_cert_status",
                           "Status of certificate per ACME",
                           labelnames=["name"])
configuration["mqtt"]["server"], configuration["mqtt"]["port"], configuration["mqtt"]["prefix"], sensorId + "/update", json.dumps(flower)) except Exception, ex: print "Error on mqtt broadcast", ex if configuration["prometheuspush"]["enabled"]: registry = CollectorRegistry() for key in flower.keys(): if type(flower[key][1]) is str: if len(flower[key]) == 3: e = Enum(configuration["prometheuspush"]["prefix"] + '_' + key + '_total', flower[key][0], ['sensorid'], states=flower[key][2], registry=registry) e.labels(sensorid=sensorId).state(flower[key][1]) else: g = Gauge(configuration["prometheuspush"]["prefix"] + '_' + key + '_total', flower[key][0], ['sensorid'], registry=registry) g.labels(sensorid=sensorId).set(flower[key][1]) print "Pushing Prometheus", sensorId, ":", configuration["prometheuspush"]["prefix"] + '_' + key + '_total', "=", flower[key] try: push_to_gateway(configuration["prometheuspush"]["server"] + ":" + configuration["prometheuspush"]["port"], job=configuration["prometheuspush"]["client"] + "_" + sensorId,
import time

# Address-family-independent BGP totals.
bgp_global = {
    "bgp-v4-valid": Gauge("frr_bgp_valid_v4", "Count of Valid IPv4 Prefixes"),
    "bgp-v6-valid": Gauge("frr_bgp_valid_v6", "Count of Valid IPv6 Prefixes"),
    "bgp-peersUp": Gauge("frr_bgp_peersUp", "BGP Peers Up"),
}

# Per-peer metrics, labelled by address family, peer IP and AS number.
bgp_af = {
    "bgp-peer-pfxRcd": Gauge("frr_bgp_peer_pfxRcd", "BGP Peer Prefixes Received",
                             ["addrfam", "peerip", "asnum"]),
    # Peer FSM states, including the administratively-down variant of Idle.
    "bgp-peer-state": Enum("frr_bgp_peer_state", "BGP Peer State",
                           ["addrfam", "peerip", "asnum"],
                           states=[
                               "Established", "Connect", "Active", "OpenSent",
                               "Idle (Admin)", "Idle"
                           ]),
    "bgp-peer-msgRcvd": Gauge("frr_bgp_peer_msgRcvd", "BGP Peer Messages Received",
                              ["addrfam", "peerip", "asnum"]),
    "bgp-peer-msgSent": Gauge("frr_bgp_peer_msgSent", "BGP Peer Messages Sent",
                              ["addrfam", "peerip", "asnum"]),
    "bgp-peer-peerUptime": Gauge("frr_bgp_peer_peerUptime", "BGP Peer Uptime",
                                 ["addrfam", "peerip", "asnum"]),
}

# NOTE(review): "frr_respose_latency" misspells "response"; renaming the
# exported metric would break existing dashboards, so it is flagged only.
LATENCY = Summary("frr_respose_latency", "A daemon response time")