    def get(self, company_id):
        """Get Company status"""

        now = datetime.datetime.utcnow()
        last_hr = now - datetime.timedelta(hours=1)
        _LOGGER.info("Collecting company status since %s", last_hr)
        company = Company.query.filter(Company.uid == company_id).filter(Company.deleted == False).first()
        if not company:
            return {'error': 'Company not found'}, 404
        total_sensors = Sensor.query.filter(Sensor.company_id == company.id).filter(Sensor.deleted == False).count()
        total_sensors_down = Sensor.query.filter(Sensor.company_id == company.id).filter(Sensor.deleted == False).filter(Sensor.is_down == True).count()
        alert_logs = tsdb.get_points_raw(
            ALERT_HISTORY_SERIES,
            tags={'company_id': company_id},
            order_by='time desc',
            limit=10,
            offset=0,
            duration='1h',
            count_only=True
        )

        all_types = get_all_types()
        all_measurements = list(all_types.keys())
        message_logs = tsdb.get_points_raw(
            all_measurements,
            tags={'company_id': company.id},
            order_by='time desc',
            limit=10,
            offset=0,
            duration='1h',
            count_only=True
        )
        new_sensors = Sensor.query.filter(Sensor.company_id == company.id).filter(Sensor.deleted == False).filter(Sensor.created_at >= last_hr).count()
        daily_hits = tsdb.get_points_raw(
            DAILY_ANALYTICS_SERIES,
            tags={'company_id': company_id},
            order_by='time desc',
            limit=100,
            offset=0
        )

        return {
            "devices_online": total_sensors - total_sensors_down,
            "alerts_generated": alert_logs['total'],
            "message_received": message_logs['total'],
            "new_devices": new_sensors,
            "daily_hits": daily_hits
        }
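    # Hedged illustration (the values below are hypothetical; the keys come from
    # the return statement above): a company status response might look like
    #
    #     {"devices_online": 12,
    #      "alerts_generated": 3,
    #      "message_received": 480,
    #      "new_devices": 1,
    #      "daily_hits": {"total": 30, "data": [...]}}
    #
    # where "daily_hits" is passed through as the raw tsdb.get_points_raw() result.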
    def get(self, company_id):
        """Get alert trigger history"""
        order_by, order_type, offset, limit, filter = get_filters(in_request=request)

        company = Company.query.filter(Company.uid == company_id).filter(Company.deleted == False).first()
        if not company:
            return {'error': 'Company not found'}, 404
        tags = {"company_id": company.uid}
        if "sensor_id" in filter.keys():
            tags["sensor_id"] = filter["sensor_id"]
        try:
            return tsdb.get_points_raw(EVENT_LOG_SERIES, tags=tags, order_by='time desc', limit=limit, offset=offset)
        except Exception as e:
            _LOGGER.error(e)
            return {'error': 'Server error'}, 500
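    # Hedged note, inferred from how results are consumed elsewhere in this
    # module (message_logs['total'], event_logs['data'], points.get('total', 0)):
    # tsdb.get_points_raw() appears to return a dict shaped roughly like
    #
    #     {"total": <matching point count>, "data": [<point dict>, ...]}
    #
    # so the handler above returns that structure to the client unchanged.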
    def get(self, company_id):
        """Get company overview counts and recent event logs"""
        company = Company.query.filter(Company.uid == company_id).filter(Company.deleted == False).first()
        if not company:
            return {'error': 'Company not found'}, 404
        total_sensors = Sensor.query.filter(Sensor.company_id == company.id).filter(Sensor.deleted == False).count()
        total_sensors_down = Sensor.query.filter(Sensor.company_id == company.id).filter(Sensor.deleted == False).filter(Sensor.is_down == True).count()
        total_network = Network.query.filter(Network.company_id == company.id).filter(Network.deleted == False).count()
        total_alerts = Alert.query.filter(Alert.company_id == company.id).filter(Alert.deleted == False).count()
        event_logs = tsdb.get_points_raw(EVENT_LOG_SERIES, tags={'company_id': company_id}, order_by='time desc', limit=10, offset=0)

        return {
            "total_sensors": total_sensors,
            "total_sensors_up": total_sensors - total_sensors_down,
            "total_sensors_down": total_sensors_down,
            "total_network": total_network,
            "total_alerts": total_alerts,
            "event_logs": event_logs['data']
        }
    def get(self, company_id):
        """Get event run history"""
        order_by, order_type, offset, limit, filter = get_filters(in_request=request)

        company = Company.query.filter(Company.uid == company_id).filter(Company.deleted == False).first()
        if not company:
            return {'error': 'Company not found'}, 404
        tags = {"company_id": company.uid}
        if "sensor_id" in filter.keys():
            tags["sensor_id"] = filter["sensor_id"]
        if "event_id" in filter.keys():
            event = Event.query.filter(Event.id == filter["event_id"]).filter(Event.deleted_at == None).first()
            if event:
                tags["event_id"] = event.id
        try:
            return tsdb.get_points_raw(EVENT_HISTORY_SERIES, tags=tags, order_by='time desc', limit=limit, offset=offset)
        except Exception as e:
            _LOGGER.error(e)
            return {'error': 'Server error'}, 500
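# Hedged sketch (an assumption, not taken from the original source): the
# `get(self, company_id)` handlers above read the module-level `request` and
# return plain dicts or (dict, status) tuples, which matches Flask-RESTful
# Resource classes. A minimal registration could look like the following; the
# class name and route are illustrative only.
from flask import Flask
from flask_restful import Api, Resource


class CompanyStatusExample(Resource):
    # Hypothetical stand-in: in the real code each handler above would be a
    # method on a Resource subclass like this one.
    def get(self, company_id):
        return {"company_id": company_id}


example_app = Flask(__name__)
example_api = Api(example_app)
example_api.add_resource(CompanyStatusExample, "/companies/<string:company_id>/status")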
def devices_and_calls():
    _LOGGER.debug("Running devices_and_calls task")
    now = datetime.datetime.utcnow()
    now = now.replace(minute=0, microsecond=0)
    # Step back one minute so runs during the first hour of a day are attributed
    # to the previous day, then truncate to the start of that day.
    last_day = now - datetime.timedelta(minutes=1)
    last_day = last_day.replace(hour=0, minute=0, second=0, microsecond=0)
    companies = Company.query.filter(Company.deleted == False).all()
    total_sensors = db.session.query(func.count(Sensor.id), Sensor.company_id).filter(Sensor.last_update >= last_day).group_by(Sensor.company_id).all()

    all_types = get_all_types()
    all_measurements = list(all_types.keys())

    for company in companies:
        # Sensor counts for this company that reported since the start of the day;
        # default to a single zero so one analytics point is still written below.
        active_sensors = [x[0] for x in total_sensors if x[1] == company.id]
        if len(active_sensors) == 0:
            active_sensors = [0]
        for count in active_sensors:
            message_count = 0
            if len(all_measurements) > 0:
                for measurement in all_measurements:
                    try:
                        message_logs = tsdb.get_points_raw(
                            measurement, tags={'company_id': company.id}, order_by='time desc',
                            limit=10, offset=0, start_date=last_day.strftime("%Y-%m-%d %H:%M:%S"),
                            count_only=True)
                        message_count += message_logs['total']
                    except Exception as e:
                        _LOGGER.error(e)
            _LOGGER.info("%s: active_sensors=%s, message_count=%s", company.name, count, message_count)
            tsdb.add_series(
                DAILY_ANALYTICS_SERIES,
                {
                    'company_id': company.uid,
                    'period': 'daily'
                },
                {
                    'message_count': message_count,
                    'active_sensors': count
                },
                time=last_day.isoformat()
            )
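# Hedged scheduling sketch (an assumption, not taken from the original source):
# devices_and_calls() writes one DAILY_ANALYTICS_SERIES point per company per
# run, so it is presumably executed on a schedule. With APScheduler it could be
# wired up roughly like this; the timing below is illustrative only.
from apscheduler.schedulers.blocking import BlockingScheduler

analytics_scheduler = BlockingScheduler()
analytics_scheduler.add_job(devices_and_calls, "cron", hour=0, minute=5)

if __name__ == "__main__":
    analytics_scheduler.start()  # blocks and runs the job daily at 00:05 (scheduler-local time)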
def delete_sensor_data(sensor_type,
                       company_id=None,
                       sensor_id=None,
                       start_date=None,
                       end_date=None):
    # Delete time-series points for the given sensor type, optionally scoped by
    # company, sensor and date window.
    # TODO: Delete files data
    # TODO: Delete timeseries data
    # TODO: Get all the sensors; if a sensor has file data, delete that data too
    try:
        tags = {}
        if sensor_id:
            tags['sensor_id'] = sensor_id
        if company_id:
            tags['company_id'] = company_id

        if sensor_id:
            sensor = Sensor.query.get(sensor_id)
            if not sensor:
                _LOGGER.error("Sensor not found")
                return
            sensor_type_model = SensorType.query.filter(
                SensorType.type == sensor_type).first()
            files_data = False
            file_field = None
            if sensor_type_model:
                # Check if sensor has files data
                for _field_name, field in sensor_type_model.value_fields.items():
                    _LOGGER.debug(field)
                    if field['type'] == 'file':
                        files_data = True
                        file_field = _field_name
                        break
            if files_data:
                # Get first and last filename
                last_file_name = None
                files_start_date = start_date
                files_end_date = end_date
                points = tsdb.get_points_raw(sensor_type,
                                             tags=tags,
                                             start_date=start_date,
                                             end_date=end_date,
                                             limit=100)
                total = points.get('total', 0)
                _LOGGER.debug(points)
                if total > 0:
                    for row in points['data']:
                        last_file_name = row.get(file_field, None)
                        if last_file_name:
                            break

                    _LOGGER.debug(last_file_name)

                    first_file_name = None
                    # Offset towards the tail of the result set so the second
                    # query returns the rows at the other end of the range.
                    if total > 50:
                        total -= 50
                    else:
                        total = 0
                    points = tsdb.get_points_raw(sensor_type,
                                                 tags=tags,
                                                 start_date=start_date,
                                                 end_date=end_date,
                                                 offset=total,
                                                 limit=100)
                    total = points.get('total', 0)
                    _LOGGER.debug(points)
                    if total > 0:
                        for row in points['data']:
                            _name = row.get(file_field, None)
                            if _name:
                                first_file_name = _name
                    _LOGGER.debug(first_file_name)

                    if first_file_name:
                        first_file = BinFile.query.filter(
                            BinFile.uid == first_file_name).filter(
                                BinFile.sensor_id == sensor_id).first()
                        if first_file:
                            files_start_date = first_file.created_at
                            _LOGGER.debug(files_start_date)
                    if last_file_name:
                        last_file = BinFile.query.filter(
                            BinFile.uid == last_file_name).filter(
                                BinFile.sensor_id == sensor_id).first()
                        if last_file:
                            files_end_date = last_file.created_at
                            _LOGGER.debug(files_end_date)
                # File-backed series: only the file date window is resolved so
                # far; deleting the files and their points is still a TODO.
                return
            query = db.session.query(BinFile).filter(
                BinFile.sensor_id == sensor_id)
            if start_date:
                query = query.filter(
                    BinFile.created_at >= parser.parse(start_date).isoformat())
            if end_date:
                query = query.filter(
                    BinFile.created_at <= parser.parse(end_date).isoformat())
            query.delete()
            db.session.commit()

        tsdb.delete_points(sensor_type,
                           tags,
                           end_date=end_date,
                           start_date=start_date)
    except Exception as e:
        _LOGGER.exception("Failed to delete sensor data: %s", e)
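# Hedged usage sketch (all argument values are hypothetical, not from the
# original source): remove one sensor's points of a given type within a single
# day, along with any associated BinFile rows, per the function above.
if __name__ == "__main__":
    delete_sensor_data(
        sensor_type="temperature",          # assumed sensor type name
        company_id="acme-01",               # hypothetical company tag value
        sensor_id=1001,                     # hypothetical Sensor primary key
        start_date="2021-01-01 00:00:00",
        end_date="2021-01-02 00:00:00",
    )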