Example #1
    def get_total_requests(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        tv_endpoint = '/api/v1/Request/tv'
        movie_endpoint = '/api/v1/Request/movie'

        tv_req = self.session.prepare_request(
            Request('GET', self.server.url + tv_endpoint))
        movie_req = self.session.prepare_request(
            Request('GET', self.server.url + movie_endpoint))
        get_tv = connection_handler(self.session, tv_req,
                                    self.server.verify_ssl)
        get_movie = connection_handler(self.session, movie_req,
                                       self.server.verify_ssl)

        if not all([get_tv, get_movie]):
            return

        movie_requests = len(get_movie)
        tv_requests = len(get_tv)

        influx_payload = [{
            "measurement": "Ombi",
            "tags": {
                "type": "Request_Total",
                "server": self.server.id
            },
            "time": now,
            "fields": {
                "total": movie_requests + tv_requests,
                "movies": movie_requests,
                "tv_shows": tv_requests
            }
        }]

        self.dbmanager.write_points(influx_payload)
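
Note: every example on this page funnels its HTTP call through connection_handler, which is not itself shown. A minimal sketch of what it appears to do — send the prepared request, return the decoded JSON on success and None on failure, or the raw Response when as_is_reply=True (see Example #9) — might look like the following; the timeout value and exact exception handling are assumptions:

    import requests
    from json.decoder import JSONDecodeError

    def connection_handler(session, request, verify, as_is_reply=False):
        try:
            # Send the already-prepared request on the caller's session
            response = session.send(request, verify=verify, timeout=10)
            if as_is_reply:
                return response  # caller wants the raw Response (cookies, headers)
            if response.status_code == 200:
                return response.json()  # decoded JSON payload
        except (requests.exceptions.ConnectionError, JSONDecodeError):
            pass
        return None  # callers treat a falsy return as "skip this job"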
Example #2
    def get_request_counts(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/Request/count'

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        requests = OmbiRequestCounts(**get)
        influx_payload = [{
            "measurement": "Ombi",
            "tags": {
                "type": "Request_Counts"
            },
            "time": now,
            "fields": {
                "pending": requests.pending,
                "approved": requests.approved,
                "available": requests.available
            }
        }]

        self.dbmanager.write_points(influx_payload)
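
OmbiRequestCounts is unpacked directly from the JSON reply, so its field names must match the keys returned by Ombi's /api/v1/Request/count endpoint. A hypothetical minimal definition, consistent with the fields read above:

    from typing import NamedTuple

    # Field names mirror the JSON keys of Ombi's count endpoint, so
    # OmbiRequestCounts(**get) maps the reply straight onto the structure
    class OmbiRequestCounts(NamedTuple):
        pending: int = 0
        approved: int = 0
        available: int = 0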
Example #3
    def get_bandwidth(self):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/monitoring/device/interfaces/' + self.firewall.outside_interface

        if not self.session.headers:
            return

        req = self.session.prepare_request(
            Request('GET', self.firewall.url + endpoint))
        get = connection_handler(self.session, req, self.firewall.verify_ssl)

        if not get:
            return

        influx_payload = [{
            "measurement": "Cisco ASA",
            "tags": {
                "interface": self.firewall.outside_interface
            },
            "time": self.now,
            "fields": {
                "upload_bitrate": get['outputBitRate'],
                "download_bitrate": get['inputBitRate']
            }
        }]

        self.dbmanager.write_points(influx_payload)
Example #4
File: tautulli.py Project: arnesweb/Varken
    def get_stats(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        params = {'cmd': 'get_libraries'}

        req = self.session.prepare_request(
            Request('GET', self.server.url + self.endpoint, params=params))
        g = connection_handler(self.session, req, self.server.verify_ssl)

        if not g:
            return

        get = g['response']['data']

        for library in get:
            data = {
                "measurement": "Tautulli",
                "tags": {
                    "type": "library_stats",
                    "server": self.server.id,
                    "section_name": library['section_name'],
                    "section_type": library['section_type']
                },
                "time": now,
                "fields": {
                    "total": int(library['count'])
                }
            }
            if library['section_type'] == 'show':
                data['fields']['seasons'] = int(library['parent_count'])
                data['fields']['episodes'] = int(library['child_count'])
            influx_payload.append(data)

        self.dbmanager.write_points(influx_payload)
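
All of these jobs end with self.dbmanager.write_points(influx_payload). The DBManager itself is not shown on this page; a minimal sketch, assuming the influxdb client library and a 'varken' database name (both assumptions, not the project's confirmed config):

    from influxdb import InfluxDBClient

    class DBManager:
        def __init__(self, server):
            # Connection details here are illustrative assumptions
            self.influx = InfluxDBClient(server.url, server.port,
                                         server.username, server.password,
                                         'varken')

        def write_points(self, data):
            # Each job builds a list of point dicts (measurement/tags/time/fields)
            # and hands the list over unchanged
            self.influx.write_points(data)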
Example #5
    def get_issue_counts(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/Issues/count'

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        requests = OmbiIssuesCounts(**get)
        influx_payload = [{
            "measurement": "Ombi",
            "tags": {
                "type": "Issues_Counts"
            },
            "time": now,
            "fields": {
                "pending": requests.pending,
                "in_progress": requests.inProgress,
                "resolved": requests.resolved
            }
        }]

        self.dbmanager.write_points(influx_payload)
Example #6
    def get_queue(self):
        influx_payload = []
        endpoint = '/api/queue'
        now = datetime.now(timezone.utc).astimezone().isoformat()
        queue = []

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        try:
            download_queue = [Queue(**show) for show in get]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating Queue structure',
                e)
            return

        for show in download_queue:
            try:
                sxe = f"S{show.episode['seasonNumber']:0>2}E{show.episode['episodeNumber']:0>2}"
            except TypeError as e:
                self.logger.error(
                    'TypeError has occurred : %s while processing the sonarr '
                    'queue. Remove invalid queue entries.', e)
                continue

            if show.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((show.series['title'], show.episode['title'],
                          show.protocol.upper(), protocol_id, sxe, show.id,
                          show.quality['quality']['name']))

        for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id, quality in queue:
            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
            influx_payload.append({
                "measurement": "Sonarr",
                "tags": {
                    "type": "Queue",
                    "sonarrId": sonarr_id,
                    "server": self.server.id,
                    "name": series_title,
                    "epname": episode_title,
                    "sxe": sxe,
                    "protocol": protocol,
                    "protocol_id": protocol_id,
                    "quality": quality
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })
        self.dbmanager.write_points(influx_payload)
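
hashit() is also not shown here. It collapses the identifying strings (server id, series title, episode code) into a stable digest that becomes the point's only field, so the tag set carries the identity while the field payload stays uniform. A plausible one-liner, assuming SHA-256 (the exact digest algorithm is an assumption):

    from hashlib import sha256

    def hashit(string):
        # Stable digest of the identifying string; used as the sole field value
        return sha256(string.encode()).hexdigest()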
Example #7
    def get_future(self):
        endpoint = '/api/calendar/'
        today = str(date.today())
        now = datetime.now(timezone.utc).astimezone().isoformat()
        future = str(date.today() + timedelta(days=self.server.future_days))
        influx_payload = []
        air_days = []
        params = {'start': today, 'end': future}

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        try:
            tv_shows = [TVShow(**show) for show in get]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating TVShow structure',
                e)
            return

        for show in tv_shows:
            sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber,
                                          show.episodeNumber)
            if show.hasFile:
                downloaded = 1
            else:
                downloaded = 0
            air_days.append((show.series['title'], downloaded, sxe, show.title,
                             show.airDate, show.id))

        for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title,
                                             sxe))
            influx_payload.append({
                "measurement": "Sonarr",
                "tags": {
                    "type": "Future",
                    "sonarrId": sonarr_id,
                    "server": self.server.id,
                    "name": series_title,
                    "epname": episode_title,
                    "sxe": sxe,
                    "airs": air_date,
                    "downloaded": dl_status
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)
Example #8
    def get_missing(self):
        endpoint = '/api/calendar'
        today = str(date.today())
        last_days = str(date.today() +
                        timedelta(days=-self.server.missing_days))
        now = datetime.now(timezone.utc).astimezone().isoformat()
        params = {'start': last_days, 'end': today}
        influx_payload = []
        missing = []

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        # Iteratively create a list of TVShow Objects from response json
        try:
            tv_shows = [TVShow(**show) for show in get]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating TVShow structure',
                e)
            return

        # Add show to missing list if file does not exist
        for show in tv_shows:
            if not show.hasFile:
                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber,
                                              show.episodeNumber)
                missing.append((show.series['title'], sxe, show.airDate,
                                show.title, show.id))

        for series_title, sxe, air_date, episode_title, sonarr_id in missing:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title,
                                             sxe))
            influx_payload.append({
                "measurement": "Sonarr",
                "tags": {
                    "type": "Missing",
                    "sonarrId": sonarr_id,
                    "server": self.server.id,
                    "name": series_title,
                    "epname": episode_title,
                    "sxe": sxe,
                    "airs": air_date
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)
Example #9
File: unifi.py Project: arnesweb/Varken
    def get_cookie(self):
        endpoint = '/api/login'
        pre_cookies = {'username': self.server.username, 'password': self.server.password, 'remember': True}
        req = self.session.prepare_request(Request('POST', self.server.url + endpoint, json=pre_cookies))
        post = connection_handler(self.session, req, self.server.verify_ssl, as_is_reply=True)

        if not post or not post.cookies.get('unifises'):
            return

        cookies = {'unifises': post.cookies.get('unifises')}
        self.session.cookies.update(cookies)
Example #10
    def get_token(self):
        endpoint = '/api/tokenservices'

        req = self.session.prepare_request(
            Request('POST', self.firewall.url + endpoint))
        post = connection_handler(self.session, req, self.firewall.verify_ssl)

        if not post:
            return

        self.session.headers = {'X-Auth-Token': post}
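
One detail worth knowing here: assigning self.session.headers wholesale replaces requests' default headers (User-Agent, Accept-Encoding, keep-alive) rather than merging with them. If keeping the defaults matters, session.headers.update() is the merging form; a quick standalone illustration:

    import requests

    session = requests.Session()
    # Merges the token into the default header set
    session.headers.update({'X-Auth-Token': 'example-token'})
    # By contrast, plain assignment drops User-Agent and friends:
    # session.headers = {'X-Auth-Token': 'example-token'}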
Example #11
File: unifi.py Project: slimshizn/Varken
    def get_usg_stats(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = f'/api/s/{self.server.site}/stat/device'
        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            self.logger.error("Disregarding Job get_usg_stats for unifi-%s",
                              self.server.id)
            return

        devices = {device['name']: device for device in get['data']}
        if devices.get(self.server.usg_name):
            device = devices[self.server.usg_name]
        else:
            self.logger.error(
                "Could not find a USG named %s from your UniFi Controller",
                self.server.usg_name)
            return

        try:
            influx_payload = [{
                "measurement": "UniFi",
                "tags": {
                    "model": device['model'],
                    "name": device['name']
                },
                "time": now,
                "fields": {
                    "bytes_current": device['wan1']['bytes-r'],
                    "rx_bytes_total": device['wan1']['rx_bytes'],
                    "rx_bytes_current": device['wan1']['rx_bytes-r'],
                    "tx_bytes_total": device['wan1']['tx_bytes'],
                    "tx_bytes_current": device['wan1']['tx_bytes-r'],
                    # Speedtest fields disabled until UniFi's speedtest-status API stabilizes
                    # "speedtest_latency": device['speedtest-status']['latency'],
                    # "speedtest_download": device['speedtest-status']['xput_download'],
                    # "speedtest_upload": device['speedtest-status']['xput_upload'],
                    "cpu_loadavg_1": float(device['sys_stats']['loadavg_1']),
                    "cpu_loadavg_5": float(device['sys_stats']['loadavg_5']),
                    "cpu_loadavg_15": float(device['sys_stats']['loadavg_15']),
                    "cpu_util": float(device['system-stats']['cpu']),
                    "mem_util": float(device['system-stats']['mem']),
                }
            }]
            self.dbmanager.write_points(influx_payload)
        except KeyError as e:
            self.logger.error(
                'Error building payload for unifi. Discarding. Error: %s', e)
Example #12
    def get_queue(self):
        endpoint = '/api/v1/queue'
        now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        params = {'pageSize': 1000}

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        queue = []
        for song in get['records']:
            try:
                queue.append(LidarrQueue(**song))
            except TypeError as e:
                self.logger.error(
                    'TypeError has occurred : %s while creating LidarrQueue structure for show. Data '
                    'attempted is: %s', e, song)

        if not queue:
            return

        for song in queue:
            if song.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0
            hash_id = hashit(f'{self.server.id}{song.title}{song.artistId}')
            influx_payload.append({
                "measurement": "Lidarr",
                "tags": {
                    "type": "Queue",
                    "id": song.id,
                    "server": self.server.id,
                    "title": song.title,
                    "quality": song.quality['quality']['name'],
                    "protocol": song.protocol,
                    "protocol_id": protocol_id,
                    "indexer": song.indexer
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)
Example #13
    def get_missing(self):
        endpoint = '/api/movie'
        now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        missing = []

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        try:
            movies = [RadarrMovie(**movie) for movie in get]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating RadarrMovie structure',
                e)
            return

        for movie in movies:
            if movie.monitored and not movie.downloaded:
                if movie.isAvailable:
                    ma = 0
                else:
                    ma = 1

                movie_name = f'{movie.title} ({movie.year})'
                missing.append((movie_name, ma, movie.tmdbId, movie.titleSlug))

        for title, ma, mid, title_slug in missing:
            hash_id = hashit(f'{self.server.id}{title}{mid}')
            influx_payload.append({
                "measurement": "Radarr",
                "tags": {
                    "Missing": True,
                    "Missing_Available": ma,
                    "tmdbId": mid,
                    "server": self.server.id,
                    "name": title,
                    "titleSlug": title_slug
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)
Example #14
    def get_site(self):
        endpoint = '/api/self/sites'
        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            self.logger.error(
                "Could not get list of sites from UniFi Controller")
            return
        site = [
            site['name'] for site in get['data']
            if site['name'].lower() == self.server.site.lower()
            or site['desc'].lower() == self.server.site.lower()
        ]
        if site:
            self.site = site[0]
        else:
            self.logger.error(
                f"Could not map site {self.server.site} to a site id/alias")
Example #15
    def get_missing(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        params = {
            'cmd': 'future',
            'paused': 1,
            'type': 'missed|today|soon|later|snatched'
        }

        req = self.session.prepare_request(
            Request('GET', self.server.url + self.endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        try:
            for key, section in get['data'].items():
                get['data'][key] = [
                    SickChillTVShow(**show) for show in section
                ]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating SickChillTVShow structure',
                e)
            return

        for key, section in get['data'].items():
            for show in section:
                sxe = f'S{show.season:0>2}E{show.episode:0>2}'
                hash_id = hashit(f'{self.server.id}{show.show_name}{sxe}')
                missing_types = [(0, 'future'), (1, 'later'), (2, 'soon'),
                                 (3, 'today'), (4, 'missed')]
                try:
                    influx_payload.append({
                        "measurement": "SickChill",
                        "tags": {
                            "type":
                            [item[0] for item in missing_types
                             if key in item][0],
                            "indexerid":
                            show.indexerid,
                            "server":
                            self.server.id,
                            "name":
                            show.show_name,
                            "epname":
                            show.ep_name,
                            "sxe":
                            sxe,
                            "airdate":
                            show.airdate,
                        },
                        "time": now,
                        "fields": {
                            "hash": hash_id
                        }
                    })
                except IndexError as e:
                    self.logger.error(
                        'Error building payload for sickchill. Discarding. Error: %s',
                        e)

        if influx_payload:
            self.dbmanager.write_points(influx_payload)
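
Aside: the (id, name) tuple list plus list-comprehension lookup above could be a plain dict, which would also make the IndexError guard unnecessary. A standalone sketch of that variant:

    missing_types = {'future': 0, 'later': 1, 'soon': 2, 'today': 3, 'missed': 4}
    # dict.get returns None for an unexpected key instead of raising IndexError
    print(missing_types.get('today'))    # 3
    print(missing_types.get('unknown'))  # None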
Example #16
    def get_calendar(self, query="Missing"):
        endpoint = '/api/calendar/'
        today = str(date.today())
        last_days = str(date.today() -
                        timedelta(days=self.server.missing_days))
        future = str(date.today() + timedelta(days=self.server.future_days))
        now = datetime.now(timezone.utc).astimezone().isoformat()
        if query == "Missing":
            params = {'start': last_days, 'end': today}
        else:
            params = {'start': today, 'end': future}
        influx_payload = []
        air_days = []
        missing = []

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        tv_shows = []
        for show in get:
            try:
                tv_shows.append(SonarrTVShow(**show))
            except TypeError as e:
                self.logger.error(
                    'TypeError has occurred : %s while creating SonarrTVShow structure for show. Data '
                    'attempted is: %s', e, show)

        for show in tv_shows:
            sxe = f'S{show.seasonNumber:0>2}E{show.episodeNumber:0>2}'
            if show.hasFile:
                downloaded = 1
            else:
                downloaded = 0
            if query == "Missing":
                if not downloaded:
                    # Keep this tuple in the same field order as air_days so the
                    # shared unpacking loop below reads each value correctly
                    missing.append((show.series['title'], downloaded, sxe,
                                    show.title, show.airDateUtc, show.id))
            else:
                air_days.append((show.series['title'], downloaded, sxe,
                                 show.title, show.airDateUtc, show.id))

        for series_title, dl_status, sxe, episode_title, air_date_utc, sonarr_id in (
                air_days or missing):
            hash_id = hashit(f'{self.server.id}{series_title}{sxe}')
            influx_payload.append({
                "measurement": "Sonarr",
                "tags": {
                    "type": query,
                    "sonarrId": sonarr_id,
                    "server": self.server.id,
                    "name": series_title,
                    "epname": episode_title,
                    "sxe": sxe,
                    "airsUTC": air_date_utc,
                    "downloaded": dl_status
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)
Example #17
    def get_all_requests(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        tv_endpoint = '/api/v1/Request/tv'
        movie_endpoint = "/api/v1/Request/movie"

        tv_req = self.session.prepare_request(
            Request('GET', self.server.url + tv_endpoint))
        movie_req = self.session.prepare_request(
            Request('GET', self.server.url + movie_endpoint))
        get_tv = connection_handler(self.session, tv_req,
                                    self.server.verify_ssl)
        get_movie = connection_handler(self.session, movie_req,
                                       self.server.verify_ssl)

        if not all([get_tv, get_movie]):
            self.logger.error('No json replies. Discarding job')
            return

        movie_request_count = len(get_movie)
        tv_request_count = len(get_tv)

        try:
            tv_show_requests = [OmbiTVRequest(**show) for show in get_tv]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating OmbiTVRequest structure',
                e)
            return

        try:
            movie_requests = [OmbiMovieRequest(**movie) for movie in get_movie]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating OmbiMovieRequest structure',
                e)
            return

        influx_payload = [{
            "measurement": "Ombi",
            "tags": {
                "type": "Request_Total",
                "server": self.server.id
            },
            "time": now,
            "fields": {
                "total": movie_request_count + tv_request_count,
                "movies": movie_request_count,
                "tv_shows": tv_request_count
            }
        }]
        # Request Type: Movie = 1, TV Show = 0
        for movie in movie_requests:
            hash_id = hashit(f'{movie.id}{movie.theMovieDbId}{movie.title}')

            # Denied = 0, Approved = 1, Completed = 2, Pending = 3
            if movie.denied:
                status = 0

            elif movie.approved and movie.available:
                status = 2

            elif movie.approved:
                status = 1

            else:
                status = 3

            influx_payload.append({
                "measurement": "Ombi",
                "tags": {
                    "type": "Requests",
                    "server": self.server.id,
                    "request_type": 1,
                    "status": status,
                    "title": movie.title,
                    "requested_user": movie.requestedUser['userAlias'],
                    "requested_date": movie.requestedDate
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        for show in tv_show_requests:
            hash_id = hashit(f'{show.id}{show.tvDbId}{show.title}')

            # Denied = 0, Approved = 1, Completed = 2, Pending = 3
            if show.childRequests[0]['denied']:
                status = 0

            elif show.childRequests[0]['approved'] and show.childRequests[0][
                    'available']:
                status = 2

            elif show.childRequests[0]['approved']:
                status = 1

            else:
                status = 3

            influx_payload.append({
                "measurement": "Ombi",
                "tags": {
                    "type":
                    "Requests",
                    "server":
                    self.server.id,
                    "request_type":
                    0,
                    "status":
                    status,
                    "title":
                    show.title,
                    "requested_user":
                    show.childRequests[0]['requestedUser']['userAlias'],
                    "requested_date":
                    show.childRequests[0]['requestedDate']
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)
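
The status branching for movies and for TV child requests is identical; a small helper (hypothetical, not in the source) would capture the Denied = 0 / Approved = 1 / Completed = 2 / Pending = 3 encoding once:

    def request_status(denied, approved, available):
        # Denied = 0, Approved = 1, Completed = 2, Pending = 3
        if denied:
            return 0
        if approved and available:
            return 2
        if approved:
            return 1
        return 3

    # e.g. status = request_status(movie.denied, movie.approved, movie.available)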
Example #18
    def get_historical(self, days=30):
        influx_payload = []
        start_date = date.today() - timedelta(days=days)
        params = {'cmd': 'get_history', 'grouping': 1, 'length': 1000000}
        req = self.session.prepare_request(
            Request('GET', self.server.url + self.endpoint, params=params))
        g = connection_handler(self.session, req, self.server.verify_ssl)

        if not g:
            return

        get = g['response']['data']['data']

        params = {'cmd': 'get_stream_data', 'row_id': 0}
        sessions = []
        for history_item in get:
            if not history_item['id']:
                self.logger.debug('Skipping entry with no ID. (%s)',
                                  history_item['full_title'])
                continue
            if date.fromtimestamp(history_item['started']) < start_date:
                continue
            params['row_id'] = history_item['id']
            req = self.session.prepare_request(
                Request('GET', self.server.url + self.endpoint, params=params))
            g = connection_handler(self.session, req, self.server.verify_ssl)
            if not g:
                self.logger.debug(
                    'Could not get historical stream data for %s. Skipping.',
                    history_item['full_title'])
                continue
            try:
                self.logger.debug('Adding %s to history',
                                  history_item['full_title'])
                history_item.update(g['response']['data'])
                sessions.append(TautulliStream(**history_item))
            except TypeError as e:
                self.logger.error(
                    'TypeError has occurred : %s while creating TautulliStream structure',
                    e)
                continue

        for session in sessions:
            try:
                geodata = self.geoiphandler.lookup(session.ip_address)
            except (ValueError, AddressNotFoundError):
                self.logger.debug('Public IP missing for Tautulli session...')
                if not self.my_ip:
                    # Try the fallback ip in the config file
                    try:
                        self.logger.debug(
                            'Attempting to use the fallback IP...')
                        geodata = self.geoiphandler.lookup(
                            self.server.fallback_ip)
                    except AddressNotFoundError as e:
                        self.logger.error('%s', e)

                        self.my_ip = self.session.get(
                            'http://ip.42.pl/raw').text
                        self.logger.debug(
                            'Looked up the public IP and set it to %s',
                            self.my_ip)

                        geodata = self.geoiphandler.lookup(self.my_ip)

                else:
                    geodata = self.geoiphandler.lookup(self.my_ip)

            if not all([geodata.location.latitude,
                        geodata.location.longitude]):
                latitude = 37.234332396
                longitude = -115.80666344
            else:
                latitude = geodata.location.latitude
                longitude = geodata.location.longitude

            if not geodata.city.name:
                location = '👽'
            else:
                location = geodata.city.name

            decision = session.transcode_decision
            if decision == 'copy':
                decision = 'direct stream'

            video_decision = session.stream_video_decision
            if video_decision == 'copy':
                video_decision = 'direct stream'
            elif video_decision == '':
                video_decision = 'Music'

            quality = session.stream_video_resolution
            if not quality:
                quality = session.container.upper()
            elif quality in ('SD', 'sd', '4k'):
                quality = session.stream_video_resolution.upper()
            elif session.stream_video_full_resolution:
                quality = session.stream_video_full_resolution
            else:
                quality = session.stream_video_resolution + 'p'

            # Platform Overrides
            platform_name = session.platform
            if platform_name == 'osx':
                platform_name = 'Plex Mac OS'
            if platform_name == 'windows':
                platform_name = 'Plex Windows'

            player_state = 100

            hash_id = hashit(
                f'{session.id}{session.session_key}{session.user}{session.full_title}'
            )
            influx_payload.append({
                "measurement":
                "Tautulli",
                "tags": {
                    "type": "Session",
                    "session_id": session.session_id,
                    "friendly_name": session.friendly_name,
                    "username": session.user,
                    "title": session.full_title,
                    "product": session.product,
                    "platform": platform_name,
                    "quality": quality,
                    "video_decision": video_decision.title(),
                    "transcode_decision": decision.title(),
                    "transcode_hw_decoding": session.transcode_hw_decoding,
                    "transcode_hw_encoding": session.transcode_hw_encoding,
                    "media_type": session.media_type.title(),
                    "audio_codec": session.audio_codec.upper(),
                    "stream_audio_codec": session.stream_audio_codec.upper(),
                    "quality_profile": session.quality_profile,
                    "progress_percent": session.progress_percent,
                    "region_code": geodata.subdivisions.most_specific.iso_code,
                    "location": location,
                    "full_location":
                    f'{geodata.subdivisions.most_specific.name} - {geodata.city.name}',
                    "latitude": latitude,
                    "longitude": longitude,
                    "player_state": player_state,
                    "device_type": platform_name,
                    "relayed": session.relayed,
                    "secure": session.secure,
                    "server": self.server.id
                },
                "time":
                datetime.fromtimestamp(
                    session.stopped).astimezone().isoformat(),
                "fields": {
                    "hash": hash_id
                }
            })
            try:
                self.dbmanager.write_points(influx_payload)
            except InfluxDBClientError as e:
                if "beyond retention policy" in str(e):
                    self.logger.debug(
                        'Only imported 30 days of data per retention policy')
                else:
                    self.logger.error(
                        'Something went wrong... post this output in discord: %s',
                        e)
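
The geoiphandler used here is not shown; given the AddressNotFoundError handling, it presumably wraps MaxMind's geoip2 reader. A minimal sketch, assuming a local GeoLite2 City database file (the path is an assumption):

    import geoip2.database

    class GeoIPHandler:
        def __init__(self, db_path='GeoLite2-City.mmdb'):
            self.reader = geoip2.database.Reader(db_path)

        def lookup(self, ip_address):
            # Returns a City model exposing .location, .city and .subdivisions,
            # matching the attributes read above; raises AddressNotFoundError
            # for unknown or private addresses, which the caller catches
            return self.reader.city(ip_address)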
Example #19
    def get_queue(self):
        endpoint = '/api/queue'
        now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        queue = []

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        for movie in get:
            try:
                movie['movie'] = RadarrMovie(**movie['movie'])
            except TypeError as e:
                self.logger.error(
                    'TypeError has occurred : %s while creating RadarrMovie structure',
                    e)
                return

        try:
            download_queue = [Queue(**movie) for movie in get]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating Queue structure',
                e)
            return

        for queue_item in download_queue:
            movie = queue_item.movie

            name = f'{movie.title} ({movie.year})'

            if queue_item.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((name, queue_item.quality['quality']['name'],
                          queue_item.protocol.upper(), protocol_id,
                          queue_item.id, movie.titleSlug))

        for name, quality, protocol, protocol_id, qid, title_slug in queue:
            hash_id = hashit(f'{self.server.id}{name}{quality}')
            influx_payload.append({
                "measurement": "Radarr",
                "tags": {
                    "type": "Queue",
                    "tmdbId": qid,
                    "server": self.server.id,
                    "name": name,
                    "quality": quality,
                    "protocol": protocol,
                    "protocol_id": protocol_id,
                    "titleSlug": title_slug
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)
Example #20
File: tautulli.py Project: tkuennen/Varken
    def get_activity(self):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []

        req = self.session.prepare_request(Request('GET', self.server.url + self.endpoint))
        g = connection_handler(self.session, req, self.server.verify_ssl)

        if not g:
            return

        get = g['response']['data']

        try:
            sessions = [TautulliStream(**session) for session in get['sessions']]
        except TypeError as e:
            self.logger.error('TypeError has occurred : %s while creating TautulliStream structure', e)
            return

        for session in sessions:
            try:
                geodata = geo_lookup(session.ip_address_public)
            except (ValueError, AddressNotFoundError):
                if self.server.fallback_ip:
                    geodata = geo_lookup(self.server.fallback_ip)
                else:
                    my_ip = self.session.get('http://ip.42.pl/raw').text
                    geodata = geo_lookup(my_ip)

            if not all([geodata.location.latitude, geodata.location.longitude]):
                latitude = 37.234332396
                longitude = -115.80666344
            else:
                latitude = geodata.location.latitude
                longitude = geodata.location.longitude

            decision = session.transcode_decision
            if decision == 'copy':
                decision = 'direct stream'

            video_decision = session.stream_video_decision
            if video_decision == 'copy':
                video_decision = 'direct stream'
            elif video_decision == '':
                video_decision = 'Music'

            quality = session.stream_video_resolution
            if not quality:
                quality = session.container.upper()
            elif quality in ('SD', 'sd', '4k'):
                quality = session.stream_video_resolution.upper()
            else:
                quality = session.stream_video_resolution + 'p'

            player_state = session.state.lower()
            if player_state == 'playing':
                player_state = 0
            elif player_state == 'paused':
                player_state = 1
            elif player_state == 'buffering':
                player_state = 3

            product_version = session.product_version
            if session.platform == 'Roku':
                product_version = session.product_version.split('-')[0]

            hash_id = hashit('{}{}{}{}'.format(session.session_id, session.session_key, session.username,
                                               session.full_title))
            influx_payload.append(
                {
                    "measurement": "Tautulli",
                    "tags": {
                        "type": "Session",
                        "session_id": session.session_id,
                        "friendly_name": session.friendly_name,
                        "username": session.username,
                        "title": session.full_title,
                        "platform": session.platform,
                        "product_version": product_version,
                        "quality": quality,
                        "video_decision": video_decision.title(),
                        "transcode_decision": decision.title(),
                        "media_type": session.media_type.title(),
                        "audio_codec": session.audio_codec.upper(),
                        "audio_profile": session.audio_profile.upper(),
                        "stream_audio_codec": session.stream_audio_codec.upper(),
                        "quality_profile": session.quality_profile,
                        "progress_percent": session.progress_percent,
                        "region_code": geodata.subdivisions.most_specific.iso_code,
                        "location": geodata.city.name,
                        "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name,
                                                          geodata.city.name),
                        "latitude": latitude,
                        "longitude": longitude,
                        "player_state": player_state,
                        "device_type": session.platform,
                        "server": self.server.id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        influx_payload.append(
            {
                "measurement": "Tautulli",
                "tags": {
                    "type": "current_stream_stats",
                    "server": self.server.id
                },
                "time": self.now,
                "fields": {
                    "stream_count": int(get['stream_count']),
                    "total_bandwidth": int(get['total_bandwidth']),
                    "wan_bandwidth": int(get['wan_bandwidth']),
                    "lan_bandwidth": int(get['lan_bandwidth']),
                    "transcode_streams": int(get['stream_count_transcode']),
                    "direct_play_streams": int(get['stream_count_direct_play']),
                    "direct_streams": int(get['stream_count_direct_stream'])
                }
            }
        )

        self.dbmanager.write_points(influx_payload)
Example #21
File: tautulli.py Project: arnesweb/Varken
    def get_activity(self):
        now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        params = {'cmd': 'get_activity'}

        req = self.session.prepare_request(
            Request('GET', self.server.url + self.endpoint, params=params))
        g = connection_handler(self.session, req, self.server.verify_ssl)

        if not g:
            return

        get = g['response']['data']

        # Remove erroneous key from sessions
        for session in get['sessions']:
            if session.get('_cache_time'):
                del session['_cache_time']

        try:
            sessions = [
                TautulliStream(**session) for session in get['sessions']
            ]
        except TypeError as e:
            self.logger.error(
                'TypeError has occurred : %s while creating TautulliStream structure',
                e)
            return

        for session in sessions:
            # Check to see if ip_address_public attribute exists as it was introduced in v2
            try:
                getattr(session, 'ip_address_public')
            except AttributeError:
                self.logger.error(
                    'Public IP attribute missing!!! Do you have an old version of Tautulli (v1)?'
                )
                exit(1)

            try:
                geodata = self.geoiphandler.lookup(session.ip_address_public)
            except (ValueError, AddressNotFoundError):
                self.logger.debug('Public IP missing for Tautulli session...')
                if not self.my_ip:
                    # Try the fallback ip in the config file
                    try:
                        self.logger.debug(
                            'Attempting to use the fallback IP...')
                        geodata = self.geoiphandler.lookup(
                            self.server.fallback_ip)
                    except AddressNotFoundError as e:
                        self.logger.error('%s', e)

                        self.my_ip = self.session.get(
                            'http://ip.42.pl/raw').text
                        self.logger.debug(
                            'Looked up the public IP and set it to %s',
                            self.my_ip)

                        geodata = self.geoiphandler.lookup(self.my_ip)

                else:
                    geodata = self.geoiphandler.lookup(self.my_ip)

            if not all([geodata.location.latitude,
                        geodata.location.longitude]):
                latitude = 37.234332396
                longitude = -115.80666344
            else:
                latitude = geodata.location.latitude
                longitude = geodata.location.longitude

            decision = session.transcode_decision
            if decision == 'copy':
                decision = 'direct stream'

            video_decision = session.stream_video_decision
            if video_decision == 'copy':
                video_decision = 'direct stream'
            elif video_decision == '':
                video_decision = 'Music'

            quality = session.stream_video_resolution
            if not quality:
                quality = session.container.upper()
            elif quality in ('SD', 'sd', '4k'):
                quality = session.stream_video_resolution.upper()
            else:
                quality = session.stream_video_resolution + 'p'

            player_state = session.state.lower()
            if player_state == 'playing':
                player_state = 0
            elif player_state == 'paused':
                player_state = 1
            elif player_state == 'buffering':
                player_state = 3

            product_version = session.product_version
            if session.platform == 'Roku':
                product_version = session.product_version.split('-')[0]

            hash_id = hashit(
                f'{session.session_id}{session.session_key}{session.username}{session.full_title}'
            )
            influx_payload.append({
                "measurement": "Tautulli",
                "tags": {
                    "type": "Session",
                    "session_id": session.session_id,
                    "friendly_name": session.friendly_name,
                    "username": session.username,
                    "title": session.full_title,
                    "platform": session.platform,
                    "product_version": product_version,
                    "quality": quality,
                    "video_decision": video_decision.title(),
                    "transcode_decision": decision.title(),
                    "transcode_hw_decoding": session.transcode_hw_decoding,
                    "transcode_hw_encoding": session.transcode_hw_encoding,
                    "media_type": session.media_type.title(),
                    "audio_codec": session.audio_codec.upper(),
                    "audio_profile": session.audio_profile.upper(),
                    "stream_audio_codec": session.stream_audio_codec.upper(),
                    "quality_profile": session.quality_profile,
                    "progress_percent": session.progress_percent,
                    "region_code": geodata.subdivisions.most_specific.iso_code,
                    "location": geodata.city.name,
                    "full_location":
                    f'{geodata.subdivisions.most_specific.name} - {geodata.city.name}',
                    "latitude": latitude,
                    "longitude": longitude,
                    "player_state": player_state,
                    "device_type": session.platform,
                    "server": self.server.id
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        influx_payload.append({
            "measurement": "Tautulli",
            "tags": {
                "type": "current_stream_stats",
                "server": self.server.id
            },
            "time": now,
            "fields": {
                "stream_count": int(get['stream_count']),
                "total_bandwidth": int(get['total_bandwidth']),
                "wan_bandwidth": int(get['wan_bandwidth']),
                "lan_bandwidth": int(get['lan_bandwidth']),
                "transcode_streams": int(get['stream_count_transcode']),
                "direct_play_streams": int(get['stream_count_direct_play']),
                "direct_streams": int(get['stream_count_direct_stream'])
            }
        })

        self.dbmanager.write_points(influx_payload)
Example #22
    def get_calendar(self, query="Missing"):
        endpoint = '/api/v1/calendar'
        today = str(date.today())
        last_days = str(date.today() -
                        timedelta(days=self.server.missing_days))
        future = str(date.today() + timedelta(days=self.server.future_days))
        now = datetime.now(timezone.utc).astimezone().isoformat()
        if query == "Missing":
            params = {'start': last_days, 'end': today}
        else:
            params = {'start': today, 'end': future}
        influx_payload = []
        influx_albums = []

        req = self.session.prepare_request(
            Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        # Iteratively create a list of LidarrAlbum Objects from response json
        albums = []
        for album in get:
            try:
                albums.append(LidarrAlbum(**album))
            except TypeError as e:
                self.logger.error(
                    'TypeError has occurred : %s while creating LidarrAlbum structure for album. Data '
                    'attempted is: %s', e, album)

        # Add Album to missing list if album is not complete
        for album in albums:
            percent_of_tracks = album.statistics.get('percentOfTracks', 0)
            if percent_of_tracks != 100:
                influx_albums.append((
                    album.title, album.releaseDate, album.artist['artistName'],
                    album.id, percent_of_tracks,
                    f"{album.statistics.get('trackFileCount', 0)}/{album.statistics.get('trackCount', 0)}"
                ))

        for title, release_date, artist_name, album_id, percent_complete, complete_count in influx_albums:
            hash_id = hashit(f'{self.server.id}{title}{album_id}')
            influx_payload.append({
                "measurement": "Lidarr",
                "tags": {
                    "type": query,
                    "sonarrId": album_id,
                    "server": self.server.id,
                    "albumName": title,
                    "artistName": artist_name,
                    "percentComplete": percent_complete,
                    "completeCount": complete_count,
                    "releaseDate": release_date
                },
                "time": now,
                "fields": {
                    "hash": hash_id
                }
            })

        self.dbmanager.write_points(influx_payload)