Example #1
def set_notify_state(newsletter, notify_action, subject, body, message,
                     filename, start_date, end_date, start_time, end_time,
                     newsletter_uuid, email_msg_id):

    if newsletter and notify_action:
        db = database.MonitorDatabase()

        keys = {'timestamp': helpers.timestamp(), 'uuid': newsletter_uuid}

        values = {
            'newsletter_id': newsletter['id'],
            'agent_id': newsletter['agent_id'],
            'agent_name': newsletter['agent_name'],
            'notify_action': notify_action,
            'subject_text': subject,
            'body_text': body,
            'message_text': message,
            'start_date': start_date,
            'end_date': end_date,
            'start_time': start_time,
            'end_time': end_time,
            'email_msg_id': email_msg_id,
            'filename': filename
        }

        db.upsert(table_name='newsletter_log',
                  key_dict=keys,
                  value_dict=values)
        return db.last_insert_id()
    else:
        logger.error(
            "Tautulli NewsletterHandler :: Unable to set notify state.")
Example #2
def github_cache(cache, github_data=None, use_cache=True):
    timestamp = helpers.timestamp()
    cache_filepath = os.path.join(plexpy.CONFIG.CACHE_DIR,
                                  'github_{}.json'.format(cache))

    if github_data:
        cache_data = {
            'github_data': github_data,
            '_cache_time': timestamp,
            '_release_version': common.RELEASE
        }
        try:
            with open(cache_filepath, 'w', encoding='utf-8') as cache_file:
                json.dump(cache_data, cache_file)
        except Exception:
            # Cache write failures are non-fatal.
            pass
    else:
        if not use_cache:
            return
        try:
            with open(cache_filepath, 'r', encoding='utf-8') as cache_file:
                cache_data = json.load(cache_file)
            if (timestamp - cache_data['_cache_time'] <
                    plexpy.CONFIG.CHECK_GITHUB_CACHE_SECONDS
                    and cache_data['_release_version'] == common.RELEASE):
                logger.debug('Using cached GitHub %s data', cache)
                return cache_data['github_data']
        except Exception:
            # An unreadable or missing cache file is treated as a cache miss.
            pass
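
Usage follows from the signature above: pass github_data to write the cache, omit it to read. A hypothetical round trip (the 'releases' cache key and payload are illustrative):

releases = [{'tag_name': 'v2.0.0'}]             # illustrative payload
github_cache('releases', github_data=releases)  # writes github_releases.json
cached = github_cache('releases')               # payload while fresh, otherwise None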
Example #3
def check_rate_limit(ip_address):
    monitor_db = MonitorDatabase()
    result = monitor_db.select('SELECT timestamp, success FROM user_login '
                               'WHERE ip_address = ? '
                               'AND timestamp >= ( '
                               'SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END '
                               'FROM user_login WHERE ip_address = ? AND success = 1) '
                               'ORDER BY timestamp DESC',
                               [ip_address, ip_address])

    try:
        last_timestamp = result[0]['timestamp']
    except IndexError:
        last_timestamp = 0

    try:
        last_success = max(login['timestamp'] for login in result if login['success'])
    except ValueError:
        last_success = 0

    max_timestamp = max(last_success, last_timestamp - plexpy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS_INTERVAL)
    attempts = [login for login in result if login['timestamp'] >= max_timestamp and not login['success']]

    if len(attempts) >= plexpy.CONFIG.HTTP_RATE_LIMIT_ATTEMPTS:
        return max(last_timestamp - (timestamp() - plexpy.CONFIG.HTTP_RATE_LIMIT_LOCKOUT_TIME), 0)
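
When the failed-attempt threshold is reached, the return value is the number of lockout seconds remaining. A worked example with illustrative settings:

now = 1_700_000_000        # hypothetical current timestamp()
last_timestamp = now - 60  # most recent failed login, 60 s ago
lockout = 300              # illustrative HTTP_RATE_LIMIT_LOCKOUT_TIME
remaining = max(last_timestamp - (now - lockout), 0)
assert remaining == 240    # locked out for another 240 seconds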
Example #4
File: views.py Project: ganap/so
    def dispatch(self, request, *args, **kwargs):
        if self.site_pref.use_shutdown and \
                self.site_pref.shutdown_date_start < helpers.timestamp():
            return redirect('/sh/')
        if not request.user.is_authenticated():
            return redirect("/")
        return super(WebRTCOne2One, self).dispatch(request, *args, **kwargs)
Example #5
    def set_session_last_paused(self, session_key=None, timestamp=None):
        if str(session_key).isdigit():
            result = self.db.select(
                'SELECT last_paused, paused_counter '
                'FROM sessions '
                'WHERE session_key = ?',
                args=[session_key])

            paused_counter = None
            for session in result:
                if session['last_paused']:
                    paused_offset = helpers.timestamp() - int(
                        session['last_paused'])
                    if session['paused_counter']:
                        paused_counter = int(
                            session['paused_counter']) + int(paused_offset)
                    else:
                        paused_counter = int(paused_offset)

            values = {'last_paused': timestamp}

            if paused_counter:
                values['paused_counter'] = paused_counter

            keys = {'session_key': session_key}
            self.db.upsert('sessions', values, keys)
Example #6
    def on_buffer(self):
        if self.is_valid_session():
            logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key())
            ap = activity_processor.ActivityProcessor()
            db_stream = ap.get_session_by_key(session_key=self.get_session_key())

            # Increment our buffer count
            ap.increment_session_buffer_count(session_key=self.get_session_key())

            # Get our current buffer count
            current_buffer_count = ap.get_session_buffer_count(self.get_session_key())
            logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." %
                         (self.get_session_key(), current_buffer_count))

            # Get our last triggered time
            buffer_last_triggered = ap.get_session_buffer_trigger_time(self.get_session_key())

            # Update the session state and viewOffset
            self.update_db_session()

            time_since_last_trigger = 0
            if buffer_last_triggered:
                logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." %
                             (self.get_session_key(), buffer_last_triggered))
                time_since_last_trigger = helpers.timestamp() - int(buffer_last_triggered)

            if current_buffer_count >= plexpy.CONFIG.BUFFER_THRESHOLD and time_since_last_trigger == 0 or \
                    time_since_last_trigger >= plexpy.CONFIG.BUFFER_WAIT:
                ap.set_session_buffer_trigger_time(session_key=self.get_session_key())

                # Retrieve the session data from our temp table
                db_session = ap.get_session_by_key(session_key=self.get_session_key())

                plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_buffer'})
Example #7
    def set_user_login(self,
                       user_id=None,
                       user=None,
                       user_group=None,
                       ip_address=None,
                       host=None,
                       user_agent=None,
                       success=0):

        if user_id is None or str(user_id).isdigit():
            monitor_db = database.MonitorDatabase()

            keys = {'timestamp': helpers.timestamp(), 'user_id': user_id}

            values = {
                'user': user,
                'user_group': user_group,
                'ip_address': ip_address,
                'host': host,
                'user_agent': user_agent,
                'success': success
            }

            try:
                monitor_db.upsert(table_name='user_login',
                                  key_dict=keys,
                                  value_dict=values)
            except Exception as e:
                logger.warn(
                    "Tautulli Users :: Unable to execute database query for set_login_log: %s."
                    % e)
Example #8
File: views.py Project: ganap/so
    def dispatch(self, request, *args, **kwargs):
        if self.site_pref.use_shutdown and \
                self.site_pref.shutdown_date_start < helpers.timestamp():
            return redirect('/sh/')
        run_once = getattr(request.user, 'run_once', False)
        if request.user.is_authenticated() and not run_once:
            return redirect("/s/")
        return super(MainPageRunOnce, self).dispatch(request, *args, **kwargs)
Example #9
def save_xy(x_data, y_data, save_to = '', time_stamp = True, plot = True):

	# x_data: numpy array of x-axis
	# y_data: either numpy array of single y-data, or list of multiple numpy
	# arrays of y-data
	# When save_to is empty, saved data will be named by the time_stamp. 

	if save_to == '' and not time_stamp:
		message = (' save_xy(x_data, y_data, save_to = \'\', '
			+ 'time_stamp = True, plot = True)\n'
			+ '      --->  '
			+ 'Empty save_to and time_stamp = False.  Nothing to name file.')
		raise Exception(message)

	if type(y_data).__name__ == 'list':	
		if plot:
			fig, ax = plt.subplots(1, figsize = (5, 5) )
			for i in range(len(y_data)):
				ax.plot(x_data, y_data[i])
			fig.tight_layout()
			if time_stamp:
				fig.savefig(save_to + timestamp() + '.png', dpi = 100)
			else:
				fig.savefig(save_to + '.png', dpi = 100)
			plt.close(fig)
		np.save(save_to + timestamp(), [x_data, y_data] )
		return 0

	elif type(y_data).__name__ == 'ndarray':
		if plot:
			fig, ax = plt.subplots(1, figsize = (5, 5) )
			ax.plot(x_data, y_data)
			fig.tight_layout()
			if time_stamp:
				fig.savefig(save_to + timestamp() + '.png', dpi = 100)
			else:
				fig.savefig(save_to + '.png', dpi = 100)
			plt.close(fig)
		np.save(save_to + timestamp(), [x_data, y_data])
		return 0

	else:
		raise TypeError(
		'y_data of type {} not accepted.  Must be of type list or numpy.ndarray'
			.format(type(y_data).__name__))
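
A hypothetical call, assuming numpy is imported as np alongside the matplotlib setup used in the function body:

x = np.linspace(0, 1, 100)
y = np.sin(2 * np.pi * x)
save_xy(x, y, save_to='sine_')
# -> writes 'sine_<timestamp>.png' and 'sine_<timestamp>.npy'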
Example #10
File: views.py Project: ganap/so
    def dispatch(self, request, *args, **kwargs):
        if self.site_pref.use_shutdown and \
                self.site_pref.shutdown_date_start < helpers.timestamp():
            return redirect('/sh/')
        if request.user.is_authenticated():
            if not request.user.run_once:
                return redirect("/s/")
            return redirect("/ro/")
        return super(LoginPage, self).dispatch(request, *args, **kwargs)
Example #11
    def __init__(self, stream=None):
        if not stream:
            file_prefix = os.path.basename(sys.argv[0]).split(".")[0].replace("test_", "")
            filename = timestamp("%s_results.txt" % file_prefix)
            self.stream = _WritelnDecorator(open(filename, "a+"))
        else:
            self.stream = _WritelnDecorator(stream)
        self.descriptions = 0
        self.verbosity = 2
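
Here timestamp() evidently takes a filename and returns a timestamped variant of it, unlike the epoch-seconds helper in the Tautulli examples; the buildLogger example further down uses the same convention. A hedged sketch of such a helper:

import time

def timestamp(filename):
    # Hypothetical sketch: splice the current time into the filename,
    # e.g. 'foo_results.txt' -> 'foo_results_20240101-120000.txt'.
    stamp = time.strftime('%Y%m%d-%H%M%S')
    base, dot, ext = filename.rpartition('.')
    return '{}_{}.{}'.format(base, stamp, ext) if dot else '{}_{}'.format(filename, stamp)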
Example #12
    def _delay_scrape(self):
        """Sleeps for a random amount of time, between 0 and _SLEEP_MAX. Should 
        be called before every page request, so that not too many requests are
        sent at once.
        """
        if self._last_scrape > 0:
            time.sleep(random.uniform(0.0, self._SLEEP_MAX))

        self._last_scrape = timestamp()
Example #13
def set_last_seen(device_token=None):
    db = database.MonitorDatabase()
    last_seen = helpers.timestamp()

    try:
        result = db.action('UPDATE mobile_devices SET last_seen = ? WHERE device_token = ?',
                           args=[last_seen, device_token])
    except Exception as e:
        logger.warn("Tautulli MobileApp :: Failed to set last_seen time for device: %s." % e)
        return
Example #14
File: views.py Project: ganap/so
    def dispatch(self, request, *args, **kwargs):
        is_admin = getattr(request.user, 'is_admin', False)
        is_moderator = getattr(request.user, 'is_moderator', False)
        is_expert = getattr(request.user, 'is_expert', False)
        if is_admin:
            return redirect('/a/')
        if is_moderator:
            return redirect('/m/')
        if is_expert:
            return redirect('/e/')
        if self.site_pref.use_shutdown and \
                self.site_pref.shutdown_date_start < helpers.timestamp():
            return redirect('/sh/')
        return super(MainPageAuthenticatedUsed, self).dispatch(request, *args, **kwargs)
Example #15
def run_spacy(
    data_description,
    directory,
    extract_features,
    filter,
    model=None,
    prune_test_data=False,
):
    last_time = time.time()

    # Parse data
    spacy_frames: List[Frame] = open_model("spacy_parse", ".")

    send_email(
        directory,
        f"Starting pipeline for data parsed with spaCy",
        email_address,
        send_mail,
    )

    # Send the data to the pipeline
    result = pipeline(directory,
                      spacy_frames,
                      "spacy",
                      extract_features,
                      filter,
                      log_data=log_data,
                      prune_test_data=prune_test_data)

    # Present data
    send_email(
        directory,
        f"Pipeline for data parsed with spaCy compleate. \nResult:\n{result}",
        email_address,
        send_mail,
    )
    timestamp(last_time, "spaCy pipeline: ")
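
The call timestamp(last_time, "spaCy pipeline: ") is another variant again: an elapsed-time reporter rather than a clock read. A plausible sketch, assuming it prints the label followed by the seconds elapsed since start:

import time

def timestamp(start, label=''):
    # Hypothetical sketch: report wall-clock seconds elapsed since `start`.
    print('{}{:.1f} s'.format(label, time.time() - start))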
Example #16
def run_malt(
    data_description,
    directory,
    extract_features,
    filter,
    model=None,
    prune_test_data=False,
):
    last_time = time.time()

    send_email(
        directory,
        f"Starting pipeline for data parsed with Maltparser",
        email_address,
        send_mail,
    )

    # Parse data
    malt_frames = parse_malt()

    # Send the data to the pipeline
    result = pipeline(directory,
                      malt_frames,
                      "malt",
                      extract_features,
                      filter,
                      log_data=log_data,
                      prune_test_data=prune_test_data)
    # Present data
    send_email(
        directory,
        f"Pipeline for data parsed with Maltparser compleate. \nResult:\n{result}",
        email_address,
        send_mail,
    )
    timestamp(last_time, "Malt pipeline: ")
Example #17
    def on_pause(self, still_paused=False):
        if self.is_valid_session():
            if not still_paused:
                logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key()))

            # Set the session last_paused timestamp
            ap = activity_processor.ActivityProcessor()
            ap.set_session_last_paused(session_key=self.get_session_key(), timestamp=helpers.timestamp())

            # Update the session state and viewOffset
            self.update_db_session()

            # Retrieve the session data from our temp table
            db_session = ap.get_session_by_key(session_key=self.get_session_key())

            if not still_paused:
                plexpy.NOTIFY_QUEUE.put({'stream_data': db_session.copy(), 'notify_action': 'on_pause'})
Example #18
    def show_dashboard(self):
        import helpers
        import settings
            
        template_values = {
            "teacher_name"  :self.person.nickname,
            "header"        : self.gen_header("teacher"),
            "lessons_json"  : self.get_lessons_json(),
            "admin"         : False,
# Adding admin functionality to dashboard in progress
#            "lessons_json"  : self.get_lessons_json(True if self.person.admin else False),
#            "admin"         : self.person.admin,
            "dbg_timestamp" : (helpers.timestamp() if settings.ENABLE_FILLER_FORM_FILLING else "")
        }

        if self.session.has_key('msg'):
            template_values['msg'] = self.session.pop('msg')
                    
        self.write_response_with_template("teacher_dashboard.html", template_values)
Example #19
def run(query, images_dir, n_images=20, image_size='MEDIUM'):
    # Run the query to the Google Custom Search API.
    print(
        timestamp() +
        ' Calling the Google Custom Search API, downloading images for the keyword "'
        + query + '" and saving them to storage')

    # Build a urllib opener to bypass website's blockade of the user-agent used by urllib.
    opener = urllib.request.build_opener()
    opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/36.0.1941.0 '
                          'Safari/537.36')]
    urllib.request.install_opener(opener)

    # Build a service object for interacting with the API.
    service = build('customsearch',
                    'v1',
                    developerKey=settings.GOOGLE_API_KEY,
                    cache_discovery=False)

    # Call the API, parse the JSON response, download the images and cache them.
    for i in range(round(n_images / 10) + 1):
        response = search_images(service,
                                 query,
                                 size=image_size,
                                 start=i * 10 + 1)

        if 'items' in response:
            for item in response['items']:
                url = item['link']
                filename = item['link'].split('/')[-1]
                extension = filename.split('.')[-1].lower()
                if extension in ['png', 'jpg', 'jpeg', 'gif']:
                    try:
                        print(url)
                        filename = f'{hashlib.sha1(filename.encode("utf-8")).hexdigest()[:10]}.{extension}'
                        urllib.request.urlretrieve(url,
                                                   images_dir + '/' + filename)
                    except (urllib.error.URLError, ssl.CertificateError):
                        pass
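
A hypothetical invocation, assuming settings.GOOGLE_API_KEY is configured and the target directory exists:

run('golden retriever', 'images/dogs', n_images=20, image_size='MEDIUM')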
Example #20
    def get_or_create_Dialog(self, with_user_pk, user=None):
        if not user:
            user = self.user
        print("GET DIALOG")
        print(with_user_pk, user.to_json())
        d = models.Dialog.objects(owner=user.owner,
                                  dialog_with_user_pk=with_user_pk)
        print(d.to_json())
        print("================")
        if not d:
            with_user = models.User.objects(owner=with_user_pk)[0]
            d = models.Dialog(owner=user.owner,
                              dialog_with_user_pk=with_user_pk,
                              dialog_with_user={
                                  'pk': with_user_pk,
                                  'username': with_user.username,
                              },
                              last_msg_timestamp=helpers.timestamp())
            d.save()
            return d
        else:
            return d[0]
Example #21
    def load(self, n_images_for_keyword=15):
        """
        Loads images using google api for search words specified in categories_file and saves them in self.output_directory

        :param n_images_for_keyword: Number of images loaded for every keyword from categories_file
        """
        with open(self.categories_file, 'r') as file:
            categories_data = json.load(file)

        for category in categories_data:
            images_dir = self.output_directory + '/' + category['name']
            n_images = n_images_for_keyword

            # Make directory if it doesn't exist
            if not os.path.exists(images_dir):
                print(timestamp() + ' Creating the directory "' + images_dir +
                      '" and downloading the content')
                os.makedirs(images_dir)

            # Load images for all queries
            for query in category["images_keywords"]:
                google_api.run(query, images_dir, n_images=n_images)
Example #22
    def get_watch_time_stats(self,
                             user_id=None,
                             grouping=None,
                             query_days=None):
        if not session.allow_session_user(user_id):
            return []

        if grouping is None:
            grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES

        if query_days and query_days is not None:
            query_days = map(helpers.cast_to_int, str(query_days).split(','))
        else:
            query_days = [1, 7, 30, 0]

        timestamp = helpers.timestamp()

        monitor_db = database.MonitorDatabase()

        user_watch_time_stats = []

        group_by = 'reference_id' if grouping else 'id'

        for days in query_days:
            timestamp_query = timestamp - days * 24 * 60 * 60

            try:
                if days > 0:
                    if str(user_id).isdigit():
                        query = 'SELECT (SUM(stopped - started) - ' \
                                '   SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \
                                'COUNT(DISTINCT %s) AS total_plays ' \
                                'FROM session_history ' \
                                'WHERE stopped >= %s ' \
                                'AND user_id = ? ' % (group_by, timestamp_query)
                        result = monitor_db.select(query, args=[user_id])
                    else:
                        result = []
                else:
                    if str(user_id).isdigit():
                        query = 'SELECT (SUM(stopped - started) - ' \
                                '   SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \
                                'COUNT(DISTINCT %s) AS total_plays ' \
                                'FROM session_history ' \
                                'WHERE user_id = ? ' % group_by
                        result = monitor_db.select(query, args=[user_id])
                    else:
                        result = []
            except Exception as e:
                logger.warn(
                    "Tautulli Users :: Unable to execute database query for get_watch_time_stats: %s."
                    % e)
                result = []

            for item in result:
                if item['total_time']:
                    total_time = item['total_time']
                    total_plays = item['total_plays']
                else:
                    total_time = 0
                    total_plays = 0

                row = {
                    'query_days': days,
                    'total_time': total_time,
                    'total_plays': total_plays
                }

                user_watch_time_stats.append(row)

        return user_watch_time_stats
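
The per-window cutoff used in the queries above is plain epoch arithmetic; a worked example with illustrative values:

timestamp = 1_700_000_000
days = 7
timestamp_query = timestamp - days * 24 * 60 * 60  # 1_699_395_200, i.e. now minus 604800 s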
Example #23
    def handle(self, *args, **options):
        mongodb.MongoDbConnect()
        self.stdout.write(
            "::::Regenerate Country And City list files in /static/gen/L18n/country-city/")
        locales = models.Locales.objects()
        locale_suffixes = []
        for locale in locales:
            locale_suffixes.append(locale.alias)
        #
        #   Get all countries and save them to files
        #

        countries_locales = {}
        for s in locale_suffixes:
            countries_locales[s] = {
                #   1: 'France',
                #   ...
            }

        for country in models.DjCountry.objects.all():
            pk = str(country.pk)
            for locale in locale_suffixes:
                names = country.name_locale

                name = names.get(locale, None)
                if name:
                    countries_locales[locale][pk] = {
                        'n': name,
                        'c': country.code
                    }
                else:
                    countries_locales[locale][pk] = {
                        'n': country.name,
                        'c': country.code
                    }

        for locale in locale_suffixes:
            file_path = AUTO_GEN_DIR + "L18n/country-city/country-list."\
                + locale + ".json"
            f = open(file_path, "w")
            data = json.dumps(countries_locales[locale])
            data = rm_spaces(data)
            f.write(data)
            f.close()
            gz_compress_file(file_path)

        self.stdout.write("::::County list      [DONE]")
        #
        #   Get all City from country and save them
        #   in folder with Country.pk
        #
        for country in models.DjCountry.objects.all():
            pk = str(country.pk)
            country_dir = AUTO_GEN_DIR + "L18n/country-city/" + pk + "/"
            if not os.path.isdir(country_dir):
                os.mkdir(country_dir)
            for locale in locale_suffixes:
                city_list_export = []
                city_list = models.DjCity.objects.filter(country=country)
                for city in city_list:
                    city_pk = city.pk
                    names = city.name_locale
                    name = names.get(locale, None)
                    if name:
                        city_list_export.append({"p": city_pk, "n": name})
                    else:
                        city_list_export.append({"p": city_pk, "n": city.name})

                self.stdout.write(repr(city_list_export))
                file_path = country_dir + "city-list." + locale + ".json"
                f = open(file_path, "w")
                data = rm_spaces(json.dumps(city_list_export))
                f.write(data)
                f.close()
                gz_compress_file(file_path)
            self.stdout.write("::::City list for "
                              + country.name + "         [DONE]")

        timestamp = helpers.timestamp()
        f = open(AUTO_GEN_DIR + "L18n/country-city/autogen.timestamp", "w")
        f.write(str(timestamp))
        f.close()
        self.stdout.write("::::END")
Example #24
	def _send_tab_delimited_report(self, lesson_code, utc_offset):
		import StringIO
		from model import Student, StudentActivity, Lesson
		import helpers
		encoding = "UTF-8"

		lesson = Lesson.get_by_key_name(lesson_code)
		assert lesson is not None

		if lesson is None or lesson.teacher_key != self.teacher_key:
			self.write_response_plain_text("ERROR:  Lesson code appears to be incorrect.")
		else:

			students = Student.fetch_all("lesson =", lesson)
			task_titles = tuple(task_info[0] for task_info in lesson.tasks)
			student_key_to_nickname = dict((s.key().name(), s.nickname) for s in students)
			activities = StudentActivity.fetch_all("lesson =", lesson)

			report_buffer = StringIO.StringIO()
			excel_writer = UnicodeWriter(report_buffer, "excel-tab", "utf8")

			headers = (
#					"Lesson_Code",
					"Timestamp",
					"Student",
					"Task_Number",
					"Task_Name",
					"Activity_Type",
					"Query",
					"Link_URL",
					"Link_Title",
					"Is_Helpful",
					"Answer_Text",
					"Answer_Explanation"
			)
			excel_writer.writerow(headers)

			for activity in activities:
				student_key = activity.student_key.name()
				student_nickname = student_key_to_nickname[student_key]
				timestamp = (activity.timestamp - utc_offset).strftime("%m/%d/%Y %H:%M:%S")
				task_idx = activity.task_idx
				task_title = task_titles[task_idx]
				task_num = task_idx + 1
				line_parts = (
#						lesson_code,
						timestamp,
						student_nickname,
						task_num,
						task_title,
						activity.activity_type,
						activity.search,
						activity.link,
						activity.link_title,
						activity.is_helpful,
						activity.answer_text,
						activity.answer_explanation
				)
#				line_parts = tuple(unicode(p).encode("utf8") for p in line_parts)
				excel_writer.writerow(line_parts)
			report_text = report_buffer.getvalue()
			report_buffer.close()

			content_type = "text/tab-separated-values"
			filename = "search_party_lesson_%s_activity_as_of_%s.txt"%(lesson_code, helpers.timestamp())
			self.write_response_as_file(encoded_content=report_text, content_type=content_type, filename=filename, encoding=encoding)
Example #25
    def process(self):
        if self.is_valid_session():
            ap = activity_processor.ActivityProcessor()
            db_session = ap.get_session_by_key(
                session_key=self.get_session_key())

            this_state = self.timeline['state']
            this_rating_key = str(self.timeline['ratingKey'])
            this_key = self.timeline['key']
            this_transcode_key = self.timeline.get('transcodeSession', '')

            # Get the live tv session uuid
            this_live_uuid = this_key.split('/')[-1] if this_key.startswith(
                '/livetv/sessions') else None

            # If we already have this session in the temp table, check for state changes
            if db_session:
                # Re-schedule the callback to reset the 5 minutes timer
                schedule_callback(
                    'session_key-{}'.format(self.get_session_key()),
                    func=force_stop_stream,
                    args=[
                        self.get_session_key(), db_session['full_title'],
                        db_session['user']
                    ],
                    minutes=5)

                last_state = db_session['state']
                last_rating_key = str(db_session['rating_key'])
                last_live_uuid = db_session['live_uuid']
                last_transcode_key = db_session['transcode_key'].split('/')[-1]
                last_paused = db_session['last_paused']
                last_rating_key_websocket = db_session['rating_key_websocket']
                last_guid = db_session['guid']

                this_guid = last_guid
                # Check guid for live TV metadata every 60 seconds
                if db_session['live'] and helpers.timestamp() - db_session['stopped'] > 60:
                    metadata = self.get_metadata(skip_cache=True)
                    if metadata:
                        this_guid = metadata['guid']

                # Make sure the same item is being played
                if (this_rating_key == last_rating_key
                        or this_rating_key == last_rating_key_websocket
                        or this_live_uuid == last_live_uuid) \
                        and this_guid == last_guid:
                    # Update the session state and viewOffset
                    if this_state == 'playing':
                        # Update the session in our temp session table
                        # if the last set temporary stopped time exceeds 60 seconds
                        if helpers.timestamp() - db_session['stopped'] > 60:
                            self.update_db_session()

                    # Start our state checks
                    if this_state != last_state:
                        if this_state == 'paused':
                            self.on_pause()
                        elif last_paused and this_state == 'playing':
                            self.on_resume()
                        elif this_state == 'stopped':
                            self.on_stop()
                        elif this_state == 'error':
                            self.on_error()

                    elif this_state == 'paused':
                        # Update the session last_paused timestamp
                        self.on_pause(still_paused=True)

                    if this_state == 'buffering':
                        self.on_buffer()

                    if this_transcode_key != last_transcode_key and this_state != 'stopped':
                        self.on_change()

                # If a client doesn't register stop events (I'm looking at you PHT!) check if the ratingKey has changed
                else:
                    # Manually stop and start
                    # Set force_stop so that we don't overwrite our last viewOffset
                    self.on_stop(force_stop=True)
                    self.on_start()

                # Monitor if the stream has reached the watch percentage for notifications
                # The only purpose of this is for notifications
                if not db_session['watched'] and this_state != 'buffering':
                    progress_percent = helpers.get_percent(
                        self.timeline['viewOffset'], db_session['duration'])
                    watched_percent = {
                        'movie': plexpy.CONFIG.MOVIE_WATCHED_PERCENT,
                        'episode': plexpy.CONFIG.TV_WATCHED_PERCENT,
                        'track': plexpy.CONFIG.MUSIC_WATCHED_PERCENT,
                        'clip': plexpy.CONFIG.TV_WATCHED_PERCENT
                    }

                    if progress_percent >= watched_percent.get(
                            db_session['media_type'], 101):
                        logger.debug(
                            "Tautulli ActivityHandler :: Session %s watched." %
                            str(self.get_session_key()))
                        ap.set_watched(session_key=self.get_session_key())

                        watched_notifiers = notification_handler.get_notify_state_enabled(
                            session=db_session,
                            notify_action='on_watched',
                            notified=False)

                        for d in watched_notifiers:
                            plexpy.NOTIFY_QUEUE.put({
                                'stream_data': db_session.copy(),
                                'notifier_id': d['notifier_id'],
                                'notify_action': 'on_watched'
                            })

            else:
                # We don't have this session in our table yet, start a new one.
                if this_state != 'buffering':
                    self.on_start()
Example #26
    def set_session_state(self):
        ap = activity_processor.ActivityProcessor()
        ap.set_session_state(session_key=self.get_session_key(),
                             state=self.timeline['state'],
                             view_offset=self.timeline['viewOffset'],
                             stopped=helpers.timestamp())
Example #27
                        default=1,
                        type=int,
                        help='Number of steps of the convergence test.')
    args = parser.parse_args()
    testFilename = str(args.f)
    layerDepth = int(args.s)

    print("\n### TESTING " + testFilename + "\n")
    if testFilename[-3:] != ".py":
        testFilename += ".py"
    os.system("cp " + testFilename + " testConfiguration.py")
    from testConfiguration import CONFIGURATIONS, KERNELS, LOADS

    os.makedirs("results", exist_ok=True)
    pp = PdfPages("results/plots.pdf")
    tmpstmp = helpers.timestamp()
    fileHandle = open("results/rates" + tmpstmp + ".md", "w+")

    for k, kernel in enumerate(KERNELS):
        load = LOADS[k]
        fileHandle.write("# Kernel: " + kernel["function"] + "\n")
        for conf in CONFIGURATIONS:
            data = runTest(conf, kernel, load, layerDepth, pp)
            helpers.append_output(data,
                                  conf,
                                  kernel,
                                  load,
                                  fileHandle=fileHandle)
    fileHandle.close()
    pp.close()
    subprocess.run(
Example #28
REFRESH_REQUEST_TIMEOUT = 30  # seconds
REFRESH_TOTAL_RETRIES = 3
DELAY_MIN = 0  # seconds
DELAY_MAX = 0  # seconds
DELAY_FRACTION = 999999

# Connection settings
USER_AGENT = 'Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:57.0) Gecko/20100101 Firefox/57.0'
ANDHRA_URL = 'http://ceoaperms.ap.gov.in/electoral_rolls/Rolls.aspx'
ANDHRA_BASE_URL = 'http://ceoaperms.ap.gov.in/electoral_rolls/'

# Parsing & saving
FIND_PDF_REGEX = re.compile(r"open\('(.+)',")
OUTPUT_FILE = getpath(
    ANDHRA_TRACK_DIR, 'Andhra{}-{}.csv'.format(ASSIGNED_ID or '',
                                               timestamp(TRACK_FILE_TS)))
DOWNLOAD_FAILED = 'Not available / Unable to download'
TRACK_FILE = getpath('cache/andhra{}_track.bin'.format(ASSIGNED_ID or ''))
CSV_HEADER = ('district_name', 'ac_name', 'polling_station_number',
              'polling_station_name', 'polling_station_location',
              'telugu_file_name', 'eng_file_name')

# Log settings
MAX_LOG_SIZE = 52428800
LOG_BACKUP_COUNT = 5
LOG_FILE = getpath('logs/andhra{}.log'.format(ASSIGNED_ID or ''))


def log_configurer():
    root = logging.getLogger()
    root.setLevel(logging.WARNING)
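
In this project timestamp(TRACK_FILE_TS) takes a strftime pattern and returns the formatted current time; the Manipur and argument-parser examples below follow the same convention. A hedged sketch of that helper, with a hypothetical pattern value:

import time

TRACK_FILE_TS = '%Y%m%d-%H%M%S'  # hypothetical value; the real constant lives in helpers

def timestamp(fmt):
    # Hypothetical sketch: format the current local time with `fmt`.
    return time.strftime(fmt)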
Example #29
    def __init__(self, event):
        self.timestamp = timestamp(event)
        self.sourceType = source_type(event)
        self.sourceUserId = source_user_id(event)
        self.sourceGroupId = source_group_id(event)
Example #30
def main():
    start = time.time()
    ##### Run variables #####
    # If the data should be pruned as a part of the evaluation
    pruning_test_data = True
    # Filter the data used in both training and testing
    filter = {"min_sentences": 0, "min_role_occurrence": 6, "prune": 1}
    # Features of data to use
    features_ = {
        "",
        # "frame",
        # "core_elements",
        "word",
        "lemma",
        "pos",
        # "deprel",
        "ref",
        # "lu_words",
        # "lu_lemmas",
        # "lu_deprels",
        # "lu_pos",
        # "head_word",
        # "head_lemma",
        # "head_deprel",
        # "head_pos",
        # "child_words",
        # "child_lemmas",
        # "child_deprels",
        # "child_pos",
    }

    if not os.path.isfile('spacy_parse.pkl'):
        try:
            frames = parse_spacy()
            save_model(frames, "spacy_parse", ".")
            send_email("Parsing spaCy",
                       f"Finished parsing spaCy and saved to model",
                       email_address, send_mail)
        except Exception as err:
            send_email("Parsing spaCy",
                       f"Error when parsing spaCy\n{str(err)}", email_address,
                       send_mail)
            quit()

    ######## RUNS ########
    # for feature in features_:
    features = features_.copy()
    # features.remove(feature)
    # Change this string to represent the data manipulation made
    now = datetime.now()
    dt_string = now.strftime("_%Y-%m-%d_%H-%M-%S")
    directory = f"runs/run{dt_string}"
    readable_time = now.strftime("%H:%M:%S %Y-%m-%d")

    # Description of run
    data_description = (
        f"Testing all features not generated by parser. \nlinearSVC. \n{features=}. \n{filter=}. \n{pruning_test_data=}. \nTime: {readable_time}\n"
    )

    if log_data:
        # Create new run folder
        try:
            os.mkdir(directory)
            f = open(directory + "/run_description.txt", "a")
            f.write(data_description)
            f.close()
        except OSError as err:
            raise OSError(f"Unable to create directory {directory}") from err

    send_email(
        directory,
        f"New run started: \n{data_description}\n",
        email_address,
        send_mail,
    )

    run_malt(data_description,
             directory,
             features,
             filter,
             prune_test_data=pruning_test_data)
    run_spacy(data_description,
              directory,
              features,
              filter,
              prune_test_data=pruning_test_data)

    send_email("Finished runs", "Tests compleate :)", email_address, send_mail)
    timestamp(start, "Total time: ")
    quit()
Example #31
from helpers import urlget, getpath, relpath, baseurl, urljoin, urldown, \
    timestamp, TRACK_FILE_TS, append_csv

ENGLISH = 'English'
MANIPURI = 'Manipuri'

MANIPUR_PDF_ENGLISH_DIR = 'manipur_pdfs/english'
MANIPUR_PDF_MANIPURI_DIR = 'manipur_pdfs/manipuri'
MANIPUR_TRACK_DIR = './'

ENGLISH_URL = 'http://www.ceomanipur.nic.in/ElectoralRolls/ElectoralRolls_English.html'
MANIPURI_URL = 'http://www.ceomanipur.nic.in/ElectoralRolls/ElectoralRolls_Manipuri.html'
CSV_HEADER = ('ac_number', 'ac_name', 'poll_station_number',
              'poll_station_name', 'language', 'relative_path')
OUTPUT_FILE = getpath(MANIPUR_TRACK_DIR,
                      'Manipur-%s.csv' % timestamp(TRACK_FILE_TS))


class Manipur:
    def __init__(self):
        self.rolls = [{
            'url': ENGLISH_URL,
            'lang': ENGLISH,
            'html': None,
            'data': []
        }, {
            'url': MANIPURI_URL,
            'lang': MANIPURI,
            'html': None,
            'data': []
        }]
Example #32
def get_args():
    parser = ArgumentParser(description=APP_DESC, prog=APP_CMD)

    parser.add_argument('-f',
                        '--file',
                        metavar='FILE',
                        help='path to the PDF file that is to be parsed')

    parser.add_argument(
        '-d',
        '--dir',
        metavar='DIR',
        help=
        'path to the directory containing the PDF files that are to be parsed')

    parser.add_argument('-s',
                        '--state',
                        metavar='STATE',
                        help='state whose PDF rolls(s) are to be parsed')

    parser.add_argument('-l',
                        '--lang',
                        metavar='LANG',
                        default='english',
                        help='specify language used in parsing \
documents (default is English if not being specified)')

    parser.add_argument('-o',
                        '--out',
                        metavar='FILE',
                        default=OUTPUT_FILE.format(timestamp(OUTPUT_FILE_TS)),
                        help='\
specify output file for storing parsed result (must be \'.csv\' file). The default output file is \
\'Parsed-{timestamp}.csv\' and stored in the \'output\' directory')

    parser.add_argument('--resume',
                        action='store_true',
                        default=False,
                        help='allows the parsing to be \
resumed later if the program is stopped unexpectedly or intentionally. Only takes effect if it is applied to a directory'
                        )

    parser.add_argument('--version',
                        action='version',
                        version=APP_NAME_WITH_VERSION)

    parser.add_argument('--all-states',
                        action='store_true',
                        default=False,
                        help='show all the states that are supported and exit')

    args = parser.parse_args()

    if args.all_states:
        out = []
        chars = max(len(x) for x in states.keys())
        for state, langs in states.items():
            out.append('%-{}s : %s'.format(chars) % (state, ', '.join(langs)))
        parser.exit(message='States supported:\n%s\n' % '\n'.join(out))

    if not args.file and not args.dir or not args.state:
        parser.error(
            'the following arguments are required: -f/--file or -d/--dir, -s/--state'
        )

    if args.file and args.dir:
        parser.error('only accepts one of --file or --dir at a time')

    elif args.file:
        if not isfile(args.file):
            parser.error('file not found: %s' % args.file)
        if not ispdf(args.file):
            parser.error('file must be in PDF format')
        if args.resume:
            parser.error('parsing file does not allow --resume')

    elif args.dir:
        if not isdir(args.dir):
            parser.error('folder not found: %s' % args.dir)

    if args.state.lower() not in states:
        parser.error('unsupported state \'%s\'' % args.state)

    if args.lang and args.lang.lower() not in states[args.state.lower()]:
        parser.error('state \'%s\' does not have \'%s\' language' %
                     (args.state, args.lang))

    if dirname(args.out) and not isdir(dirname(args.out)):
        parser.error('output folder does not exist: %s' % dirname(args.out))

    if filext(args.out).lower() != 'csv':
        parser.error('output file must be \'.csv\'')

    args.path = args.file or args.dir
    args.state = args.state.lower()
    args.lang = args.lang.lower()

    return args
Example #33
    def banner(self):
        banner = '\nLog started at: {:22}\n{}'.format(
            timestamp(LOG_TIME_FORMAT), '-' * 38)
        self.logger.warning(banner)
Example #34
    def api_view(self):
        return dict(dob=timestamp(self.dob),
                    elapsed=self.elapsed,
                    state=self.get_current_state())
Example #35
def buildLogger(filename, **kwds):
    """Build the basic logging facility.

    A typical call would be:

    buildLogger("somename.log")

    To enable debug logging:

    buildLogger("somename.log", debug=True)
    
    """
    global log_file_name
    testlogdir = setLogDir()
    #print "Test logs will be created in " + testlogdir
    datefmt='%H:%M:%S'
    logformat = '%(asctime)s : %(levelname)-7s : %(message)s'

    try:
        # build a new instance of the logger and add the general
        # format
        log = logging.getLogger()
        formatter = logging.Formatter(logformat, datefmt)

        # add a couple of high level log levels for other purposes
        ### log.output: for logging the output from a command
        logging.output = 51
        log.output = lambda msg, self=log, level=logging.output: self.log(level, msg)
        logging.addLevelName(logging.output, "OUTPUT")
        
        ### log.cmd: for logging the command sent on a cli
        logging.cmd = 52
        log.cmd = lambda msg, self=log, level=logging.cmd: self.log(level, msg)
        logging.addLevelName(logging.cmd, "CMD")

        ### log.test: for logging the test being run
        logging.test = 53
        log.test = lambda msg, self=log, level=logging.test: self.log(level, msg)
        logging.addLevelName(logging.test, "TEST")
        
        ### log.result: for logging the test result
        logging.result = 54
        log.result = lambda msg, self=log, level=logging.result: self.log(level, msg)
        logging.addLevelName(logging.result, "RESULT")

		
        # build the logfile handler
        filename = timestamp(filename)
        log_file_name = filename
        fh = logging.FileHandler(normpath(filename), "w")
        fh.setFormatter(formatter)
        log.addHandler(fh)

        # build the stdout handler
        try:
            if kwds['console']:            
                sh = logging.StreamHandler()
                sh.setFormatter(formatter)
                log.addHandler(sh)
        except KeyError:
            pass
            
        # set the log level
        try:
            if kwds['debug']:
                log.setLevel(logging.DEBUG)
            elif kwds['info']:
                log.setLevel(logging.INFO)
        except KeyError:
            log.setLevel(logging.WARN)

        return log

    except:
        raise
Example #36
def check_active_sessions(ws_request=False):

    with monitor_lock:
        monitor_db = database.MonitorDatabase()
        monitor_process = activity_processor.ActivityProcessor()
        db_streams = monitor_process.get_sessions()

        # Clear the metadata cache
        for stream in db_streams:
            activity_handler.delete_metadata_cache(stream['session_key'])

        pms_connect = pmsconnect.PmsConnect()
        session_list = pms_connect.get_current_activity()

        logger.debug("Tautulli Monitor :: Checking for active streams.")

        if session_list:
            media_container = session_list['sessions']

            # Check our temp table for what we must do with the new streams
            for stream in db_streams:
                if any(d['session_key'] == str(stream['session_key'])
                       and d['rating_key'] == str(stream['rating_key'])
                       for d in media_container):
                    # The user's session is still active
                    for session in media_container:
                        if session['session_key'] == str(stream['session_key']) and \
                                session['rating_key'] == str(stream['rating_key']):
                            # The user is still playing the same media item
                            # Here we can check the play states
                            if session['state'] != stream['state']:
                                if session['state'] == 'paused':
                                    logger.debug(
                                        "Tautulli Monitor :: Session %s paused."
                                        % stream['session_key'])

                                    plexpy.NOTIFY_QUEUE.put({
                                        'stream_data': stream.copy(),
                                        'notify_action': 'on_pause'
                                    })

                                if session['state'] == 'playing' and stream['state'] == 'paused':
                                    logger.debug(
                                        "Tautulli Monitor :: Session %s resumed."
                                        % stream['session_key'])

                                    plexpy.NOTIFY_QUEUE.put({
                                        'stream_data': stream.copy(),
                                        'notify_action': 'on_resume'
                                    })

                                if session['state'] == 'error':
                                    logger.debug(
                                        "Tautulli Monitor :: Session %s encountered an error."
                                        % stream['session_key'])

                                    plexpy.NOTIFY_QUEUE.put({
                                        'stream_data': stream.copy(),
                                        'notify_action': 'on_error'
                                    })

                            if stream['state'] == 'paused' and not ws_request:
                                # The stream is still paused so we need to increment the paused_counter
                                # Using the set config parameter as the interval, probably not the most accurate but
                                # it will have to do for now. If it's a websocket request don't use this method.
                                paused_counter = int(
                                    stream['paused_counter']
                                ) + plexpy.CONFIG.MONITORING_INTERVAL
                                monitor_db.action(
                                    'UPDATE sessions SET paused_counter = ? '
                                    'WHERE session_key = ? AND rating_key = ?',
                                    [
                                        paused_counter, stream['session_key'],
                                        stream['rating_key']
                                    ])

                            if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0:
                                # The stream is buffering so we need to increment the buffer_count
                                # We're going just increment on every monitor ping,
                                # would be difficult to keep track otherwise
                                monitor_db.action(
                                    'UPDATE sessions SET buffer_count = buffer_count + 1 '
                                    'WHERE session_key = ? AND rating_key = ?',
                                    [
                                        stream['session_key'],
                                        stream['rating_key']
                                    ])

                                # Check the current buffer count and last buffer to determine if we should notify
                                buffer_values = monitor_db.select(
                                    'SELECT buffer_count, buffer_last_triggered '
                                    'FROM sessions '
                                    'WHERE session_key = ? AND rating_key = ?',
                                    [
                                        stream['session_key'],
                                        stream['rating_key']
                                    ])

                                if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD:
                                    # Push any notifications -
                                    # Push it on its own thread so we don't hold up our db actions
                                    # Our first buffer notification
                                    if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
                                        logger.info(
                                            "Tautulli Monitor :: User '%s' has triggered a buffer warning."
                                            % stream['user'])
                                        # Set the buffer trigger time
                                        monitor_db.action(
                                            'UPDATE sessions '
                                            'SET buffer_last_triggered = strftime("%s","now") '
                                            'WHERE session_key = ? AND rating_key = ?',
                                            [
                                                stream['session_key'],
                                                stream['rating_key']
                                            ])

                                        plexpy.NOTIFY_QUEUE.put({
                                            'stream_data': stream.copy(),
                                            'notify_action': 'on_buffer'
                                        })

                                    else:
                                        # Subsequent buffer notifications after wait time
                                        if helpers.timestamp() > buffer_values[0]['buffer_last_triggered'] + \
                                                plexpy.CONFIG.BUFFER_WAIT:
                                            logger.info(
                                                "Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
                                                % stream['user'])
                                            # Set the buffer trigger time
                                            monitor_db.action(
                                                'UPDATE sessions '
                                                'SET buffer_last_triggered = strftime("%s","now") '
                                                'WHERE session_key = ? AND rating_key = ?',
                                                [
                                                    stream['session_key'],
                                                    stream['rating_key']
                                                ])

                                            plexpy.NOTIFY_QUEUE.put({
                                                'stream_data': stream.copy(),
                                                'notify_action': 'on_buffer'
                                            })

                                logger.debug(
                                    "Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
                                    % (stream['session_key'],
                                       buffer_values[0]['buffer_count'],
                                       buffer_values[0]['buffer_last_triggered']))

                            # Check if the user has reached the offset in the media we defined as the "watched" percent
                            # Don't trigger if the state is buffering, as some clients push the progress
                            # to the end when buffering on start.
                            if session['state'] != 'buffering':
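                                # get_percent is assumed to return view_offset / duration as a
                                # 0-100 percentage (inferred from the threshold comparisons below)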
                                progress_percent = helpers.get_percent(
                                    session['view_offset'], session['duration'])
                                notify_states = notification_handler.get_notify_state(
                                    session=session)
                                if ((session['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
                                     session['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
                                     session['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT)
                                        and not any(d['notify_action'] == 'on_watched' for d in notify_states)):
                                    plexpy.NOTIFY_QUEUE.put({
                                        'stream_data': stream.copy(),
                                        'notify_action': 'on_watched'
                                    })

                else:
                    # The user has stopped playing a stream
                    if stream['state'] != 'stopped':
                        logger.debug(
                            "Tautulli Monitor :: Session %s stopped." %
                            stream['session_key'])

                        if not stream['stopped']:
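                            # Stamp the stop time only once; later passes will see
                            # stream['stopped'] already set and skip this block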
                            # Set the stream stop time
                            stream['stopped'] = helpers.timestamp()
                            monitor_db.action(
                                'UPDATE sessions SET stopped = ?, state = ? '
                                'WHERE session_key = ? AND rating_key = ?',
                                [stream['stopped'], 'stopped',
                                 stream['session_key'], stream['rating_key']])

                        progress_percent = helpers.get_percent(
                            stream['view_offset'], stream['duration'])
                        notify_states = notification_handler.get_notify_state(
                            session=stream)
                        if ((stream['media_type'] == 'movie' and progress_percent >= plexpy.CONFIG.MOVIE_WATCHED_PERCENT or
                             stream['media_type'] == 'episode' and progress_percent >= plexpy.CONFIG.TV_WATCHED_PERCENT or
                             stream['media_type'] == 'track' and progress_percent >= plexpy.CONFIG.MUSIC_WATCHED_PERCENT)
                                and not any(d['notify_action'] == 'on_watched' for d in notify_states)):
                            plexpy.NOTIFY_QUEUE.put({
                                'stream_data': stream.copy(),
                                'notify_action': 'on_watched'
                            })

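                        # Always queue the on_stop notification once the stream has stopped,
                        # regardless of whether on_watched fired above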
                        plexpy.NOTIFY_QUEUE.put({
                            'stream_data': stream.copy(),
                            'notify_action': 'on_stop'
                        })

                    # Write the item history on playback stop
                    row_id = monitor_process.write_session_history(session=stream)

                    if row_id:
                        # If the session was written to the database successfully, remove it from the session table
                        logger.debug(
                            "Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                            % (stream['session_key'], stream['rating_key']))
                        monitor_process.delete_session(row_id=row_id)
                    else:
                        stream['write_attempts'] += 1

                        if stream['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
                            logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
                                        "Will try again on the next pass. Write attempt %s."
                                        % (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
                            monitor_process.increment_write_attempts(
                                session_key=stream['session_key'])
                        else:
                            logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
                                        "Removing session from the database. Write attempt %s."
                                        % (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
                            logger.debug(
                                "Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
                                % (stream['session_key'], stream['rating_key']))
                            monitor_process.delete_session(
                                session_key=stream['session_key'])

            # Process the newly received session data
            for session in media_container:
                new_session = monitor_process.write_session(session)
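                # write_session is assumed to return a truthy value only for a
                # newly created session row (inferred from the logging below)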

                if new_session:
                    logger.debug(
                        "Tautulli Monitor :: Session %s started by user %s (%s) with ratingKey %s (%s)%s."
                        % (str(session['session_key']), str(session['user_id']),
                           session['username'], str(session['rating_key']),
                           session['full_title'],
                           ' [Live TV]' if session['live'] else ''))

        else:
            logger.debug("Tautulli Monitor :: Unable to read session list.")