Esempio n. 1
0
    def __init__(self, config_path=None, db_connection=None):
        """
        Initialise the Config object and load settings from all sources.

        Settings are applied in order of increasing precedence:
        environment variables, then the database, then the config file.

        :param config_path: optional path to a config file; recorded as a setting when given
        :param db_connection: optional database connection used when persisting settings
        """
        # Non config items (objects)
        self.name = "Config"
        self.settings = None
        self.db_connection = db_connection

        # Apply default DB settings
        #self.apply_default_db_settings(config_path)

        # Import env variables and override all previous settings.
        self.import_settings_from_env()
        # Read settings from database (overrides env values)
        self.import_settings_from_db()
        # TODO: Retire this. It is not needed any longer
        # Finally, re-read config from file and override all previous settings.
        self.import_settings_from_file(config_path)

        # Record the config path itself as a setting (without persisting yet)
        if config_path:
            self.set_config_item('config_path',
                                 config_path,
                                 save_settings=False)

        # Apply settings to the unmanic logger
        self.setup_unmanic_logger()

        # Persist settings only when both a settings object and a DB connection exist
        if self.settings and self.db_connection:
            self.settings.save()

        # TODO: Remove temporary beta data migration
        history_logging = history.History(self)
        history_logging.migrate_old_beta_data()
Esempio n. 2
0
    def write_history_log(self):
        """
        Record task history

        :return:
        """
        self._log("Writing task history log.", level='debug')
        history_logging = history.History()
        task_dump = self.current_task.task_dump()

        # Build the history record from the task dump, defaulting missing fields to ''
        record_fields = (
            'task_label',
            'abspath',
            'task_success',
            'start_time',
            'finish_time',
            'processed_by_worker',
            'log',
        )
        history_logging.save_task_history(
            {field: task_dump.get(field, '') for field in record_fields})
Esempio n. 3
0
    def __init__(self):
        """
        Initialise the Config object: set DB defaults, run migrations,
        then load settings from the database, environment, and config file
        (in order of increasing precedence).
        """
        # Non config items (objects)
        self.name = "Config"
        self.settings = None

        # Set default db config
        self.DATABASE = None
        self.apply_default_db_settings()
        # Run DB migrations
        self.run_db_migrations()
        # Init DB connection and read settings
        self.import_settings_from_db()
        # Import env variables (override DB values)
        self.import_settings_from_env()
        # Finally, read config from file and override all above settings.
        self.import_settings_from_file()
        # Apply settings to the unmanic logger
        self.setup_unmanic_logger()
        # Set the supported codecs (for destination)
        self.SUPPORTED_CODECS = unffmpeg.Info().get_all_supported_codecs()
        # Set the supported containers (for destination)
        self.SUPPORTED_CONTAINERS = unffmpeg.containers.get_all_containers()
        # TODO: Remove temporary beta data migration
        history_logging = history.History(self)
        history_logging.migrate_old_beta_data()
Esempio n. 4
0
 def get_historical_tasks(self):
     """Return the 20 most recent historic tasks, each annotated with a human readable finish time."""
     self.historic_task_list = []
     history_logging = history.History(self.config)
     for record in list(history_logging.get_historic_task_list(20)):
         record['human_readable_time'] = common.make_timestamp_human_readable(
             int(record['finish_time']))
         self.historic_task_list.append(record)
     return self.historic_task_list
Esempio n. 5
0
    def write_history_log(self):
        """
        Record task history

        :return:
        """
        self._log("Writing task history log.", level='debug')
        history_logging = history.History(self.settings)
        task_dump = self.current_task.task_dump()

        # Best-effort: probe the destination file and attach the probe data
        # to the task dump. Failures are logged and the history is still saved.
        try:
            destination_data = self.current_task.get_destination_data()
            destination_file_probe = self.ffmpeg.file_probe(
                destination_data['abspath'])
            file_probe_format = destination_file_probe.get('format', {})

            for fmt_key in ('bit_rate', 'format_long_name', 'format_name',
                            'size', 'duration'):
                destination_data[fmt_key] = file_probe_format.get(fmt_key, '')
            destination_data['streams'] = destination_file_probe.get(
                'streams', [])
            task_dump['file_probe_data']['destination'] = destination_data
        except unffmpeg.exceptions.ffprobe.FFProbeError as e:
            self._log("Exception in method write_history_log",
                      str(e),
                      level='exception')
        except Exception as e:
            self._log("Exception in method write_history_log",
                      str(e),
                      level='exception')

        # Assemble the history record, defaulting missing fields to ''
        record = {
            key: task_dump.get(key, '')
            for key in ('task_label', 'task_success', 'start_time',
                        'finish_time', 'processed_by_worker')
        }
        record['task_dump'] = task_dump
        history_logging.save_task_history(record)
Esempio n. 6
0
def remove_completed_tasks(completed_task_ids):
    """
    Removes a list of completed tasks

    :param completed_task_ids:
    :return:
    """
    # Recursively delete each record (and its related data) by ID
    history_manager = history.History()
    return history_manager.delete_historic_tasks_recursively(
        id_list=completed_task_ids)
Esempio n. 7
0
    def delete_historic_tasks(self, historic_task_ids):
        """
        Deletes a list of historic tasks

        :param historic_task_ids:
        :return:
        """
        # Remove each matching record (and its related data) by ID
        return history.History(self.config).delete_historic_tasks_recursively(
            id_list=historic_task_ids)
Esempio n. 8
0
    def file_failed_in_history(self):
        """
        Check if file has already failed in history

        :return:
        """
        history_logging = history.History(self.settings)
        failed_results = history_logging.get_historic_tasks_list_with_source_probe(
            abspath=self.path, task_success=False)
        # A non-empty result set means this pathname previously failed
        return bool(failed_results)
Esempio n. 9
0
def read_command_log_for_task(task_id):
    """Return the raw command log text and its formatted lines for a completed task."""
    data = {
        'command_log': '',
        'command_log_lines': [],
    }
    task_data = history.History().get_historic_task_data_dictionary(
        task_id=task_id)
    if task_data:
        # Concatenate every command log dump attached to this task
        for entry in task_data.get('completedtaskscommandlogs_set', []):
            dump = entry['dump']
            data['command_log'] += dump
            data['command_log_lines'] += format_ffmpeg_log_text(
                dump.split("\n"))
    return data
Esempio n. 10
0
    def file_failed_in_history(self, path):
        """
        Check if file has already failed in history

        :return:
        """
        history_logging = history.History()
        # Lazily populate the cached list of previously failed paths once
        if not self.failed_paths:
            for failed_task in history_logging.get_historic_tasks_list_with_source_probe(
                    task_success=False):
                self.failed_paths.append(failed_task.get('abspath'))
        # True when this pathname was found among failed historic tasks
        return path in self.failed_paths
Esempio n. 11
0
def add_historic_tasks_to_pending_tasks_list(historic_task_ids,
                                             library_id=None):
    """
    Adds a list of historical tasks to the pending tasks list.

    Records whose file no longer exists, or whose file is already queued,
    are skipped and reported in the returned errors mapping.

    :param historic_task_ids: list of completed-task record IDs to re-queue
    :param library_id: optional library ID to create the new tasks under
    :return: dict mapping record ID -> error message for records that could not be queued
    """
    errors = {}
    # Fetch the current path of each requested historical task
    history_logging = history.History()
    records_by_id = history_logging.get_current_path_of_historic_tasks_by_id(
        id_list=historic_task_ids)
    for record in records_by_id:
        # Fetch the abspath name
        abspath = os.path.abspath(record.get("abspath"))

        # Ensure path exists
        if not os.path.exists(abspath):
            errors[record.get("id")] = "Path does not exist - '{}'".format(
                abspath)
            continue

        # Create a new task
        new_task = task.Task()

        if not new_task.create_task_by_absolute_path(abspath,
                                                     library_id=library_id):
            # If file exists in task queue already this will return false.
            # Record the error and move on to the next record.
            errors[record.get(
                "id")] = "File already in task queue - '{}'".format(abspath)
    return errors
Esempio n. 12
0
 async def async_completed_tasks_info(self):
     """Periodically push the 20 most recent completed tasks to the websocket client."""
     while self.sending_completed_tasks_info:
         return_data = []
         history_logging = history.History(self.config)
         for record in list(history_logging.get_historic_task_list(20)):
             finish_time = int(record['finish_time'])
             # Items finished within the last minute are labelled 'Just Now'
             if finish_time + 60 > int(time.time()):
                 record['human_readable_time'] = 'Just Now'
             else:
                 record['human_readable_time'] = \
                     common.make_timestamp_human_readable(finish_time)
             return_data.append(record)
         await self.write_message({
             'success': True,
             'server_id': self.server_id,
             'type': 'completed_tasks',
             'data': return_data,
         })
         await gen.sleep(10)
Esempio n. 13
0
    def write_history_log(self):
        """
        Record task history

        :return:
        """
        self._log("Writing task history log.", level='debug')
        history_logging = history.History(self.settings)
        task_dump = self.current_task.task_dump()
        # Timing/worker fields come from the task dump's statistics section
        stats = task_dump['statistics']
        history_logging.save_task_history({
            'task_label': self.current_task.source['basename'],
            'task_success': self.current_task.success,
            'start_time': stats['start_time'],
            'finish_time': stats['finish_time'],
            'processed_by_worker': stats['processed_by_worker'],
            'task_dump': task_dump,
        })
Esempio n. 14
0
    def add_historic_tasks_to_pending_tasks_list(self, historic_task_ids):
        """
        Adds a list of historical tasks to the pending tasks list.

        :param historic_task_ids: list of historic task record IDs to re-queue
        :return: True when every record was queued; False when any record's
                 file was missing or already present in the task queue
        """
        success = True
        # Fetch the current path of each requested historical task
        history_logging = history.History(self.config)
        records_by_id = history_logging.get_current_path_of_historic_tasks_by_id(
            id_list=historic_task_ids)
        for record in records_by_id:
            # Fetch the abspath name
            abspath = os.path.abspath(record.get("abspath"))

            # Ensure path exists
            if not os.path.exists(abspath):
                success = False
                continue

            # Create a new task
            new_task = task.Task(tornado.log.app_log)

            # Run a probe on the file for current data
            source_data = common.fetch_file_data_by_path(abspath)

            if not new_task.create_task_by_absolute_path(
                    abspath, self.config, source_data):
                # If file exists in task queue already this will return false.
                # Mark the overall result as failed and move to the next record.
                success = False
        return success
Esempio n. 15
0
    def manage_completed_tasks(self):
        """
        Remove completed task records older than the configured maximum age.

        Does nothing unless auto-management of completed tasks is enabled.
        When 'always keep failed tasks' is set, only successful tasks are
        removed; otherwise both successful and failed tasks are removed.
        """
        settings = config.Config()
        # Only run if configured to auto manage completed tasks
        if not settings.get_auto_manage_completed_tasks():
            return

        self._log("Running completed task cleanup for this installation")
        max_age_in_days = settings.get_max_age_of_completed_tasks()
        date_x_days_ago = datetime.now() - timedelta(days=int(max_age_in_days))
        before_time = date_x_days_ago.timestamp()

        # task_success=None matches both successful and failed tasks
        task_success = True
        inc_status = 'successfully'
        if not settings.get_always_keep_failed_tasks():
            inc_status = 'successfully or failed'
            task_success = None

        # Fetch completed tasks.
        # Build the filtered query once and reuse it for the count and delete.
        from unmanic.libs import history
        history_logging = history.History()
        results = history_logging.get_historic_task_list_filtered_and_sorted(task_success=task_success,
                                                                             before_time=before_time)
        count = results.count()

        if count == 0:
            self._log("Found no {} completed tasks older than {} days".format(inc_status, max_age_in_days))
            return

        self._log(
            "Found {} {} completed tasks older than {} days that should be removed".format(count, inc_status, max_age_in_days))
        if not history_logging.delete_historic_tasks_recursively(results):
            self._log("Failed to delete {} {} completed tasks".format(count, inc_status), level='error')
            return

        self._log("Deleted {} {} completed tasks".format(count, inc_status))
Esempio n. 16
0
    def __init__(self, db_file=None):
        """
        Initialise the Config object and load settings from all sources.

        The config file is read twice: once before DB migrations (it may
        point at a different DB location) and again at the end so file
        values take final precedence.

        :param db_file: optional path to the database file, overriding the default
        """
        # Non config items (objects)
        self.name = "Config"
        self.settings = None

        # Set default UI port
        self.UI_PORT = 8888
        # Set default config directory
        self.CONFIG_PATH = CONFIG_PATH

        # Set default db config
        self.DATABASE = None
        self.apply_default_db_settings()
        # Overwrite default DB config
        if db_file:
            self.DATABASE['FILE'] = db_file

        # Import env variables and override all previous settings.
        self.import_settings_from_env()
        # Read config from file and override all previous settings (this may include the DB location).
        self.import_settings_from_file()
        # Run DB migrations
        self.run_db_migrations()
        # Init DB connection and read settings
        self.import_settings_from_db()
        # Finally, re-read config from file and override all previous settings.
        self.import_settings_from_file()
        # Apply settings to the unmanic logger
        self.setup_unmanic_logger()
        # Set the supported codecs (for destination)
        self.SUPPORTED_CODECS = unffmpeg.Info().get_all_supported_codecs()
        # Set the supported containers (for destination)
        self.SUPPORTED_CONTAINERS = unffmpeg.containers.get_all_containers()
        # TODO: Remove temporary beta data migration
        history_logging = history.History(self)
        history_logging.migrate_old_beta_data()
Esempio n. 17
0
def prepare_filtered_completed_tasks(params):
    """
    Returns a object of historical records filtered and sorted
    according to the provided request.

    :param params:
    :return:
    """
    start = params.get('start', 0)
    length = params.get('length', 0)
    search_value = params.get('search_value', '')
    status = params.get('status', 'all')
    order = params.get('order', {
        "column": 'finish_time',
        "dir": 'desc',
    })

    # Map the requested status onto the task_success filter
    # ('all' or anything unrecognised means no filter).
    task_success = {'success': True, 'failed': False}.get(status)

    def _parse_time(key):
        # Convert an optional ISO-8601 request param into a unix timestamp.
        value = params.get(key)
        if value:
            return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S').timestamp()
        return None

    after_time = _parse_time('after')
    before_time = _parse_time('before')

    # Fetch historical tasks
    history_logging = history.History()
    # Get total count
    records_total_count = history_logging.get_total_historic_task_list_count()
    # Get total success count
    records_total_success_count = history_logging.get_historic_task_list_filtered_and_sorted(
        task_success=True).count()
    # Get total failed count
    records_total_failed_count = history_logging.get_historic_task_list_filtered_and_sorted(
        task_success=False).count()
    # Get quantity after filters (without pagination)
    records_filtered_count = history_logging.get_historic_task_list_filtered_and_sorted(
        order=order,
        start=0,
        length=0,
        search_value=search_value,
        task_success=task_success,
        after_time=after_time,
        before_time=before_time).count()
    # Get filtered/sorted results
    task_results = history_logging.get_historic_task_list_filtered_and_sorted(
        order=order,
        start=start,
        length=length,
        search_value=search_value,
        task_success=task_success,
        after_time=after_time,
        before_time=before_time)

    # Reduce each record to the fields required by the template
    results = [{
        'id': record['id'],
        'task_label': record['task_label'],
        'task_success': record['task_success'],
        'finish_time': record['finish_time'],
    } for record in task_results]

    # Build return data
    return {
        "recordsTotal": records_total_count,
        "recordsFiltered": records_filtered_count,
        "successCount": records_total_success_count,
        "failedCount": records_total_failed_count,
        "results": results,
    }
Esempio n. 18
0
    def get_historical_job_data_for_template(self, job_id):
        """
        Build the template data dictionary for a single historical job.

        :param job_id: ID of the historic task record to fetch
        :return: dict of template data, or False when no record exists for job_id
        """
        history_logging = history.History(self.config)
        task_data = history_logging.get_historic_task_data_dictionary(job_id)
        if not task_data:
            return False
        # Set params as required in template
        template_task_data = {
            'id': task_data['id'],
            'task_label': task_data.get('task_label'),
            'statistics': {
                'task_success': task_data.get('task_success'),
                'duration': '',
                'start_time': task_data.get('start_time'),
                'finish_time': task_data.get('finish_time'),
                'start_datetime': '',
                'finish_datetime': '',
                'processed_by_worker': task_data.get('processed_by_worker'),
            },
            'source': {},
            'destination': {},
            'ffmpeg_log': '',
            'ffmpeg_log_lines': [],
        }

        # Generate source/destination ffprobe data
        source_file_size = 0
        destination_file_size = 0
        for probe in task_data.get('historictaskprobe_set', []):
            if probe['type'] == 'source':
                template_task_data['source'] = probe
                source_file_size = probe['size']
            elif probe['type'] == 'destination':
                template_task_data['destination'] = probe
                destination_file_size = probe['size']

        # Generate statistics data
        # TODO: Add audio and video encoder data
        template_task_data['statistics']['source_file_size'] = source_file_size
        template_task_data['statistics'][
            'destination_file_size'] = destination_file_size

        for ffmpeg_log in task_data.get('historictaskffmpeglog_set', []):
            template_task_data['ffmpeg_log'] += ffmpeg_log['dump']
            template_task_data[
                'ffmpeg_log_lines'] += self.format_ffmpeg_log_text(
                    ffmpeg_log['dump'].split("\n"))

        # NOTE: task_data.get() never raises KeyError for missing keys — a
        # missing timestamp surfaces as a TypeError from the formatting call
        # or the subtraction below. Catch both so an incomplete record
        # degrades to a logged warning instead of an unhandled exception.
        try:
            template_task_data['statistics'][
                'start_datetime'] = self.make_pretty_date_string(
                    task_data.get('start_time'))
        except (KeyError, TypeError):
            tornado.log.app_log.warning(
                "Error setting start datetime in historical item job data.",
                exc_info=True)

        try:
            template_task_data['statistics'][
                'finish_datetime'] = self.make_pretty_date_string(
                    task_data.get('finish_time'))
        except (KeyError, TypeError):
            tornado.log.app_log.warning(
                "Error setting finish datetime in historical item job data.",
                exc_info=True)

        try:
            duration = task_data.get('finish_time') - task_data.get(
                'start_time')
            m, s = divmod(duration, 60)
            h, m = divmod(m, 60)
            pretty_duration = '{:d} hours, {:02d} minutes, {:02d} seconds'.format(
                int(h), int(m), int(s))
            template_task_data['statistics']['duration'] = pretty_duration
        except (KeyError, TypeError):
            tornado.log.app_log.warning(
                "Error setting duration in historical item job data.",
                exc_info=True)

        return template_task_data
Esempio n. 19
0
 def get_historical_tasks(self):
     """Return the full list of historic tasks from the history manager."""
     return history.History(self.config).get_historic_task_list()
Esempio n. 20
0
    def prepare_filtered_historic_tasks(self, request_dict):
        """
        Returns a object of historical records filtered and sorted
        according to the provided request.

        :param request_dict:
        :return:
        """
        # Pagination and search parameters from the datatables request
        draw = request_dict.get('draw')
        start = request_dict.get('start')
        length = request_dict.get('length')
        search_value = request_dict.get('search').get("value")

        # Resolve the sort column: the request names the column, the column
        # definitions map that name onto the actual data field.
        filter_order = request_dict.get('order')[0]
        order = {
            "column": filter_order.get('column'),
            "dir": filter_order.get('dir'),
        }
        for column in request_dict.get('columns'):
            if column.get("name") == order["column"]:
                order["column"] = column.get("data")

        # Fetch historical tasks
        history_logging = history.History(self.config)
        # Total record count (unfiltered)
        records_total_count = history_logging.get_total_historic_task_list_count(
        )
        # Count after filters are applied (without pagination)
        records_filtered_count = history_logging.get_historic_task_list_filtered_and_sorted(
            order, 0, 0, search_value).count()
        # Filtered, sorted, paginated results
        task_results = history_logging.get_historic_task_list_filtered_and_sorted(
            order, start, length, search_value)

        return_data = {
            "draw": draw,
            "recordsTotal": records_total_count,
            "recordsFiltered": records_filtered_count,
            "successCount": 0,
            "failedCount": 0,
            "data": []
        }

        # Reduce each record to the template fields and tally success/failure
        for record in task_results:
            item = {
                'id': record['id'],
                'selected': False,
                'finish_time': time.strftime(
                    '%Y-%m-%d %H:%M:%S',
                    time.localtime(record['finish_time'])),
                'task_label': record['task_label'],
                'task_success': record['task_success'],
            }
            if item['task_success']:
                return_data["successCount"] += 1
            else:
                return_data["failedCount"] += 1
            return_data["data"].append(item)

        return return_data