def get_historical_job_data(self, job_id):
    """
    Read the stored data for a completed job and decorate its statistics
    with human-readable date strings and a formatted duration.

    :param job_id: identifier of the completed job to look up
    :return: the job data dict (decorated in place when statistics exist)
    """
    history_logging = history.History(self.config)
    job_data = history_logging.read_completed_job_data(job_id)
    if 'statistics' in job_data:
        stats = job_data['statistics']
        # Each field is decorated independently so one missing key does not
        # prevent the others from being set.
        try:
            stats['start_datetime'] = self.make_pretty_date_string(stats['start_time'])
        except KeyError:
            tornado.log.app_log.warning(
                "Error setting start datetime in historical item job data.",
                exc_info=True)
        try:
            stats['finish_datetime'] = self.make_pretty_date_string(stats['finish_time'])
        except KeyError:
            tornado.log.app_log.warning(
                "Error setting finish datetime in historical item job data.",
                exc_info=True)
        try:
            elapsed = stats['finish_time'] - stats['start_time']
            minutes, seconds = divmod(elapsed, 60)
            hours, minutes = divmod(minutes, 60)
            stats['duration'] = '{:d} hours, {:02d} minutes, {:02d} seconds'.format(
                int(hours), int(minutes), int(seconds))
        except KeyError:
            tornado.log.app_log.warning(
                "Error setting duration in historical item job data.",
                exc_info=True)
    # TODO: Add audio and video encoder data
    return job_data
def write_history_log(self):
    """
    Append the current task's result to the JSON history log and write the
    full task dump to a per-job details file under the config directory.

    Logs (rather than raises) any errors reported by the JSON dump helper.

    :return: None
    """
    # Read the current history log from file
    history_logging = history.History(self.settings)
    historical_log = history_logging.read_history_log()
    # Set the completed timestamp
    time_completed = time.time()
    # Set the job id (random prefix plus timestamp keeps it unique)
    job_id = '{}-{}'.format(common.random_string(), time_completed)
    # Append the file data to the history log
    historical_log.append({
        'job_id': job_id,
        'description': self.current_task.source['basename'],
        'time_complete': time_completed,
        'abspath': self.current_task.source['abspath'],
        'success': self.current_task.success
    })
    # Ensure the config and completed-job-details directories exist.
    # exist_ok avoids the race between an existence check and makedirs.
    os.makedirs(self.settings.CONFIG_PATH, exist_ok=True)
    completed_job_details_dir = os.path.join(self.settings.CONFIG_PATH,
                                             'completed_job_details')
    os.makedirs(completed_job_details_dir, exist_ok=True)
    # Set path of history json file
    history_file = os.path.join(self.settings.CONFIG_PATH, 'history.json')
    # Set path of conversion details file
    job_details_file = os.path.join(completed_job_details_dir,
                                    '{}.json'.format(job_id))
    result = common.json_dump_to_file(self.current_task.task_dump(),
                                      job_details_file)
    if not result['success']:
        for message in result['errors']:
            self._log("Exception in writing history to file:",
                      message2=str(message), level="exception")
    result = common.json_dump_to_file(historical_log, history_file)
    if not result['success']:
        for message in result['errors']:
            self._log("Exception in writing history to file:",
                      message2=str(message), level="exception")
def write_history_log(self):
    """
    Record task history
    :return:
    """
    self._log("Writing task history log.", level='debug')
    task_dump = self.current_task.task_dump()
    stats = task_dump['statistics']
    # Persist a summary record alongside the full task dump.
    history.History(self.settings).save_task_history({
        'task_label': self.current_task.source['basename'],
        'task_success': self.current_task.success,
        'start_time': stats['start_time'],
        'finish_time': stats['finish_time'],
        'processed_by_worker': stats['processed_by_worker'],
        'task_dump': task_dump,
    })
def __init__(self):
    """
    Build the Config object.

    Settings are layered in a fixed precedence order: defaults, then
    database values, then environment variables, then the config file
    (the file overrides everything before it).
    """
    # Non config items (objects)
    self.name = "Config"
    self.settings = None
    # Layer in settings — the call order here establishes precedence,
    # and migrations must run before the DB is read.
    self.apply_default_db_settings()
    self.run_db_migrations()
    self.import_settings_from_db()
    self.import_settings_from_env()
    self.import_settings_from_file()
    # Apply settings to the unmanic logger
    self.setup_unmanic_logger()
    # Supported codecs and containers (for destination)
    self.SUPPORTED_CODECS = unffmpeg.Info().get_all_supported_codecs()
    self.SUPPORTED_CONTAINERS = unffmpeg.containers.get_all_containers()
    # TODO: Remove temporary beta data migration
    history.History(self).migrate_old_beta_data()
def get_historical_tasks(self):
    """Return the parsed history log via the history module."""
    return history.History(self.config).read_history_log()
def get_historical_tasks(self):
    """
    Fetch the 20 most recent historic tasks, cache them on the instance
    and return the cached list.
    """
    history_logging = history.History(self.config)
    recent_tasks = history_logging.get_historic_task_list(20)
    self.historic_task_list = list(recent_tasks)
    return self.historic_task_list
def get_historical_job_data_for_template(self, job_id):
    """
    Build the dictionary of historic task details consumed by the template.

    :param job_id: identifier of the historic task to look up
    :return: template-ready dict, or False when no task data is found
    """
    history_logging = history.History(self.config)
    task_data = history_logging.get_historic_task_data_dictionary(job_id)
    if not task_data:
        return False
    # Set params as required in template
    template_task_data = {
        'id': task_data['id'],
        'task_label': task_data.get('task_label'),
        'statistics': {
            'task_success': task_data.get('task_success'),
            'duration': '',
            'start_time': task_data.get('start_time'),
            'finish_time': task_data.get('finish_time'),
            'start_datetime': '',
            'finish_datetime': '',
            'processed_by_worker': task_data.get('processed_by_worker'),
        },
        'source': {},
        'destination': {},
        'ffmpeg_log': ''
    }
    # Generate source/destination ffprobe data
    source_file_size = 0
    destination_file_size = 0
    for probe in task_data.get('historictaskprobe_set', []):
        if probe['type'] == 'source':
            template_task_data['source'] = probe
            source_file_size = probe['size']
        elif probe['type'] == 'destination':
            template_task_data['destination'] = probe
            destination_file_size = probe['size']
    # Generate statistics data
    # TODO: Add audio and video encoder data
    template_task_data['statistics']['source_file_size'] = source_file_size
    template_task_data['statistics']['destination_file_size'] = destination_file_size
    for ffmpeg_log in task_data.get('historictaskffmpeglog_set', []):
        template_task_data['ffmpeg_log'] += ffmpeg_log['dump']
    # BUGFIX: task_data.get() returns None for a missing key instead of
    # raising KeyError, so failures below surface as TypeError (e.g.
    # formatting or subtracting None). Catch both so the warning path
    # actually fires instead of crashing the request.
    try:
        template_task_data['statistics']['start_datetime'] = self.make_pretty_date_string(
            task_data.get('start_time'))
    except (KeyError, TypeError):
        tornado.log.app_log.warning("Error setting start datetime in historical item job data.",
                                    exc_info=True)
    try:
        template_task_data['statistics']['finish_datetime'] = self.make_pretty_date_string(
            task_data.get('finish_time'))
    except (KeyError, TypeError):
        tornado.log.app_log.warning("Error setting finish datetime in historical item job data.",
                                    exc_info=True)
    try:
        duration = task_data.get('finish_time') - task_data.get('start_time')
        m, s = divmod(duration, 60)
        h, m = divmod(m, 60)
        # BUGFIX: the original string literal was broken mid-token across a
        # line boundary (a syntax error); reassembled to match the duration
        # format used elsewhere in this file.
        pretty_duration = '{:d} hours, {:02d} minutes, {:02d} seconds'.format(
            int(h), int(m), int(s))
        template_task_data['statistics']['duration'] = pretty_duration
    except (KeyError, TypeError):
        tornado.log.app_log.warning("Error setting duration in historical item job data.",
                                    exc_info=True)
    return template_task_data
def get_historical_tasks(self):
    """Return the full historic task list from the history module."""
    history_logging = history.History(self.config)
    return history_logging.get_historic_task_list()