def __init__(self):
    # set other data
    self.server_start_time = time.mktime(time.gmtime())
    self.users = {}  # maps user names to network instances
    self.option_config_json, self.db_connection = common_config_ini.com_config_read()
    common_global.es_inst.com_elastic_index('info', {'stuff': 'Ready for twisted connections!'})
    for cast_device in self.db_connection.db_device_list(device_type='cast'):
        common_global.client_devices.append(('cast', cast_device))
    for roku_device in self.db_connection.db_device_list(device_type='roku'):
        common_global.client_devices.append(('roku', roku_device))
def build(self):
    global metaapp
    root = MediaKrakenApp()
    metaapp = self
    # start logging
    common_global.es_inst = common_logging_elasticsearch.CommonElasticsearch(
        'main_server_link')
    # open the database
    option_config_json, self.db_connection = common_config_ini.com_config_read()
    self.connect_to_server()
    return root
from flask import Blueprint, flash
from flask_login import login_required

blueprint = Blueprint("admins_link", __name__,
                      url_prefix='/admin', static_folder="../static")
# need the following three items for admin check
import flask
from flask_login import current_user
from functools import wraps
from MediaKraken.admins.forms import LinkAddEditForm

from common import common_config_ini
from common import common_global
from common import common_pagination
import database as database_base

option_config_json, db_connection = common_config_ini.com_config_read()


def flash_errors(form):
    """
    Display errors from list
    """
    for field, errors in form.errors.items():
        for error in errors:
            flash("Error in the %s field - %s" % (
                getattr(form, field).label.text,
                error
            ))
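# typical call site (view wiring assumed), e.g. inside a POST handler:
#   form = LinkAddEditForm(request.form)
#   if not form.validate_on_submit():
#       flash_errors(form)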


def admin_required(fn):
    """
    Decorator that allows a view only for admin users
    (the body is reconstructed here as a minimal sketch; the
    'is_admin' attribute on the user model is an assumption)
    """
    @wraps(fn)
    def decorated_view(*args, **kwargs):
        if not current_user.is_authenticated or not current_user.is_admin:
            flask.abort(403)
        return fn(*args, **kwargs)
    return decorated_view
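# hypothetical usage on a route in this blueprint:
#   @blueprint.route("/link")
#   @login_required
#   @admin_required
#   def admin_link():
#       ...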
# -*- coding: utf-8 -*-

from flask import Blueprint, render_template, g, flash

blueprint = Blueprint("user_character",
                      __name__,
                      url_prefix='/users',
                      static_folder="../static")
import sys

sys.path.append('..')
sys.path.append('../..')
from common import common_config_ini
import database as database_base

option_config_json, db_connection = common_config_ini.com_config_read()


def flash_errors(form):
    """
    Display each error on top of form
    """
    for field, errors in form.errors.items():
        for error in errors:
            flash("Error in the %s field - %s" %
                  (getattr(form, field).label.text, error))


@blueprint.route("/charlist/<account_uuid>")
def character_list(account_uuid):
    """
def setup_class(self):
    # open the database
    option_config_json, db_connection = common_config_ini.com_config_read(db_prod=False)
    self.imvdb_connection = common_metadata_imvdb.CommonMetadataIMVdb(
        option_config_json)
def setup_class(self):
    # open the database
    option_config_json, db_connection = common_config_ini.com_config_read(db_prod=False)
    self.db_connection = common_metadata_thesportsdb.CommonMetadataTheSportsDB(
        option_config_json)
def worker(row_data):
    """
    Worker ffmpeg thread for each sync job
    """
    common_global.es_inst.com_elastic_index('info', {'row': row_data})
    # open the database
    option_config_json, thread_db = common_config_ini.com_config_read()
    # row_data columns: mm_sync_guid uuid NOT NULL, mm_sync_path text,
    # mm_sync_path_to text, mm_sync_options_json jsonb
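    # hypothetical example payload matching the keys used below (values invented):
    # {'mm_sync_guid': '8b0f...', 'mm_sync_path': '/media/movies/film.mkv',
    #  'mm_sync_path_to': '/sync/film',
    #  'mm_sync_options_json': {'Media GUID': 'c1d2...',
    #                           'Type': 'Local File System',
    #                           'Options': {'Size': 'Clone', 'VCodec': 'Copy',
    #                                       'AudioChannels': 'Copy', 'ACodec': 'Copy',
    #                                       'ASRate': 'Default', 'VContainer': 'mkv'}}}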
    ffmpeg_params = ['./bin/ffmpeg', '-i', thread_db.db_media_path_by_uuid(
        row_data['mm_sync_options_json']['Media GUID'])[0]]
    if row_data['mm_sync_options_json']['Options']['Size'] != "Clone":
        ffmpeg_params.extend(('-fs',
                              row_data['mm_sync_options_json']['Options']['Size']))
    if row_data['mm_sync_options_json']['Options']['VCodec'] != "Copy":
        ffmpeg_params.extend(
            ('-vcodec', row_data['mm_sync_options_json']['Options']['VCodec']))
    if row_data['mm_sync_options_json']['Options']['AudioChannels'] != "Copy":
        ffmpeg_params.extend(('-ac',
                              row_data['mm_sync_options_json']['Options']['AudioChannels']))
    if row_data['mm_sync_options_json']['Options']['ACodec'] != "Copy":
        ffmpeg_params.extend(('-acodec',
                              row_data['mm_sync_options_json']['Options']['ACodec']))
    if row_data['mm_sync_options_json']['Options']['ASRate'] != 'Default':
        ffmpeg_params.extend(
            ('-ar', row_data['mm_sync_options_json']['Options']['ASRate']))
    ffmpeg_params.append(row_data['mm_sync_path_to'] + "."
                         + row_data['mm_sync_options_json']['Options']['VContainer'])
    common_global.es_inst.com_elastic_index('info', {'ffmpeg': ffmpeg_params})
    # ffmpeg_params is already a list, so pass it directly (no shlex split);
    # ffmpeg writes its progress to stderr, so merge that into stdout and
    # request text mode so readline() returns str lines instead of bytes
    ffmpeg_pid = subprocess.Popen(ffmpeg_params, shell=False,
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.STDOUT,
                                  universal_newlines=True)
    # output after it gets started
    #  Duration: 01:31:10.10, start: 0.000000, bitrate: 4647 kb/s
    # frame= 1091 fps= 78 q=-1.0 Lsize=    3199kB time=00:00:36.48
    # bitrate= 718.4kbits/s dup=197 drop=0 speed= 2.6x
    media_duration = None
    while True:
        line = ffmpeg_pid.stdout.readline()
        if line != '':
            common_global.es_inst.com_elastic_index('info', {'ffmpeg out': line.rstrip()})
            if line.find("Duration:") != -1:
                # parse 'HH:MM:SS.ss' into a timedelta (timedelta does not accept a string)
                hours, minutes, seconds = line.split(': ', 1)[1].split(',', 1)[0].split(':')
                media_duration = timedelta(hours=int(hours), minutes=int(minutes),
                                           seconds=float(seconds))
            elif line[0:5] == "frame" and media_duration is not None:
                hours, minutes, seconds = line.split('time=', 1)[1].split(' ', 1)[0].split(':')
                time_position = timedelta(hours=int(hours), minutes=int(minutes),
                                          seconds=float(seconds))
                time_percent = time_position.total_seconds() / media_duration.total_seconds()
                thread_db.db_sync_progress_update(row_data['mm_sync_guid'],
                                                  time_percent)
                thread_db.db_commit()
        else:
            break
    ffmpeg_pid.wait()
    # deal with converted file
    if row_data['mm_sync_options_json']['Type'] == 'Local File System':
        # nothing more to do; ffmpeg already wrote the output to mm_sync_path_to
        pass
    elif row_data['mm_sync_options_json']['Type'] == 'Remote Client':
        xfer_thread = common_xfer.FileSenderThread(row_data['mm_sync_options_json']['TargetIP'],
                                                   row_data['mm_sync_options_json']['TargetPort'],
                                                   row_data['mm_sync_path_to'] + "."
                                                   + row_data['mm_sync_options_json']['Options'][
                                                       'VContainer'],
                                                   row_data['mm_sync_path_to'])
    else:  # cloud item
        cloud_handle = common_cloud.CommonCloud(option_config_json)
        cloud_handle.com_cloud_file_store(row_data['mm_sync_options_json']['Type'],
                                          row_data['mm_sync_path_to'],
                                          row_data['mm_sync_path_to'] + "."
                                          + row_data['mm_sync_options_json']['Options'][
                                              'VContainer'].split('/', 1)[1], False)
    thread_db.db_sync_delete(row_data['mm_sync_guid'])  # guid of sync record
    # thread_db.store record in activity table
    thread_db.db_commit()
    thread_db.db_close()
    return


# start logging
common_global.es_inst = common_logging_elasticsearch.CommonElasticsearch(
    'meta_api_worker_%s' % str(sys.argv[1]).lower())

# set signal exit breaks
common_signal.com_signal_set_break()

content_providers = str(sys.argv[1])
common_global.es_inst.com_elastic_index('info', {"worker meta api name":
                                                     content_providers})
# open the database
option_config_json, thread_db = common_config_ini.com_config_read()
# cache the class_text rows in a dict to cut down on db calls
class_text_dict = {}
for class_data in thread_db.db_media_class_list(None, None):
    class_text_dict[class_data['mm_media_class_guid']] = class_data['mm_media_class_type']

# pika rabbitmq connection
parameters = pika.ConnectionParameters('mkrabbitmq', socket_timeout=30,
                                       credentials=pika.PlainCredentials('guest', 'guest'))
connection = pika.BlockingConnection(parameters)

# setup channels and queue
channel = connection.channel()
exchange = channel.exchange_declare(exchange="mkque_metadata_ex",
                                    exchange_type="direct",
                                    durable=True)
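# a minimal sketch of the consumer side for the exchange declared above; the
# queue name, routing key, and callback body are assumptions here (pika 1.x
# API), kept only to show where each message would be acknowledged
queue_frame = channel.queue_declare(queue='mkque_metadata_' + content_providers,
                                    durable=True)
channel.queue_bind(exchange='mkque_metadata_ex',
                   queue=queue_frame.method.queue,
                   routing_key=content_providers)


def on_message(msg_channel, method_frame, header_frame, body):
    """
    Handle a single metadata request message, then ack it
    """
    common_global.es_inst.com_elastic_index('info', {'body': body})
    # ... provider-specific metadata lookup would go here ...
    # TODO add record for activity/etc for the user who ran this
    msg_channel.basic_ack(delivery_tag=method_frame.delivery_tag)


channel.basic_consume(queue=queue_frame.method.queue,
                      on_message_callback=on_message)
channel.start_consuming()
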
def worker(audit_directory):
    """
    Worker thread for each directory
    """
    dir_path, media_class_type_uuid, dir_guid = audit_directory
    # open the database
    option_config_json, thread_db = common_config_ini.com_config_read()
    common_global.es_inst.com_elastic_index('info', {'worker dir': dir_path})
    original_media_class = thread_db.db_media_class_by_uuid(media_class_type_uuid)
    # update the timestamp now so any other media added DURING this scan don't get skipped
    thread_db.db_audit_dir_timestamp_update(dir_path)
    thread_db.db_audit_path_update_status(dir_guid,
                                          json.dumps({'Status': 'File search scan', 'Pct': 0}))
    thread_db.db_commit()
    # check for UNC before grabbing dir list
    if dir_path[:1] == "\\":
        file_data = []
        smb_stuff = common_network_cifs.CommonCIFSShare()
        addr, share, path = common_string.com_string_unc_to_addr_path(dir_path)
        smb_stuff.com_cifs_connect(addr)
        for dir_data in smb_stuff.com_cifs_walk(share, path):
            for file_name in dir_data[2]:
                file_data.append('\\\\' + addr + '\\' + share + '\\' + dir_data[0]
                                 + '\\' + file_name)
        smb_stuff.com_cifs_close()
    else:
        file_data = common_file.com_file_dir_list(dir_path, None, True, False)
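    # e.g. (hypothetical paths) a dir_path of \\mediaserver\video\movies walks the
    # CIFS share above, while /srv/media/movies uses the local directory scan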
    total_file_in_dir = len(file_data)
    total_scanned = 0
    total_files = 0
    for file_name in file_data:
        if file_name in global_known_media:
            pass
        else:
            # lowercase here to avoid repeating .lower() in the code below
            filename_base, file_extension = os.path.splitext(file_name.lower())
            # checking subtitles for parts as need multiple files for multiple media files
            if file_extension[1:] in common_file_extentions.MEDIA_EXTENSION \
                    or file_extension[1:] in common_file_extentions.SUBTITLE_EXTENSION \
                    or file_extension[1:] in common_file_extentions.GAME_EXTENSION:
                ffprobe_bif_data = True
                save_dl_record = True
                total_files += 1
                # set here; this MIGHT be overridden later
                new_class_type_uuid = media_class_type_uuid
                media_class_text = thread_db.db_media_class_by_uuid(media_class_type_uuid)
                # check for "stacked" media file
                # the split below and the splitext above do return different results
                head, base_file_name = os.path.split(file_name)
                # check to see if it's a "stacked" file
                # including games since some are two or more discs
                if common_string.STACK_CD.search(base_file_name) is not None \
                        or common_string.STACK_PART.search(base_file_name) is not None \
                        or common_string.STACK_DVD.search(base_file_name) is not None \
                        or common_string.STACK_PT.search(base_file_name) is not None \
                        or common_string.STACK_DISK.search(base_file_name) is not None \
                        or common_string.STACK_DISC.search(base_file_name) is not None:
                    # check to see if it's part one or not
                    if common_string.STACK_CD1.search(base_file_name) is None \
                            and common_string.STACK_PART1.search(base_file_name) is None \
                            and common_string.STACK_DVD1.search(base_file_name) is None \
                            and common_string.STACK_PT1.search(base_file_name) is None \
                            and common_string.STACK_DISK1.search(base_file_name) is None \
                            and common_string.STACK_DISC1.search(base_file_name) is None:
                        # it's not part one, so no DL record is needed
                        save_dl_record = False
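                        # e.g. (hypothetical names) 'movie cd2.avi' still gets a media
                        # record inserted below, but only 'movie cd1.avi' keeps
                        # save_dl_record True and so triggers the metadata download record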
                # video game data
                if media_class_text == 'Video Game':
                    if file_extension[1:] == 'iso':
                        new_class_type_uuid = class_text_dict['Game ISO']
                    elif file_extension[1:] == 'chd':
                        new_class_type_uuid = class_text_dict['Game CHD']
                    else:
                        new_class_type_uuid = class_text_dict['Game ROM']
                    ffprobe_bif_data = False
                # set new media class for subtitles
                elif file_extension[1:] in common_file_extentions.SUBTITLE_EXTENSION:
                    if original_media_class == 'Movie':
                        new_class_type_uuid = class_text_dict['Movie Subtitle']
                    elif original_media_class == 'TV Show' or original_media_class == 'TV Episode' \
                            or original_media_class == 'TV Season':
                        new_class_type_uuid = class_text_dict['TV Subtitle']
                    else:
                        new_class_type_uuid = class_text_dict['Subtitle']
                    ffprobe_bif_data = False
                # set new media class for trailers or themes
                elif file_name.find('/trailers/') != -1 \
                        or file_name.find('\\trailers\\') != -1 \
                        or file_name.find('/theme.mp3') != -1 \
                        or file_name.find('\\theme.mp3') != -1 \
                        or file_name.find('/theme.mp4') != -1 \
                        or file_name.find('\\theme.mp4') != -1:
                    if media_class_text == 'Movie':
                        if file_name.find('/trailers/') != -1 or file_name.find('\\trailers\\') != -1:
                            new_class_type_uuid = class_text_dict['Movie Trailer']
                        else:
                            new_class_type_uuid = class_text_dict['Movie Theme']
                    elif media_class_text == 'TV Show' or media_class_text == 'TV Episode' \
                            or media_class_text == 'TV Season':
                        if file_name.find('/trailers/') != -1 or file_name.find('\\trailers\\') != -1:
                            new_class_type_uuid = class_text_dict['TV Trailer']
                        else:
                            new_class_type_uuid = class_text_dict['TV Theme']
                # set new media class for extras
                elif file_name.find('/extras/') != -1 or file_name.find('\\extras\\') != -1:
                    if original_media_class == 'Movie':
                        new_class_type_uuid = class_text_dict['Movie Extras']
                    elif original_media_class == 'TV Show' \
                            or original_media_class == 'TV Episode' \
                            or original_media_class == 'TV Season':
                        new_class_type_uuid = class_text_dict['TV Extras']
                # set new media class for backdrops (usually themes)
                elif file_name.find('/backdrops/') != -1 \
                        or file_name.find('\\backdrops\\') != -1:
                    media_class_text = thread_db.db_media_class_by_uuid(new_class_type_uuid)
                    if file_name.find('/theme.mp3') != -1 \
                            or file_name.find('\\theme.mp3') != -1 \
                            or file_name.find('/theme.mp4') != -1 \
                            or file_name.find('\\theme.mp4') != -1:
                        if media_class_text == 'Movie':
                            new_class_type_uuid = class_text_dict['Movie Theme']
                        elif media_class_text == 'TV Show' or media_class_text == 'TV Episode' \
                                or media_class_text == 'TV Season':
                            new_class_type_uuid = class_text_dict['TV Theme']
                # flip around slashes for smb paths
                if file_name[:1] == "\\":
                    file_name = file_name.replace('\\\\', 'smb://*****:*****@').replace('\\', '/')
                # create media_json data
                media_json = json.dumps({'DateAdded': datetime.now().strftime("%Y-%m-%d")})
                media_id = str(uuid.uuid4())
                thread_db.db_insert_media(media_id, file_name, new_class_type_uuid, None, None, media_json)
                # verify ffprobe and bif should run on the data
                if ffprobe_bif_data and file_extension[1:] not in common_file_extentions.MEDIA_EXTENSION_SKIP_FFMPEG \
                        and file_extension[1:] in common_file_extentions.MEDIA_EXTENSION:
                    # Send a message so ffprobe runs
                    channel.basic_publish(exchange='mkque_ffmpeg_ex',
                                          routing_key='mkffmpeg',
                                          body=json.dumps(
                                              {'Type': 'FFProbe', 'Media UUID': media_id,
                                               'Media Path': file_name}),
                                          properties=pika.BasicProperties(content_type='text/plain',
                                                                          delivery_mode=1))
                    if new_class_type_uuid != class_text_dict['Music']:
                        # Send a message so roku thumbnail is generated
                        channel.basic_publish(exchange='mkque_roku_ex',
                                              routing_key='mkroku',
                                              body=json.dumps(
                                                  {'Type': 'Roku', 'Subtype': 'Thumbnail',
                                                   'Media UUID': media_id,
                                                   'Media Path': file_name}),
                                              properties=pika.BasicProperties(content_type='text/plain',
                                                                              delivery_mode=1))
                # verify it should save a dl "Z" record for search/lookup/etc
                if save_dl_record:
                    # media id begin and download que insert
                    thread_db.db_download_insert('Z', 0, json.dumps({'MediaID': media_id,
                                                                     'Path': file_name,
                                                                     'ClassID': new_class_type_uuid,
                                                                     'Status': None,
                                                                     'MetaNewID': str(uuid.uuid4()),
                                                                     'ProviderMetaID': None}))
        total_scanned += 1
        thread_db.db_audit_path_update_status(
            dir_guid,
            json.dumps({'Status': 'File scan: '
                                  + common_internationalization.com_inter_number_format(total_scanned)
                                  + ' / '
                                  + common_internationalization.com_inter_number_format(total_file_in_dir),
                        'Pct': (total_scanned / total_file_in_dir) * 100}))
        thread_db.db_commit()
    # end of for loop for each file in library
    common_global.es_inst.com_elastic_index('info',
                                            {'worker dir done': dir_path,
                                             'media class': media_class_type_uuid})
    # set to None so it no longer shows up on the admin status page
    thread_db.db_audit_path_update_status(dir_guid, None)
    if total_files > 0:
        # add notification to admin status page
        thread_db.db_notification_insert(
            common_internationalization.com_inter_number_format(total_files)
            + " file(s) added from " + dir_path, True)
    thread_db.db_commit()
    thread_db.db_close()
    return
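# a minimal sketch of how the directory workers might be dispatched; the pool
# size and the thread_db.db_audit_paths() call/row layout are assumptions here
from multiprocessing.pool import ThreadPool

audit_pool = ThreadPool(processes=3)
for audit_row in thread_db.db_audit_paths():  # hypothetical fetch of audit dir rows
    audit_pool.apply_async(worker, ((audit_row['mm_media_dir_path'],
                                     audit_row['mm_media_dir_class_type'],
                                     audit_row['mm_media_dir_guid']),))
audit_pool.close()
audit_pool.join()
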
def setup_class(self):
    # open the database
    option_config_json, db_connection = common_config_ini.com_config_read(db_prod=False)
    self.thetvdb_connection = common_thetvdb.CommonTheTVDB(
        option_config_json)
def setup_class(self):
    # open the database
    option_config_json, db_connection = common_config_ini.com_config_read(db_prod=False)
    self.musicbrainz_connection = common_metadata_musicbrainz.CommonMetadataMusicbrainz(
        option_config_json)
def load_hash_map_from_database(self):
     if '-nolist' in sys.argv:
         return True
     common_global.es_inst.com_elastic_index('info', {'stuff': "loading roms from db"})
     # open the database
     option_config_json, db_connection = common_config_ini.com_config_read()
     # read all the audited games
     conn_game = connect('db/game_database.db')
     curs_game = conn_game.cursor()
     conn_game.text_factory = lambda x: str(x, "utf-8", "ignore")
     curs_game.execute("attach database 'db/hubcade_gui.db' as gui_db")
     curs_game.execute('select gs_system_long_name,gi_short_name,gi_long_name,gi_id,'
                       '(select gm_rotate from game_monitor where gm_id = gi_monitor_id),gi_players,'
                       'gc_category from game_info,gui_db.game_audit,game_systems,game_category'
                       ' where gi_id = gui_db.game_audit.ga_game_id and gs_id = gi_system_id'
                       ' and gi_gc_category = gc_id union all select \'Arcade\',gi_short_name,gi_long_name,'
                       'gi_id,(select gm_rotate from game_monitor where gm_id = gi_monitor_id),gi_players,'
                       'gc_category from game_info,gui_db.game_audit,game_category where gi_system_id = 0'
                       ' and gi_id = gui_db.game_audit.ga_game_id and gi_gc_category = gc_id')
     # for the times/time played
     conn_game_info = connect('db/hubcade_gui.db')
     curs_game_info = conn_game_info.cursor()
     conn_game_info.text_factory = lambda x: str(x, "utf-8", "ignore")
     # begin parse of data
     Client_GlobalData.audited_games = 0
     Client_GlobalData.audit_gameList = {}
     old_system_long_name = None
     first_record = True
     game_info = {}
     for sql_row in curs_game:
         Client_GlobalData.audited_games += 1
         game_times_played = 0
         game_time_played = 0
         game_monitor = "NA"
         game_players = 0
         game_category = "NA"
         sql_args = (str(sql_row[3]),)
         curs_game_info.execute('select game_times_played,game_time_played from game_info'
                                ' where game_rom_id = ?', sql_args)
         row = curs_game_info.fetchone()
         if row is not None:
             game_times_played = row[0]
             game_time_played = row[1]
         if sql_row[4] is not None:
             if int(sql_row[4]) == 0 or int(sql_row[4]) == 180:
                 game_monitor = "Horizontal"
             else:
                 game_monitor = "Vertical"
         if sql_row[5] is not None:
             game_players = sql_row[5]
         if sql_row[6] is not None:
             game_category = sql_row[6]
         if first_record:
             old_system_long_name = sql_row[0]
             first_record = False
         game_name = sql_row[1]
         if sql_row[2] is not None:
             game_name = sql_row[2]
         if old_system_long_name != sql_row[0]:
             if len(game_info) > 0:
                 Client_GlobalData.audit_gameList[old_system_long_name] \
                     = copy.deepcopy(list(game_info.items()))
                 Client_GlobalData.audit_gameList[old_system_long_name].sort()
             old_system_long_name = sql_row[0]
             game_info = {}
         game_info[game_name] = game_times_played, game_time_played, game_monitor, \
                                game_players, str(sql_row[3]), game_category
     # catch last data from db
     if old_system_long_name is not None and len(game_info) > 0:
         Client_GlobalData.audit_gameList[old_system_long_name] \
             = copy.deepcopy(list(game_info.items()))
         Client_GlobalData.audit_gameList[old_system_long_name].sort()
     curs_game_info.close()
     conn_game_info.close()
     curs_game.close()
     conn_game.close()
     # close the database
     db_connection.db_close()
     return True