Example #1
def episodes(series_id, season):
    xbmcplugin.setContent(plugin_handle, 'episodes')
    content = get.series_info(series_id)
    content = json.loads(content)['video']['seasons']
    for test in content:
        episode_season = unicode(test['seq'])
        if episode_season == season:
            for item in test['episodes']:
                playcount = 0
                episode_id = item['episodeId']
                episode_nr = item['seq']
                episode_title = (unicode(episode_nr) + '. ' + item['title'])
                duration = item['runtime']
                offset = item['bookmark']['offset']
                if duration > 0 and float(offset) / float(duration) >= 0.9:
                    playcount = 1
                description = item['synopsis']
                try:
                    thumb = item['stills'][0]['url']
                except Exception:
                    thumb = utility.addon_fanart()
                add.episode(episode_title, episode_id, 'play_video_main', thumb, description, duration, season,
                            episode_nr, series_id, playcount)
    if utility.get_setting('force_view'):
        xbmc.executebuiltin('Container.SetViewMode(' + utility.get_setting('view_id_episodes') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #2
def search(search_string, video_type, run_as_widget=False):
    i = 1
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    post_data = '{"paths":[["search","%s",{"from":0,"to":48},["summary","title"]],["search","%s",["id","length",' \
                '"name","trackIds","requestId"]]],"authURL":"%s"}' % (search_string, search_string,
                                                                      utility.get_setting('authorization_url'))
    content = utility.decode(connect.load_site(utility.evaluator(), post=post_data))
    try:
        matches = json.loads(content)['value']['videos']
        for k in matches:
            if not run_as_widget:
                utility.progress_window(loading_progress, i * 100 / len(matches), '...')
            video(unicode(matches[k]['summary']['id']), '', '', False, False, video_type, '')
            i += 1
        if utility.get_setting('force_view') and not run_as_widget:
            xbmc.executebuiltin('Container.SetViewMode(' + utility.get_setting('view_id_videos') + ')')
        xbmcplugin.endOfDirectory(plugin_handle)
    except Exception:
        utility.notification(utility.get_string(30306))
        pass
Example #3
def episodes(series_id, season):
    xbmcplugin.setContent(plugin_handle, 'episodes')
    content = get.series_info(series_id)
    content = json.loads(content)['video']['seasons']
    for test in content:
        episode_season = unicode(test['seq'])
        if episode_season == season:
            for item in test['episodes']:
                episode_id = item['episodeId']
                episode_nr = item['seq']
                episode_title = (unicode(episode_nr) + '. ' + item['title'])
                duration = item['runtime']
                offset = item['bookmark']['offset']
                playcount = 0
                if duration > 0 and float(offset) / float(duration) >= 0.9:
                    playcount = 1
                description = item['synopsis']
                try:
                    thumb = item['stills'][0]['url']
                except Exception:
                    thumb = utility.addon_fanart()
                add.episode(episode_title, episode_id, 'play_video_main',
                            thumb, description, duration, season, episode_nr,
                            series_id, playcount)
    if utility.get_setting('force_view'):
        xbmc.executebuiltin('Container.SetViewMode(' +
                            utility.get_setting('view_id_episodes') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #4
def search(search_string, video_type, run_as_widget=False):
    i = 1
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    post_data = '{"paths":[["search","%s",{"from":0,"to":48},["summary","title"]],["search","%s",["id","length",' \
                '"name","trackIds","requestId"]]],"authURL":"%s"}' % (search_string, search_string,
                                                                      utility.get_setting('authorization_url'))
    content = utility.decode(
        connect.load_site(utility.evaluator_url %
                          (utility.get_setting('netflix_application'),
                           utility.get_setting('netflix_version')),
                          post=post_data))
    try:
        matches = json.loads(content)['value']['videos']
        for k in matches:
            if not run_as_widget:
                utility.progress_window(loading_progress,
                                        i * 100 / len(matches), '...')
            video(unicode(matches[k]['summary']['id']), '', '', False, False,
                  video_type, '')
            i += 1
        if utility.get_setting('force_view') and not run_as_widget:
            xbmc.executebuiltin('Container.SetViewMode(' +
                                utility.get_setting('view_id_videos') + ')')
        xbmcplugin.endOfDirectory(plugin_handle)
    except Exception:
        utility.notification(utility.get_string(30306))
        pass
Example #5
def videos(url, video_type, run_as_widget=False):
    post_data = ''
    i = 1
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    if 'recently-added' in url:
        post_data = utility.recently_added % utility.get_setting('authorization_url')
    elif 'genre' in url:
        post_data = utility.genre % (url.split('?')[1], utility.get_setting('authorization_url'))
    elif 'my-list' in url:
        post_data = utility.my_list % utility.get_setting('authorization_url')
    content = utility.decode(connect.load_site(utility.evaluator(), post=post_data))
    matches = json.loads(content)['value']['videos']
    for video_id in matches:
        if not run_as_widget:
            utility.progress_window(loading_progress, i * 100 / len(matches), matches[video_id]['title'])
        video(unicode(video_id), '', '', False, False, video_type, url)
        i += 1
    if utility.get_setting('force_view') == 'true' and not run_as_widget:
        xbmc.executebuiltin('Container.SetViewMode(' + utility.get_setting('view_id_videos') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #6
def videos(url, video_type, run_as_widget=False):
    post_data = ''
    i = 1
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    if 'recently-added' in url:
        post_data = utility.recently_added % utility.get_setting(
            'authorization_url')
    elif 'genre' in url:
        post_data = utility.genre % (url.split('?')[1],
                                     utility.get_setting('authorization_url'))
    elif 'my-list' in url:
        post_data = utility.my_list % utility.get_setting('authorization_url')
    content = utility.decode(
        connect.load_site(utility.evaluator(), post=post_data))
    matches = json.loads(content)['value']['videos']
    for video_id in matches:
        if not run_as_widget:
            utility.progress_window(loading_progress, i * 100 / len(matches),
                                    matches[video_id]['title'])
        video(unicode(video_id), '', '', False, False, video_type, url)
        i += 1
    if utility.get_setting('force_view') == 'true' and not run_as_widget:
        xbmc.executebuiltin('Container.SetViewMode(' +
                            utility.get_setting('view_id_videos') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #7
def load():
    if utility.get_setting("selected_profile"):
        connect.load_site(utility.profile_switch_url + utility.get_setting("selected_profile"))
        connect.save_session()
    else:
        utility.log("Load profile: no stored profile found!", loglevel=xbmc.LOGERROR)
    get_my_list_change_authorisation()
Example #8
def genres(video_type):
    post_data = ''
    match = []
    xbmcplugin.addSortMethod(plugin_handle, xbmcplugin.SORT_METHOD_LABEL)
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    if video_type == 'tv':
        post_data = utility.series_genre % utility.get_setting(
            'authorization_url')
    elif video_type == 'movie':
        post_data = utility.movie_genre % utility.get_setting(
            'authorization_url')
    else:
        pass
    content = utility.decode(
        connect.load_site(utility.evaluator(), post=post_data))
    matches = json.loads(content)['value']['genres']
    for item in matches:
        try:
            match.append(
                (unicode(matches[item]['id']), matches[item]['menuName']))
        except Exception:
            try:
                match.append((unicode(matches[item]['summary']['id']),
                              matches[item]['summary']['menuName']))
            except Exception:
                pass
    for genre_id, title in match:
        if video_type == 'tv':
            add.directory(title, 'genre?' + genre_id, 'list_videos', '',
                          video_type)
        elif not genre_id == '83' and video_type == 'movie':
            add.directory(title, 'genre?' + genre_id, 'list_videos', '',
                          video_type)
    xbmcplugin.endOfDirectory(plugin_handle)
Example #9
def genres(video_type):
    post_data = ''
    match = []
    xbmcplugin.addSortMethod(plugin_handle, xbmcplugin.SORT_METHOD_LABEL)
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    if video_type == 'tv':
        post_data = '{"paths":[["genres",83,"subgenres",{"from":0,"to":20},"summary"],["genres",83,"subgenres",' \
                    '"summary"]],"authURL":"%s"}' % utility.get_setting('authorization_url')
    elif video_type == 'movie':
        post_data = '{"paths":[["genreList",{"from":0,"to":24},["id","menuName"]],["genreList"]],"authURL":"%s"}' \
                    % utility.get_setting('authorization_url')
    else:
        pass
    content = utility.decode(connect.load_site(utility.evaluator(), post=post_data))
    matches = json.loads(content)['value']['genres']
    for item in matches:
        try:
            match.append((unicode(matches[item]['id']), matches[item]['menuName']))
        except Exception:
            try:
                match.append((unicode(matches[item]['summary']['id']), matches[item]['summary']['menuName']))
            except Exception:
                pass
    for genre_id, title in match:
        if video_type == 'tv':
            add.directory(title, utility.main_url + '/browse/genre/' + genre_id + '?bc=83', 'list_videos', '',
                          video_type)
        elif not genre_id == '83' and video_type == 'movie':
            add.directory(title, utility.main_url + '/browse/genre/' + genre_id, 'list_videos', '', video_type)
    xbmcplugin.endOfDirectory(plugin_handle)
Example #10
def genres(video_type):
    post_data = ''
    match = []
    xbmcplugin.addSortMethod(plugin_handle, xbmcplugin.SORT_METHOD_LABEL)
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    if video_type == 'tv':
        post_data = utility.series_genre % utility.get_setting('authorization_url')
    elif video_type == 'movie':
        post_data = utility.movie_genre % utility.get_setting('authorization_url')
    else:
        pass
    content = utility.decode(connect.load_site(utility.evaluator(), post=post_data))
    matches = json.loads(content)['value']['genres']
    for item in matches:
        try:
            match.append((unicode(matches[item]['id']), matches[item]['menuName']))
        except Exception:
            try:
                match.append((unicode(matches[item]['summary']['id']), matches[item]['summary']['menuName']))
            except Exception:
                pass
    for genre_id, title in match:
        if video_type == 'tv':
            add.directory(title, 'genre?' + genre_id, 'list_videos', '', video_type)
        elif not genre_id == '83' and video_type == 'movie':
            add.directory(title, 'genre?' + genre_id, 'list_videos', '', video_type)
    xbmcplugin.endOfDirectory(plugin_handle)
Example #11
def load():
    if utility.get_setting('selected_profile'):
        connect.load_site(utility.profile_switch_url +
                          utility.get_setting('selected_profile'))
        connect.save_session()
    else:
        utility.log('Load profile: no stored profile found!',
                    loglevel=xbmc.LOGERROR)
Example #12
def index():
    if login.login():
        add.directory(utility.get_string(30100), '', 'main', '', 'movie')
        add.directory(utility.get_string(30101), '', 'main', '', 'tv')
        add.directory(utility.get_string(30102), '', 'wi_home', '', 'both')
        if not utility.get_setting('single_profile') == 'true':
            add.directory(
                utility.get_string(30103) + ' - [COLOR FF8E0000]' + utility.get_setting('profile_name') + '[/COLOR]',
                '', 'update_displayed_profile', 'DefaultAddonService.png', '', context_enable=False)
        xbmcplugin.endOfDirectory(plugin_handle)
Example #13
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    signature = __name__ + '.solve(custom_config)'
    logger = logging.getLogger(__name__)
    utility.log_entrance(logger, signature, {'custom_config': custom_config})

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config,
                                    'HDF5_FILE')
    utility.check_is_file(hdf5_file,
                          'The path to the hdf5 file configured by HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX,
                                       custom_config, 'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ,
                                       custom_config, 'GREEN_TABULATION_NUMZ')

    n_points_simpson = utility.get_setting(
        settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
        'GREEN_TABULATION_SIMPSON_NPOINTS')

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ),
                dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ),
                dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ),
                dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)
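
The tabulation settings above are written back with utility.require_dataset, which appears to wrap h5py's own Group.require_dataset. Below is a minimal standalone sketch of that "require, then assign" pattern using plain h5py; the file name, dataset path, and value are illustrative placeholders, not taken from OpenWARP.

import h5py

# Hypothetical names for illustration only: 'example.hdf5' and
# 'solver/green_tabulation_numx' are not OpenWARP paths.
with h5py.File('example.hdf5', 'a') as hdf5_db:
    # Create the one-element integer dataset if it does not already exist ...
    dset = hdf5_db.require_dataset('solver/green_tabulation_numx', (1,), dtype='i')
    # ... then overwrite its value, as the examples above do for each setting.
    dset[:] = 328  # arbitrary example value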
Example #14
def index():
    if login.login():
        add.directory(utility.get_string(30100), '', 'main', '', 'movie')
        add.directory(utility.get_string(30101), '', 'main', '', 'tv')
        add.directory(utility.get_string(30102), '', 'wi_home', '', 'both')
        if not utility.get_setting('single_profile') == 'true':
            add.directory(utility.get_string(30103) + ' - [COLOR FF8E0000]' +
                          utility.get_setting('profile_name') + '[/COLOR]',
                          '',
                          'update_displayed_profile',
                          'DefaultAddonService.png',
                          '',
                          context_enable=False)
        xbmcplugin.endOfDirectory(plugin_handle)
Example #15
def view_activity(video_type, run_as_widget=False):
    count = 0
    video_ids = []
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    content = utility.decode(
        connect.load_site(utility.main_url + '/WiViewingActivity'))
    series_id = re.compile('(<li .*?data-series=.*?</li>)',
                           re.DOTALL).findall(content)
    for i in range(1, len(series_id), 1):
        entry = series_id[i]
        if not run_as_widget:
            utility.progress_window(loading_progress,
                                    (count + 1) * 100 / len(series_id), '...')
        match_id = re.compile('data-movieid="(.*?)"', re.DOTALL).findall(entry)
        if match_id:
            video_id = match_id[0]
        match = re.compile('class="col date nowrap">(.+?)<',
                           re.DOTALL).findall(entry)
        date = match[0]
        match_title1 = re.compile('class="seriestitle">(.+?)</a>',
                                  re.DOTALL).findall(entry)
        match_title2 = re.compile('class="col title">.+?>(.+?)<',
                                  re.DOTALL).findall(entry)
        if match_title1:
            title = utility.unescape(match_title1[0]).replace('</span>', '')
        elif match_title2:
            title = match_title2[0]
        else:
            title = ''
        title = date + ' - ' + title
        if video_id not in video_ids:
            video_ids.append(video_id)
            # due to limitations in the netflix api, there is no way to get the series_id of an
            # episode, so the 4th param is set to True to treat tv episodes the same as movies.
            added = video(video_id, title, '', True, False, video_type, '')
            if added:
                count += 1
            if count == 20:
                break
    if utility.get_setting('force_view') and not run_as_widget:
        xbmc.executebuiltin('Container.SetViewMode(' +
                            utility.get_setting('view_id_activity') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #16
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config,
                                    'HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX,
                                       custom_config, 'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ,
                                       custom_config, 'GREEN_TABULATION_NUMZ')

    n_points_simpson = utility.get_setting(
        settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
        'GREEN_TABULATION_SIMPSON_NPOINTS')

    utility.validate_file(hdf5_file, 'HDF5_FILE')
    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ),
                dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ),
                dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ),
                dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)
Example #17
def series_info(series_id):
    content = ''
    cache_file = xbmc.translatePath(utility.cache_dir() + series_id + '_episodes.cache')
    if xbmcvfs.exists(cache_file) and (time.time() - xbmcvfs.Stat(cache_file).st_mtime() < 60 * 5):
        file_handler = xbmcvfs.File(cache_file, 'rb')
        content = file_handler.read()
        file_handler.close()
    if not content:
        url = utility.series_url % (utility.get_setting('netflix_application'), utility.get_setting('netflix_id'),
                                    series_id)
        content = connect.load_site(url)
        file_handler = xbmcvfs.File(cache_file, 'wb')
        file_handler.write(content)
        file_handler.close()
    return utility.decode(content)
Example #18
def view_activity(video_type, run_as_widget=False):
    count = 0
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    content = utility.decode(
        connect.load_site(utility.activity_url %
                          (utility.get_setting('api_url'),
                           utility.get_setting('authorization_url'))))
    matches = json.loads(content)['viewedItems']
    try:
        for item in matches:
            series_id = 0
            is_episode = False
            video_id = unicode(item['movieID'])
            date = item['dateStr']
            try:
                series_id = item['series']
                series_title = item['seriesTitle']
                title = item['title']
                title = series_title + ' ' + title
            except Exception:
                title = item['title']
            if not run_as_widget:
                utility.progress_window(loading_progress,
                                        (count + 1) * 500 / len(matches),
                                        title)
            title = date + ' - ' + title
            if series_id > 0:
                is_episode = True
            added = video(video_id, title, '', is_episode, False, video_type,
                          '')
            if added:
                count += 1
            if count == 20:
                break
    except Exception:
        utility.notification(utility.get_string(30306))
        pass
    if utility.get_setting('force_view') and not run_as_widget:
        xbmc.executebuiltin('Container.SetViewMode(' +
                            utility.get_setting('view_id_activity') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #19
def postprocess(custom_config):
    """
    Configure and then run the postprocessor

    Args:
        custom_config, dict The custom configuration dictionary
    """
    signature = __name__ + '.postprocess(custom_config)'
    logger = logging.getLogger(__name__)
    utility.log_entrance(logger, signature, {'custom_config': custom_config})

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config,
                                    'HDF5_FILE')
    utility.check_is_file(hdf5_file,
                          'The path to the hdf5 file configured by HDF5_FILE')

    #utility.validate_file(hdf5_file, 'HDF5_FILE')
    with h5py.File(hdf5_file, "a") as hdf5_db:
        run(hdf5_db, custom_config)

    utility.log_and_print(
        logger, 'The post processing results are saved in the hdf5 file ' +
        utility.get_abs(hdf5_file))

    utility.log_exit(logging.getLogger(__name__), signature, [None])
Example #20
def postprocess(custom_config):
    """
    Configure and then run the postprocessor

    Args:
        custom_config, dict The custom configuration dictionary
    """
    signature = __name__ + '.postprocess(custom_config)'
    logger = logging.getLogger(__name__)
    utility.log_entrance(logger, signature,
                        {'custom_config': custom_config})

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')
    utility.check_is_file(hdf5_file, 'The path to the hdf5 file configured by HDF5_FILE')

    #utility.validate_file(hdf5_file, 'HDF5_FILE')
    with h5py.File(hdf5_file, "a") as hdf5_db:
        run(hdf5_db, custom_config)

    utility.log_and_print(logger, 'The post processing results are saved in the hdf5 file '
                          + utility.get_abs(hdf5_file))

    utility.log_exit(logging.getLogger(__name__), signature, [None])
Example #21
def series(series_id, series_title, season, single_update=True):
    filename = utility.clean_filename(series_title, ' .')
    series_file = xbmc.translatePath(utility.tv_dir() + filename)
    if not xbmcvfs.exists(series_file):
        xbmcvfs.mkdir(series_file)
    content = get.series_info(series_id)
    content = json.loads(content)['video']['seasons']
    for test in content:
        episode_season = unicode(test['seq'])
        if episode_season == season or season == '':
            season_dir = utility.create_pathname(series_file, test['title'])
            if not xbmcvfs.exists(season_dir):
                xbmcvfs.mkdir(season_dir)
            for item in test['episodes']:
                episode_id = unicode(item['episodeId'])
                episode_nr = unicode(item['seq'])
                episode_title = item['title']
                if len(episode_nr) == 1:
                    episode_nr = '0' + episode_nr
                season_nr = episode_season
                if len(season_nr) == 1:
                    season_nr = '0' + season_nr
                filename = 'S' + season_nr + 'E' + episode_nr + ' - ' + episode_title + '.strm'
                filename = utility.clean_filename(filename, ' .')
                file_handler = xbmcvfs.File(
                    utility.create_pathname(season_dir, filename), 'w')
                file_handler.write(
                    utility.encode('plugin://%s/?mode=play_video&url=%s' %
                                   (utility.addon_id, episode_id)))
                file_handler.close()
    if utility.get_setting('update_db') and single_update:
        xbmc.executebuiltin('UpdateLibrary(video)')
Example #22
def video(name, url, mode, thumb, video_type='', description='', duration='', year='', mpaa='', director='', genre='',
          rating=0.0, playcount=0, remove=False):
    entries = []
    filename = utility.clean_filename(url) + '.jpg'
    cover_file = xbmc.translatePath(utility.cover_cache_dir() + filename)
    fanart_file = xbmc.translatePath(utility.fanart_cache_dir() + filename)
    if xbmcvfs.exists(cover_file):
        thumb = cover_file
    u = sys.argv[0]
    u += '?url=' + urllib.quote_plus(url)
    u += '&mode=' + mode
    u += '&name=' + urllib.quote_plus(utility.encode(name))
    u += '&thumb=' + urllib.quote_plus(thumb)
    list_item = xbmcgui.ListItem(name)
    list_item.setArt({'icon': 'DefaultTVShows.png', 'thumb': thumb})
    list_item.setInfo(type='video',
                      infoLabels={'title': name, 'plot': description, 'duration': duration, 'year': int(year),
                                  'mpaa': mpaa, 'director': director, 'genre': genre, 'rating': rating,
                                  'playcount': playcount})
    if xbmcvfs.exists(fanart_file):
        list_item.setProperty('fanart_image', fanart_file)
    elif xbmcvfs.exists(cover_file):
        list_item.setProperty('fanart_image', cover_file)
    else:
        list_item.setProperty('fanart_image', utility.addon_fanart())
    if video_type == 'tvshow':
        if utility.get_setting('browse_tv_shows') == 'true':
            entries.append((utility.get_string(30151),
                            'Container.Update(plugin://%s/?mode=play_video_main&url=%s&thumb=%s)' % (
                                utility.addon_id, urllib.quote_plus(url), urllib.quote_plus(thumb))))
        else:
            entries.append((utility.get_string(30152),
                            'Container.Update(plugin://%s/?mode=list_seasons&url=%s&thumb=%s)' % (
                                utility.addon_id, urllib.quote_plus(url), urllib.quote_plus(thumb))))
    if video_type != 'episode':
        entries.append((utility.get_string(30153), 'RunPlugin(plugin://%s/?mode=play_trailer&url=%s&type=%s)' % (
            utility.addon_id, urllib.quote_plus(utility.encode(name)), video_type)))
        if remove:
            entries.append((utility.get_string(30154), 'RunPlugin(plugin://%s/?mode=remove_from_queue&url=%s)' % (
                utility.addon_id, urllib.quote_plus(url))))
        else:
            entries.append((utility.get_string(30155), 'RunPlugin(plugin://%s/?mode=add_to_queue&url=%s)' % (
                utility.addon_id, urllib.quote_plus(url))))
        entries.append((utility.get_string(30156),
                        'Container.Update(plugin://%s/?mode=list_videos&url=%s&type=movie)' % (
                            utility.addon_id, urllib.quote_plus(utility.main_url + '/WiMovie/' + url))))
        entries.append((utility.get_string(30157), 'Container.Update(plugin://%s/?mode=list_videos&url=%s&type=tv)' % (
            utility.addon_id, urllib.quote_plus(utility.main_url + '/WiMovie/' + url))))
    if video_type == 'tvshow':
        entries.append((utility.get_string(30150),
                        'RunPlugin(plugin://%s/?mode=add_series_to_library&url=&name=%s&series_id=%s)' % (
                            utility.addon_id, urllib.quote_plus(utility.encode(name.strip())), urllib.quote_plus(url))))
    elif video_type == 'movie':
        entries.append((utility.get_string(30150),
                        'RunPlugin(plugin://%s/?mode=add_movie_to_library&url=%s&name=%s)' % (
                            utility.addon_id, urllib.quote_plus(url),
                            urllib.quote_plus(utility.encode(name.strip())) + ' (' + unicode(year) + ')')))
    list_item.addContextMenuItems(entries)
    directory_item = xbmcplugin.addDirectoryItem(handle=plugin_handle, url=u, listitem=list_item, isFolder=True)
    return directory_item
Example #23
def series(series_id, series_title, season, single_update=True):
    filename = utility.clean_filename(series_title, ' .')
    series_file = xbmc.translatePath(utility.tv_dir() + filename)
    if not xbmcvfs.exists(series_file):
        xbmcvfs.mkdir(series_file)
    content = get.series_info(series_id)
    content = json.loads(content)['video']['seasons']
    for test in content:
        episode_season = unicode(test['seq'])
        if episode_season == season or season == '':
            season_dir = utility.create_pathname(series_file, test['title'])
            if not xbmcvfs.exists(season_dir):
                xbmcvfs.mkdir(season_dir)
            for item in test['episodes']:
                episode_id = unicode(item['episodeId'])
                episode_nr = unicode(item['seq'])
                episode_title = item['title']
                if len(episode_nr) == 1:
                    episode_nr = '0' + episode_nr
                season_nr = episode_season
                if len(season_nr) == 1:
                    season_nr = '0' + season_nr
                filename = 'S' + season_nr + 'E' + episode_nr + ' - ' + episode_title + '.strm'
                filename = utility.clean_filename(filename, ' .')
                file_handler = xbmcvfs.File(utility.create_pathname(season_dir, filename), 'w')
                file_handler.write(
                    utility.encode('plugin://%s/?mode=play_video&url=%s' % (utility.addon_id, episode_id)))
                file_handler.close()
    if utility.get_setting('update_db') and single_update:
        xbmc.executebuiltin('UpdateLibrary(video)')
Example #24
def preprocess(custom_config):
    """
    Configure and then run the preprocessor

    Args:
        custom_config, dict The custom configuration dictionary
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config,
                                    'HDF5_FILE')
    nemoh_cal = utility.get_setting(settings.NEMOH_CALCULATIONS_FILE,
                                    custom_config, 'NEMOH_CALCULATIONS_FILE')
    input_file = utility.get_setting(settings.NEMOH_INPUT_FILE, custom_config,
                                     'NEMOH_INPUT_FILE')
    utility.validate_string(hdf5_file, 'HDF5_FILE')
    if not nemoh_cal and not input_file:
        utility.validate_file(hdf5_file, 'HDF5_FILE')

    utility.mkdir_p(os.path.abspath(os.path.dirname(hdf5_file)))

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if nemoh_cal:
            utility.convert_calculations(nemoh_cal, hdf5_db)

        if input_file:
            utility.convert_input(input_file, hdf5_db)

        remove_irregular_frequencies = utility.get_setting(
            settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
            'REMOVE_IRREGULAR_FREQUENCIES')
        if remove_irregular_frequencies is not None:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES, (1, ),
                dtype='i')
            utility.set_hdf5_attributes(
                dset, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR)
            dset[:] = int(remove_irregular_frequencies)
        else:
            settings.REMOVE_IRREGULAR_FREQUENCIES = hdf5_db.get(
                structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)[0]

        run(hdf5_db, custom_config)
Example #25
def movie(movie_id, title, single_update=True):
    filename = utility.clean_filename(title + '.strm', ' .').strip(' .')
    movie_file = xbmc.translatePath(utility.movie_dir() + filename)
    file_handler = xbmcvfs.File(movie_file, 'w')
    file_handler.write(utility.encode('plugin://%s/?mode=play_video&url=%s' % (utility.addon_id, movie_id)))
    file_handler.close()
    if utility.get_setting('update_db') and single_update:
        xbmc.executebuiltin('UpdateLibrary(video)')
Example #26
File: solver.py Project: NREL/OpenWARP
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    signature = __name__ + '.solve(custom_config)'
    logger = logging.getLogger(__name__)
    utility.log_entrance(logger, signature,
                        {'custom_config': custom_config})

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')
    utility.check_is_file(hdf5_file, 'The path to the hdf5 file configured by HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX, custom_config,
                                       'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ, custom_config,
                                       'GREEN_TABULATION_NUMZ')

    

    n_points_simpson = utility.get_setting(settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
                                           'GREEN_TABULATION_SIMPSON_NPOINTS')

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ), dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ), dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ), dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)
Example #27
def series_info(series_id):
    content = ''
    cache_file = xbmc.translatePath(utility.cache_dir() + series_id +
                                    '_episodes.cache')
    if xbmcvfs.exists(cache_file) and (
            time.time() - xbmcvfs.Stat(cache_file).st_mtime() < 60 * 5):
        file_handler = xbmcvfs.File(cache_file, 'rb')
        content = file_handler.read()
        file_handler.close()
    if not content:
        url = utility.series_url % (utility.get_setting('netflix_application'),
                                    utility.get_setting('netflix_version'),
                                    series_id)
        content = connect.load_site(url)
        file_handler = xbmcvfs.File(cache_file, 'wb')
        file_handler.write(content)
        file_handler.close()
    return utility.decode(content)
Example #28
def train_with_args(scats, junction, model_to_train):
    """ Start the training process with specific arguments

    Parameters:
        scats (int): the scats site identifier
        junction (int): the VicRoads internal id for the location
        model_to_train (String): the neural network model to train
    """

    config = get_setting(
        "train"
    )  # Get the config, e.g: {'lag': 12, 'batch': 256, 'epochs': 600}
    print(f"(train.py) CONFIG: {config}")
    file_directory = 'model/' + model_to_train
    if scats != "All":
        junctions = SCATS_DATA.get_scats_approaches(
            scats)  # Get array of scats approaches, e.g: [1, 3, 5, 7]
        print(f"(train.py) SCATS SITES: {junctions}")
        file_directory = f"{file_directory}/{scats}/"
        filename = junction
        if junction != "All":  # If the junction in args is not all...
            junctions = [junction]
            print(f"(train.py) SCATS SITES: {junctions}"
                  )  # ... set args to be the junctions e.g.: ['1']
            # TODO: Determine if strings are an issue here
        for junction in junctions:
            print("Training {0}/{1} using a {2} model...".format(
                scats, junction, model_to_train))
            x_train, y_train, _, _, _ = process_data(scats, junction,
                                                     config["lag"])
    else:
        file_directory = f"{file_directory}/Generalised/"
        filename = "Model"
        print("Training a generalised {0} model...".format(model_to_train))
        x_train, y_train = SCATS_DATA.get_training_data()
        scats_site = "All"
        junction = "All"

    print(
        f"(train.py) XTRAIN[0]: {x_train[0][:10]} \n XTRAIN[1]: {x_train[1][:10]} \n YTRAIN: {y_train[:10]}"
    )
    print(
        f"(traint.py) XTRAIN SHAPE: {x_train.shape} \n YTRAIN SHAPE: {y_train.shape}"
    )

    if os.path.isfile(f"{file_directory}{filename}.h5"):
        m = load_model(f"{file_directory}{filename}.h5")
    else:
        input_shape = (x_train.shape[1], )
        m = generate_new_model(model_to_train, input_shape)

    if model_to_train == 'seas':
        x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1]))
        train_seas(m, x_train, y_train, file_directory, filename, config)
    else:
        x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
        train_model(m, x_train, y_train, file_directory, filename, config)
Example #29
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX, custom_config,
                                       'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ, custom_config,
                                       'GREEN_TABULATION_NUMZ')

    

    n_points_simpson = utility.get_setting(settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
                                           'GREEN_TABULATION_SIMPSON_NPOINTS')

    utility.validate_file(hdf5_file, 'HDF5_FILE')
    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ), dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ), dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ), dtype='i')
            dset[:] = n_points_simpson



        return run(hdf5_db)
Example #30
def view_activity(video_type, run_as_widget=False):
    count = 0
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    content = utility.decode(connect.load_site(utility.activity_url % (
        utility.get_setting('netflix_application'), utility.get_setting('netflix_id'),
        utility.get_setting('authorization_url'))))
    matches = json.loads(content)['viewedItems']
    try:
        for item in matches:
            series_id = 0
            is_episode = False
            video_id = unicode(item['movieID'])
            date = item['dateStr']
            try:
                series_id = item['series']
                series_title = item['seriesTitle']
                title = item['title']
                title = series_title + ' ' + title
            except Exception:
                title = item['title']
            if not run_as_widget:
                utility.progress_window(loading_progress, (count + 1) * 500 / len(matches), title)
            title = date + ' - ' + title
            if series_id > 0:
                is_episode = True
            added = video(video_id, title, '', is_episode, False, video_type, '')
            if added:
                count += 1
            if count == 20:
                break
    except Exception:
        utility.notification(utility.get_string(30306))
        pass
    if utility.get_setting('force_view') and not run_as_widget:
        xbmc.executebuiltin('Container.SetViewMode(' + utility.get_setting('view_id_activity') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #31
def movie(movie_id, title, single_update=True):
    filename = utility.clean_filename(title + '.strm', ' .').strip(' .')
    movie_file = xbmc.translatePath(utility.movie_dir() + filename)
    file_handler = xbmcvfs.File(movie_file, 'w')
    file_handler.write(
        utility.encode('plugin://%s/?mode=play_video&url=%s' %
                       (utility.addon_id, movie_id)))
    file_handler.close()
    if utility.get_setting('update_db') and single_update:
        xbmc.executebuiltin('UpdateLibrary(video)')
Example #32
def view_activity(video_type, run_as_widget=False):
    count = 0
    video_ids = []
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    content = utility.decode(connect.load_site(utility.main_url + '/WiViewingActivity'))
    series_id = re.compile('(<li .*?data-series=.*?</li>)', re.DOTALL).findall(content)
    for i in range(1, len(series_id), 1):
        entry = series_id[i]
        if not run_as_widget:
            utility.progress_window(loading_progress, (count + 1) * 100 / len(series_id), '...')
        match_id = re.compile('data-movieid="(.*?)"', re.DOTALL).findall(entry)
        if match_id:
            video_id = match_id[0]
        match = re.compile('class="col date nowrap">(.+?)<', re.DOTALL).findall(entry)
        date = match[0]
        match_title1 = re.compile('class="seriestitle">(.+?)</a>', re.DOTALL).findall(entry)
        match_title2 = re.compile('class="col title">.+?>(.+?)<', re.DOTALL).findall(entry)
        if match_title1:
            title = utility.unescape(match_title1[0]).replace('</span>', '')
        elif match_title2:
            title = match_title2[0]
        else:
            title = ''
        title = date + ' - ' + title
        if video_id not in video_ids:
            video_ids.append(video_id)
            # due to limitations in the netflix api, there is no way to get the series_id of an
            # episode, so the 4th param is set to True to treat tv episodes the same as movies.
            added = video(video_id, title, '', True, False, video_type, '')
            if added:
                count += 1
            if count == 20:
                break
    if utility.get_setting('force_view') and not run_as_widget:
        xbmc.executebuiltin('Container.SetViewMode(' + utility.get_setting('view_id_activity') + ')')
    xbmcplugin.endOfDirectory(plugin_handle)
Example #33
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument("--scats", default=970, help="SCATS site number.")
    parser.add_argument("--junction",
                        default=1,
                        help="The approach to the site.")
    args = parser.parse_args()

    models = []
    untrained_models = []
    model_names = ['LSTM', 'GRU', 'SAEs', 'FEEDFWD', 'DEEPFEEDFWD']
    """ Getting the trained models is split into two parts 
        because of some issues when removing items from a list that is being iterated over """
    for name in model_names:
        # Construct the path to the file
        file = "model/{0}/{1}/{2}.h5".format(name.lower(), args.scats,
                                             args.junction)

        if os.path.exists(file):
            models.append(load_model(file))
        else:
            untrained_models.append(name)

    for name in untrained_models:
        # Remove all untrained models so they are not included on the graph
        model_names.remove(name)

    lag = get_setting("train")["lag"]
    _, _, x_test, y_test, scaler = process_data(args.scats, args.junction, lag)
    y_test = scaler.inverse_transform(y_test.reshape(-1, 1)).reshape(1, -1)[0]

    y_preds = []
    mtx = []
    for name, model in zip(model_names, models):
        if name == 'SAEs':
            x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1]))
        else:
            x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
        file = 'images/' + name + '.png'
        plot_model(model, to_file=file, show_shapes=True)
        predicted = model.predict(x_test)
        predicted = scaler.inverse_transform(predicted.reshape(-1, 1)).reshape(
            1, -1)[0]
        y_preds.append(predicted[:96])
        print(f"X_TEST: {x_test[0]}")
        print(name)
        mtx.append(eva_regress(y_test, predicted))

    plot_results(y_test[:96], y_preds, model_names)
    plot_error(mtx)
    mae = "mae"
    print(f"\nMTX: {mtx}")
    print(f"\n{mtx[0][mae]}")
    print(f"\n{mtx[0].keys()}")
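
The two-pass handling of untrained models above (collect the names first, then remove them from model_names afterwards) avoids mutating a list while iterating over it, which is the issue the inline comment alludes to. A small self-contained illustration of that pitfall, not taken from the project:

names = ['LSTM', 'GRU', 'SAEs', 'FEEDFWD']
for name in names:
    if name != 'GRU':
        names.remove(name)  # shrinking the list shifts the remaining items left
print(names)  # ['GRU', 'FEEDFWD'] -- 'FEEDFWD' was never visited, so it survives by accident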
Example #34
def genres(video_type):
    post_data = ''
    match = []
    xbmcplugin.addSortMethod(plugin_handle, xbmcplugin.SORT_METHOD_LABEL)
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    if video_type == 'tv':
        post_data = '{"paths":[["genres",83,"subgenres",{"from":0,"to":20},"summary"],["genres",83,"subgenres",' \
                    '"summary"]],"authURL":"%s"}' % utility.get_setting('authorization_url')
    elif video_type == 'movie':
        post_data = '{"paths":[["genreList",{"from":0,"to":24},["id","menuName"]],["genreList"]],"authURL":"%s"}' \
                    % utility.get_setting('authorization_url')
    else:
        pass
    content = utility.decode(
        connect.load_site(utility.evaluator_url %
                          (utility.get_setting('netflix_application'),
                           utility.get_setting('netflix_version')),
                          post=post_data))
    matches = json.loads(content)['value']['genres']
    for k in matches:
        try:
            match.append((unicode(matches[k]['id']), matches[k]['menuName']))
        except Exception:
            try:
                match.append((unicode(matches[k]['summary']['id']),
                              matches[k]['summary']['menuName']))
            except Exception:
                pass
    for genre_id, title in match:
        if video_type == 'tv':
            add.directory(
                title,
                utility.main_url + '/browse/genre/' + genre_id + '?bc=83',
                'list_videos', '', video_type)
        elif not genre_id == '83' and video_type == 'movie':
            add.directory(title,
                          utility.main_url + '/browse/genre/' + genre_id,
                          'list_videos', '', video_type)
    xbmcplugin.endOfDirectory(plugin_handle)
Example #35
def preprocess(custom_config):
    """
    Configure and then run the preprocessor

    Args:
        custom_config, dict The custom configuration dictionary
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')
    nemoh_cal = utility.get_setting(settings.NEMOH_CALCULATIONS_FILE, custom_config, 'NEMOH_CALCULATIONS_FILE')
    input_file = utility.get_setting(settings.NEMOH_INPUT_FILE, custom_config, 'NEMOH_INPUT_FILE')
    utility.validate_string(hdf5_file, 'HDF5_FILE')
    if not nemoh_cal and not input_file:
        utility.validate_file(hdf5_file, 'HDF5_FILE')

    utility.mkdir_p(os.path.abspath(os.path.dirname(hdf5_file)))

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if nemoh_cal:
            utility.convert_calculations(nemoh_cal, hdf5_db)

        if input_file:
            utility.convert_input(input_file, hdf5_db)

        remove_irregular_frequencies = utility.get_setting(settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
                                       'REMOVE_IRREGULAR_FREQUENCIES')
        if remove_irregular_frequencies is not None:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES, (1, ), dtype='i')
            utility.set_hdf5_attributes(dset, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR)
            dset[:] = int(remove_irregular_frequencies)
        else:
            settings.REMOVE_IRREGULAR_FREQUENCIES = hdf5_db.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)[0]



        run(hdf5_db, custom_config)
Example #36
def video_info(video_id):
    content = ''
    cache_file = xbmc.translatePath(utility.cache_dir() + video_id + '.cache')
    if xbmcvfs.exists(cache_file):
        file_handler = xbmcvfs.File(cache_file, 'rb')
        content = file_handler.read()
        file_handler.close()
    if not content:
        post_data = utility.video_info % (video_id, video_id, video_id, video_id,
                                         utility.get_setting('authorization_url'))
        content = connect.load_site(utility.evaluator(), post=post_data)
        file_handler = xbmcvfs.File(cache_file, 'wb')
        file_handler.write(content)
        file_handler.close()
    return utility.decode(content)
Example #37
def postprocess(custom_config):
    """
    Configure and then run the postprocessor

    Args:
        custom_config, dict The custom configuration dictionary
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')

    utility.validate_file(hdf5_file, 'HDF5_FILE')
    with h5py.File(hdf5_file, "a") as hdf5_db:
        run(hdf5_db, custom_config)
Example #38
def add(video_id):
    post_data = utility.my_list % utility.get_setting('authorization_url')
    content = utility.decode(
        connect.load_site(utility.evaluator(), post=post_data))
    match = json.loads(content)['value']['videos']
    headers = {
        'Access-Control-Request-Headers': 'content-type, accept',
        'Access-Control-Request-Method': 'POST',
        'Origin': 'http://www.netflix.com'
    }
    content = utility.decode(
        connect.load_site(utility.evaluator() + '&method=call',
                          options=True,
                          headers=headers))
    print content
    cookies = {
        'lhpuuidh-browse-' + utility.get_setting('selected_profile'):
        urllib.quote_plus(
            utility.get_setting('language').split('-')[1] + ':' +
            utility.get_setting('language').upper() + ':' +
            utility.get_setting('root_list')),
        'SecureNetflixId':
        'v%3D2%26mac%3DAQEAEQABABRkPnYy2LvtMo02JH3beZhI4vKJAM2mLeM.%26dt%3D1449696369549'
    }
    post_data = utility.add_list % (
        utility.get_setting('root_list'), utility.get_setting('my_list'),
        video_id, utility.get_setting('track_id'), unicode(
            len(match)), utility.get_setting('authorization_url'))
    headers = {
        'Referer': 'http://www.netflix.com/browse',
        'Origin': 'http://www.netflix.com'
    }
    print post_data
    content = utility.decode(
        connect.load_site(utility.evaluator() + '&method=call',
                          cookies=cookies,
                          headers=headers,
                          post=post_data))
    print content
Example #39
def video_info(video_id):
    content = ''
    cache_file = xbmc.translatePath(utility.cache_dir() + video_id + '.cache')
    if xbmcvfs.exists(cache_file):
        file_handler = xbmcvfs.File(cache_file, 'rb')
        content = file_handler.read()
        file_handler.close()
    if not content:
        postdata = '{"paths":[["videos",{media_id},["bookmarkPosition","details","episodeCount","maturity","queue",' \
                   '"releaseYear","requestId","runtime","seasonCount","summary","title","userRating","watched"]],' \
                   '["videos",{media_id},"boxarts",["_342x192","_665x375"],"jpg"]],"authURL":"{authorization_url}"}' \
            .replace('{media_id}', video_id).replace('{authorization_url}', utility.get_setting('authorization_url'))
        content = connect.load_site(utility.evaluator(), post=postdata)
        file_handler = xbmcvfs.File(cache_file, 'wb')
        file_handler.write(content)
        file_handler.close()
    return utility.decode(content)
Example #40
    def set_text(self, main):
        """ Sets the text for all the controls

        Parameters:
            main  (QMainWindow): the parent object for the interface
        """

        self.model_label.setFont(DEFAULT_FONT)
        self.model_combo_box.setFont(DEFAULT_FONT)
        self.scats_number_label.setFont(DEFAULT_FONT)
        self.scats_number_combo_box.setFont(DEFAULT_FONT)
        self.junction_label.setFont(DEFAULT_FONT)
        self.junction_combo_box.setFont(DEFAULT_FONT)
        self.lag_label.setFont(LABEL_FONT)
        self.batches_label.setFont(LABEL_FONT)
        self.epochs_label.setFont(LABEL_FONT)
        self.train_push_button.setFont(DEFAULT_FONT)
        self.continue_train_checkbox.setFont(DEFAULT_FONT)
        self.output_text_edit.setFont(OUTPUT_FONT)

        translate = QtCore.QCoreApplication.translate
        config = get_setting("train")

        main.setWindowTitle(translate("main_window", "TFPS - Train Model"))
        self.model_label.setText(translate("main_window", "Model"))
        self.scats_number_label.setText(
            translate("main_window", "Scats Number"))
        self.junction_label.setText(translate("main_window", "Junction"))
        self.lag_label.setText(translate("mainWindow", "Lag"))
        self.lag_value_label.setText(
            translate("mainWindow", str(config["lag"])))
        self.batches_label.setText(translate("mainWindow", "Batches"))
        self.batches_value_label.setText(
            translate("mainWindow", str(config["batch"])))
        self.epochs_label.setText(translate("mainWindow", "Epochs"))
        self.epochs_value_label.setText(
            translate("mainWindow", str(config["epochs"])))
        self.train_push_button.setText(translate("main_window", "Train"))
        self.continue_train_checkbox.setText("Continue Training")
Example #41
0
def add(video_id):
    post_data = utility.my_list % utility.get_setting('authorization_url')
    content = utility.decode(connect.load_site(utility.evaluator(), post=post_data))
    match = json.loads(content)['value']['videos']
    headers = {'Access-Control-Request-Headers': 'content-type, accept','Access-Control-Request-Method': 'POST',
               'Origin': 'http://www.netflix.com'}
    content = utility.decode(connect.load_site(utility.evaluator()+'&method=call', options=True, headers=headers))
    print content
    cookies = {'lhpuuidh-browse-' + utility.get_setting('selected_profile'): urllib.quote_plus(utility.get_setting('language').split('-')[1] + ':' + utility.get_setting('language').upper() + ':' + utility.get_setting('root_list')), 'SecureNetflixId': 'v%3D2%26mac%3DAQEAEQABABRkPnYy2LvtMo02JH3beZhI4vKJAM2mLeM.%26dt%3D1449696369549'}
    post_data = utility.add_list % (utility.get_setting('root_list'),
                                    utility.get_setting('my_list'),
                                    video_id,
                                    utility.get_setting('track_id'),
                                    unicode(len(match)),
                                    utility.get_setting('authorization_url'))
    headers = {'Referer': 'http://www.netflix.com/browse',
               'Origin': 'http://www.netflix.com'}
    print post_data
    content = utility.decode(connect.load_site(utility.evaluator()+'&method=call',
                                               cookies=cookies,
                                               headers=headers,
                                               post=post_data))
    print content
Example #42
0
def read_mesh(hdf5_data, custom_config):
    """
    Read the mesh data from the hdf5 file
    Args:
        hdf5_data: object, the opened hdf5 file
        custom_config: dict, the custom configuration dictionary

    Return:
        the mesh data
    """
    n_points = 0
    n_panels = 0
    bodies = hdf5_data.get(structure.H5_BODIES).values()
    n_bodies = len(bodies)

    interior_mesh_points = np.empty((3, 0))
    interior_mesh_panels = np.empty((4, 0))
    interior_c_panels = np.empty((0))
    interior_n_points = 0
    interior_n_panels = 0
    remove_irregular_frequencies = utility.get_setting(settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
                                       'REMOVE_IRREGULAR_FREQUENCIES')
    for c in range(n_bodies):
        body = bodies[c]
        n_points += body.get(structure.H5_BODY_NUM_POINTS)[0]
        n_panels += body.get(structure.H5_BODY_NUM_PANELS)[0]

    mesh = TMesh(n_points=n_points, n_panels=n_panels, n_bodies=n_bodies)

    n_points = 0
    n_panels = 0

    for c in range(n_bodies):
        body = bodies[c]

        mesh_arr = body.get(structure.H5_BODY_MESH)

        n = mesh_arr[0, 1]

        if c > 0 and (n != mesh.i_sym):
            print(' Error: there is an inconsistency in the mesh files regarding the xOz symmetries')
            sys.exit()
        else:
            mesh.i_sym = int(n)

        m = body.get(structure.H5_BODY_NUM_POINTS)[0]
        n = body.get(structure.H5_BODY_NUM_PANELS)[0]

        for i in range(m):
            mesh.x[:, n_points + i] = np.array(mesh_arr[i + 1, 1:4])

        if remove_irregular_frequencies:
            # If we have to remove frequencies, then we need to discretize the free surface
            int_mesh = generate_mesh(np.asarray(mesh_arr[1:m, 1:4]))
            interior_mesh_points = np.concatenate((interior_mesh_points, int_mesh["x"]), axis=1)
            interior_mesh_panels = np.concatenate((interior_mesh_panels, int_mesh["p"]+mesh.n_points+interior_n_points), axis=1)
            interior_c_panels = np.concatenate((interior_c_panels, c*np.ones(int_mesh["n_panels"])), axis=0)
            interior_n_points += int_mesh["n_points"]
            interior_n_panels += int_mesh["n_panels"]

        for i in range(m, m+n):
            mesh.p[:, n_panels+i-m] = np.array(mesh_arr[i + 1, 0:4]) - 1
            for j in range(4):
                mesh.p[j, n_panels + i-m] += n_points
            mesh.c_panel[n_panels+i-m] = c

        n_points += m
        n_panels += n
        mesh.last_panel[c] = n_panels

    if remove_irregular_frequencies:
        # If we have to remove frequencies, then we need to extend the mesh so
        # that it contains the panels of the free surface too
        mesh_interior = TMesh(n_points=n_points + interior_n_points,
                              n_panels=n_panels + interior_n_panels,
                              n_bodies=n_bodies)
        mesh_interior.x[:, 0:n_points] = mesh.x
        mesh_interior.x[:, n_points:] = interior_mesh_points
        mesh_interior.p[:, 0:n_panels] = mesh.p
        mesh_interior.p[:, n_panels:] = interior_mesh_panels
        mesh_interior.last_panel = mesh.last_panel
        mesh_interior.c_panel[0:n_panels] = mesh.c_panel
        mesh_interior.c_panel[n_panels:] = interior_c_panels
        mesh_interior.i_sym = mesh.i_sym
        mesh = mesh_interior

        is_interior_domain = np.zeros((n_panels + interior_n_panels))
        is_interior_domain[n_panels:] = 1

        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_IS_INTERIOR_DOMAIN, is_interior_domain.shape, dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_IS_INTERIOR_DOMAIN_ATTR)
        dset[:] = is_interior_domain

        n_panels += interior_n_panels
        n_points += interior_n_points

    for i in range(mesh.n_panels):
        u = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[0, i]]
        v = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[1, i]]
        w1 = np.cross(u, v)
        a1 = 0.5*np.linalg.norm(w1)

        u = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[2, i]]
        v = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[2, i]]
        w2 = np.cross(u, v)
        a2 = 0.5*np.linalg.norm(w2)

        mesh.a[i] = a1 + a2

        if mesh.a[i] < utility.EPS:
            print('Error: surface of panel ' + str(i) + ' is too small (' + str(mesh.a[i]) + ')')
            sys.exit()

        mesh.xm[:, i] = (1./3)*(mesh.x[:, mesh.p[0, i]] + mesh.x[:, mesh.p[1, i]] + mesh.x[:, mesh.p[3, i]])*a1/mesh.a[i]

        mesh.xm[:, i] += (1./3)*(mesh.x[:, mesh.p[1, i]] + mesh.x[:, mesh.p[2, i]] + mesh.x[:, mesh.p[3, i]])*a2/mesh.a[i]

        u = w1 + w2

        mesh.n[:, i] = u/np.linalg.norm(u)

    return mesh
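The loop above splits every quadrilateral panel into the triangles (p0, p1, p3) and (p2, p3, p1): the cross products w1 and w2 give twice each triangle's area, the panel centre is the area-weighted average of the two triangle centroids, and the unit normal is the normalised sum w1 + w2. A self-contained sketch of that computation for a single panel; the sample corners are made up for illustration:

import numpy as np

def panel_area_centre_normal(p0, p1, p2, p3):
    # Split the quad (p0, p1, p2, p3) into triangles (p0, p1, p3) and (p2, p3, p1),
    # mirroring the loop in read_mesh above.
    w1 = np.cross(p1 - p0, p3 - p1)
    w2 = np.cross(p3 - p2, p1 - p2)
    a1 = 0.5 * np.linalg.norm(w1)
    a2 = 0.5 * np.linalg.norm(w2)
    area = a1 + a2
    centre = ((p0 + p1 + p3) * a1 + (p1 + p2 + p3) * a2) / (3.0 * area)
    normal = (w1 + w2) / np.linalg.norm(w1 + w2)
    return area, centre, normal

# A unit square in the z=0 plane gives area 1.0, centre (0.5, 0.5, 0.0) and normal (0, 0, 1).
corners = [np.array(c, dtype=float) for c in ((0, 0, 0), (1, 0, 0), (1, 1, 0), (0, 1, 0))]
area, centre, normal = panel_area_centre_normal(*corners)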
Example #43
0
def run(hdf5_data, custom_config):
    """
    This function runs the preprocessor
    Args:
        hdf5_data: object, the opened hdf5 file
        custom_config: dict, the custom configuration dictionary
    """
    n_radiation = 0
    n_integration = 0

    bodies = hdf5_data.get(structure.H5_BODIES)

    if not bodies:
        print('The bodies dataset was not found. It looks like your hdf5 file is not correct. '
              'Please set NEMOH_CALCULATIONS_FILE and NEMOH_INPUT_FILE to valid values prior to '
              'running the preprocessor, or add the input manually.')
        sys.exit(1)
    bodies = bodies.values()

    for body in bodies:
        n_radiation += body.get(structure.H5_FREEDOM_DEGREE).shape[0]
        n_integration += body.get(structure.H5_GENERALISED_FORCES).shape[0]

    n_w = hdf5_data.get(structure.H5_NUM_WAVE_FREQUENCIES)[0]
    w_min = hdf5_data.get(structure.H5_MIN_WAVE_FREQUENCIES)[0]
    w_max = hdf5_data.get(structure.H5_MAX_WAVE_FREQUENCIES)[0]
    w = np.zeros(n_w, settings.NEMOH_FLOAT)
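    # Note: for n_w > 1 this loop fills w identically to np.linspace(w_min, w_max, n_w).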
    if n_w > 1:
        for j in range(n_w):
            w[j] = w_min+(w_max-w_min)*j/(n_w-1)
    else:
        w[0] = w_min

    n_beta = hdf5_data.get(structure.H5_NUM_WAVE_DIRECTIONS)[0]
    beta_min = hdf5_data.get(structure.H5_MIN_WAVE_DIRECTIONS)[0]
    beta_max = hdf5_data.get(structure.H5_MAX_WAVE_DIRECTIONS)[0]

    beta = np.zeros(n_beta, settings.NEMOH_FLOAT)

    if n_beta > 1:
        for j in range(n_beta):
            beta[j] = (beta_min+(beta_max-beta_min)*j/(n_beta-1))*math.pi/180.
    else:
        beta[0] = beta_min * math.pi/180.

    switch_potential = hdf5_data.get(structure.H5_SHOW_PRESSURE)[0] >= 1
    n_theta = hdf5_data.get(structure.H5_KOCHIN_NUMBER)[0]
    theta_min = hdf5_data.get(structure.H5_KOCHIN_MIN)[0]
    theta_max = hdf5_data.get(structure.H5_KOCHIN_MAX)[0]
    switch_kochin = n_theta > 0

    n_x = hdf5_data.get(structure.H5_FREE_SURFACE_POINTS_X)[0]
    n_y = hdf5_data.get(structure.H5_FREE_SURFACE_POINTS_Y)[0]
    l_x = hdf5_data.get(structure.H5_FREE_SURFACE_DIMENSION_X)[0]
    l_y = hdf5_data.get(structure.H5_FREE_SURFACE_DIMENSION_Y)[0]

    switch_free_surface = n_x > 0

    rad_case = [TCase() for x in range(n_radiation)]
    int_case = [TCase() for x in range(n_integration)]
    j_rad = 0
    j_int = 0

    for c in range(len(bodies)):
        body = bodies[c]
        freedom_degree = body.get(structure.H5_FREEDOM_DEGREE)
        m = freedom_degree.len()
        for i in range(m):
            case = TCase()
            case.i_case = freedom_degree[i, 0]
            case.direction = np.array(freedom_degree[i, 1:4])
            case.axis = np.array(freedom_degree[i, 4:7])
            case.i_body = c
            case.mode = i
            rad_case[j_rad + i] = case
        j_rad += m

        generalised_forces = body.get(structure.H5_GENERALISED_FORCES)
        m = generalised_forces.len()
        for i in range(m):
            case = TCase()
            case.i_case = generalised_forces[i, 0]
            case.direction = np.array(generalised_forces[i, 1:4])
            case.axis = np.array(generalised_forces[i, 4:7])
            case.i_body = c
            case.mode = i
            int_case[j_int + i] = case

        j_int += m

    print('')
    print('Summary of calculation')

    depth = hdf5_data.get(structure.H5_ENV_DEPTH)[0]
    if depth > 0:
        print('  ->  Water depth = ' + str(depth) + ' m')
    else:
        print('  ->  Infinite water depth')

    print('  -> ' + str(n_w) + ' wave frequencies from ' + str(w[0]) + ' to ' + str(w[n_w-1]))
    print('  -> ' + str(n_beta) + ' wave directions from ' + str(beta[0]) + ' to ' + str(beta[n_beta-1]))
    print('  -> ' + str(n_radiation) + ' radiation problems')
    print('  -> ' + str(n_integration) + ' forces')
    print('')

    mesh = read_mesh(hdf5_data, custom_config)
    write_mesh_l12(mesh, hdf5_data)
    write_mesh_l10(mesh, hdf5_data)

    mesh_tec_file = utility.get_setting(settings.MESH_TEC_FILE, custom_config, 'MESH_TEC_FILE')

    if mesh_tec_file:
        write_mesh_tec(mesh, mesh_tec_file)

    fnds = np.zeros((n_integration, mesh.n_panels*2**mesh.i_sym), settings.NEMOH_FLOAT)

    for j in range(n_integration):
        fnds[j, :] = compute_nds(mesh, int_case[j].body, int_case[j].i_case, int_case[j].direction, int_case[j].axis)

    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_INTEGRATION, fnds.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_MESH_INTEGRATION_ATTR)
    dset[:, :] = fnds

    environment = utility.read_environment(hdf5_data)

    normal_velocity = np.zeros((mesh.n_panels*2**mesh.i_sym, (n_beta+n_radiation)*n_w), settings.NEMOH_COMPLEX)
    fk_force = np.zeros((n_w, n_beta, n_integration), settings.NEMOH_COMPLEX)

    for i in range(n_w):
        for j in range(n_beta):

            result = compute_wave(mesh, w[i], beta[j], environment)
            pressure = result["pressure"]
            n_vel = result["n_vel"]
            normal_velocity[:, j+ i*(n_beta+n_radiation)] = n_vel
            # Calculate the corresponding FK forces
            for k in range(n_integration):
                #for c in range(mesh.n_panels*2**mesh.i_sym):
                    #fk_force[i, j, k] +=  pressure[c]*fnds[k, c]

                fk_force[i, j, k] = np.sum(pressure.flatten()*fnds[k, :].flatten())

        for j in range(n_radiation):
            n_vel = compute_radiation_condition(mesh, rad_case[j].body, rad_case[j].i_case, rad_case[j].direction,
                                        rad_case[j].axis)

            normal_velocity[:, j + n_beta + i*(n_beta+n_radiation)] = n_vel

    # Save body conditions
    n_problems = n_w*(n_radiation+n_beta)
    bc_omega = w.repeat(n_beta + n_radiation)
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_W, bc_omega.shape, dtype='f', maxshape=(None,))
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_W_ATTR)
    dset[:] = bc_omega

    bc_switch_type = -np.ones(n_problems, dtype='f')
    bc_switch_type[0:bc_switch_type.shape[0]:n_beta + n_radiation] = beta
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_BETA, bc_switch_type.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_BETA_ATTR)
    dset[:] = bc_switch_type


    temp = int(switch_potential)*np.ones(n_problems, dtype='i')
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_POTENTIAL, temp.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_POTENTIAL_ATTR)
    dset[:] = temp

    temp = int(switch_free_surface)*np.ones(n_problems, dtype='i')
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_FREE_SURFACE, temp.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_FREE_SURFACE_ATTR)
    dset[:] = temp

    temp = int(switch_kochin)*np.ones(n_problems, dtype='i')
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_KOCHIN, temp.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_KOCHIN_ATTR)
    dset[:] = temp

    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_VELOCITIES, normal_velocity.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_VELOCITIES_ATTR)
    dset[:, :] = normal_velocity


    #fk_force_f = fk_force.flatten()
    #fk_force_o = np.vstack((np.abs(fk_force_f), np.arctan2(np.imag(fk_force_f), np.real(fk_force_f)))).transpose()
    fk_force_o = np.zeros((n_integration*n_w, 2*n_beta+2*n_radiation), dtype='f')
    idx = 0
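    # Pack the Froude-Krylov forces row by row (one row per force mode and frequency):
    # columns hold (magnitude, phase) pairs for each wave direction, and the trailing
    # 2*n_radiation columns are zero placeholders for the radiation problems.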
    for k in range(n_integration):
        for i in range(n_w):
            for c in range(n_beta):
                fk_force_o[idx, 2*c] = np.abs(fk_force[i, c, k])
                fk_force_o[idx, 2*c+1] = np.arctan2(np.imag(fk_force[i, c, k]), np.real(fk_force[i, c, k]))

            for c in range(2*n_radiation):
                fk_force_o[idx, 2*n_beta + c] = 0
            idx += 1


    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FK_FORCES, fk_force_o.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FK_FORCES_ATTR)
    dset[:, :] = fk_force_o

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FK_FORCES_RAW, fk_force.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FK_FORCES_RAW_ATTR)
    dset[:, :, :] = fk_force

    fk_force_tec_file = utility.get_setting(settings.FK_FORCE_TEC_FILE, custom_config, 'FK_FORCE_TEC_FILE')
    if fk_force_tec_file:
        write_fk_force_tec(int_case, fk_force, w, beta, fk_force_tec_file)

    #free_surface_v = [[-0.5*l_x+l_x*i/(n_x-1), -0.5*l_y+l_y*j/(n_y-1), 0.] for i in range(n_x) for j in range(
    #    n_y)]
    free_surface_v = np.zeros((3, n_x*n_y))
    k = 0
    for i in range(n_x):
        for j in range(n_y):
            free_surface_v[0, k] = -0.5*l_x+l_x*i/(n_x-1)
            free_surface_v[1, k] = -0.5*l_y+l_y*j/(n_y-1)
            free_surface_v[2, k] = 0.
            k += 1

    #free_surface_v = np.array(free_surface_v)
    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_FREE_SURFACE_VECTORS, free_surface_v.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_MESH_FREE_SURFACE_VECTORS_ATTR)
    dset[:, :] = free_surface_v

    free_surface_v = np.zeros((0, 0))

    if (n_x-1) > 0 and (n_y-1) > 0:
        #free_surface_v = [[j+i*n_y, j+1+i*n_y, j+1+(i+1)*n_y, j+(i+1)*n_y] for i in range(n_x-1) for j in
                        #range(n_y-1)]
        free_surface_v = np.zeros((4, (n_x-1)*(n_y-1)))
        k = 0
        for i in range(n_x-1):
            for j in range(n_y-1):
                free_surface_v[0, k] = j+i*n_y
                free_surface_v[1, k] = j+1+i*n_y
                free_surface_v[2, k] = j+1+(i+1)*n_y
                free_surface_v[3, k] = j+(i+1)*n_y
                k += 1
    #free_surface_v = np.array(free_surface_v)
    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_FREE_SURFACE_INDEX, free_surface_v.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_MESH_FREE_SURFACE_INDEX_ATTR)
    dset[:, :] = free_surface_v


    # Generate Kochin
    kochin = np.array([])
    if n_theta > 0:
        if n_theta > 1:
            kochin = [(theta_min+(theta_max-theta_min)*j/(n_theta-1))*np.pi/180. for j in range(n_theta)]
        else:
            kochin = [theta_min*np.pi/180.]


    kochin = np.array(kochin)
    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_KOCHIN, kochin.shape, dtype='f', maxshape=(None, ))
    utility.set_hdf5_attributes(dset, structure.H5_MESH_KOCHIN_ATTR)
    dset[:] = kochin

    # Save index of cases

    out = np.array([[k+1, int_case[k].body+1, int_case[k].mode+1] for k in range(n_integration)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_FORCE, out.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_FORCE_ATTR)
    dset[:, :] = out

    out = np.array([[k+1, rad_case[k].body+1, rad_case[k].mode+1] for k in range(n_radiation)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_MOTION, out.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_MOTION_ATTR)
    dset[:, :] = out

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_BETA, beta.shape, dtype='f', maxshape=(None,))
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_BETA_ATTR)
    dset[:] = beta

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_W, w.shape, dtype='f', maxshape=(None,))
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_W_ATTR)
    dset[:] = w

    out = np.array([(theta_min+(theta_max-theta_min)*k/(n_theta-1))*np.pi/180. for k in range(n_theta)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_THETA, out.shape, dtype='f', maxshape=(None,))
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_THETA_ATTR)
    dset[:] = out

    # Save radiation cases

    out = np.array([[rad_case[k].body + 1, rad_case[k].i_case + 1,
                     rad_case[k].direction[0], rad_case[k].direction[1], rad_case[k].direction[2],
                     rad_case[k].axis[0], rad_case[k].axis[1], rad_case[k].axis[2]]
                    for k in range(n_radiation)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_RADIATION, out.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_RADIATION_ATTR)
    dset[:, :] = out

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_BETA, beta.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_BETA_ATTR)
    dset[:] = beta

    switch_ode_influence = utility.get_setting(settings.USE_ODE_INFLUENCE_COEFFICIENTS, custom_config,
                                       'USE_ODE_INFLUENCE_COEFFICIENTS')

    use_higher_order = utility.get_setting(settings.USE_HIGHER_ORDER, custom_config,
                                       'USE_HIGHER_ORDER')

    num_panel_higher_order = utility.get_setting(settings.NUM_PANEL_HIGHER_ORDER, custom_config,
                                       'NUM_PANEL_HIGHER_ORDER')

    b_spline_order = utility.get_setting(settings.B_SPLINE_ORDER, custom_config,
                                       'B_SPLINE_ORDER')

    use_dipoles_implementation = utility.get_setting(settings.USE_DIPOLES_IMPLEMENTATION, custom_config,
                                       'USE_DIPOLES_IMPLEMENTATION')

    compute_yaw_moment = utility.get_setting(settings.COMPUTE_YAW_MOMENT, custom_config,
                                       'COMPUTE_YAW_MOMENT')

    compute_drift_forces = utility.get_setting(settings.COMPUTE_DRIFT_FORCES, custom_config,
                                       'COMPUTE_DRIFT_FORCES')

    thin_panels = utility.get_setting(settings.THIN_PANELS, custom_config,
                                       'THIN_PANELS')

    if num_panel_higher_order is not None and num_panel_higher_order > 0:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_NUM_PANEL_HIGHER_ORDER, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_NUM_PANEL_HIGHER_ORDER_ATTR)
        dset[:] = int(num_panel_higher_order)

    if b_spline_order is not None and b_spline_order > 0:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_B_SPLINE_ORDER, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_B_SPLINE_ORDER_ATTR)
        dset[:] = int(b_spline_order)

    if use_higher_order is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_USE_HIGHER_ORDER, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_USE_HIGHER_ORDER_ATTR)
        dset[:] = int(use_higher_order)


    if switch_ode_influence is not None:
        temp = int(switch_ode_influence)*np.ones(n_problems, dtype='i')
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_SWITCH_ODE_INFLUENCE, temp.shape, dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_SWITCH_ODE_INFLUENCE_ATTR)
        dset[:] = temp

    if use_dipoles_implementation is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION_ATTR)
        dset[:] = int(use_dipoles_implementation)

    if compute_yaw_moment is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_COMPUTE_YAW_MOMENT, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_COMPUTE_YAW_MOMENT_ATTR)
        dset[:] = int(compute_yaw_moment)

    if compute_drift_forces is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_COMPUTE_DRIFT_FORCES, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_COMPUTE_DRIFT_FORCES_ATTR)
        dset[:] = int(compute_drift_forces)

    if thin_panels is not None:
        temp = np.zeros(mesh.n_panels, dtype='i')
        for idx in thin_panels:
            if idx == -1:
                temp = np.ones(mesh.n_panels, dtype='i')
                break
            elif idx >= 0:
                temp[idx] = 1
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_THIN_PANELS, temp.shape, dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_THIN_PANELS_ATTR)
        dset[:] = temp
Example #44
0
def run(hdf5_data, custom_config):
    """
    This function runs the postprocessor
    Args:
        hdf5_data: object, the opened hdf5 file
        custom_config: dict, the custom configuration dictionary
    """

    logger = logging.getLogger(__name__)
    signature = __name__ + '.run(hdf5_data, custom_config)'
    # No need to log the parameter of the method here as it will only be duplicate.
    # This function is never called directly by the user and always call from the postprocess function
    # which already logs the configuration.
    utility.log_entrance(logger, signature, {})

    logger.info('Initialising the post-processing steps')

    logger.info('Reading environment data ...')
    environment = utility.read_environment(hdf5_data)
    logger.info('Read environment data' + str(environment))

    logger.info('Reading simulation results')
    result = read_results(hdf5_data)
    logger.info('Read solver result ' + str(result))

    logger.info('Post processing initialisation done !')

    # Saving to hdf5 file
    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_ADDED_MASS,
                                   result.added_mass.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_ATTR)
    dset[:, :, :] = result.added_mass
    logger.info('Saved ' +
                str(structure.H5_RESULTS_ADDED_MASS_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_ADDED_MASS +
                ' with characteristics ' + str(dset))

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_RADIATION_DAMPING,
                                   result.radiation_damping.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_RADIATION_DAMPING_ATTR)
    dset[:, :, :] = result.radiation_damping
    logger.info(
        'Saved ' +
        str(structure.H5_RESULTS_RADIATION_DAMPING_ATTR['description']) +
        ' at ' + structure.H5_RESULTS_RADIATION_DAMPING +
        ' with characteristics ' + str(dset))

    excitation_forces = result.diffraction_force + result.froudkrylov_force
    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_EXCITATION_FORCES,
                                   excitation_forces.shape,
                                   dtype='F')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_EXCITATION_FORCES_ATTR)
    dset[:, :, :] = excitation_forces
    logger.info(
        'Saved ' +
        str(structure.H5_RESULTS_EXCITATION_FORCES_ATTR['description']) +
        ' at ' + structure.H5_RESULTS_EXCITATION_FORCES +
        ' with characteristics ' + str(dset))

    tec_file = utility.get_setting(settings.RADIATION_COEFFICIENTS_TEC_FILE,
                                   custom_config,
                                   'RADIATION_COEFFICIENTS_TEC_FILE')
    if tec_file:
        save_radiation_coefficients(result, tec_file)
        logger.info(
            'Radiation coefficients successfully saved in tecplot format at ' +
            str(tec_file))
    else:
        logger.info(
            'Radiation coefficients tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.DIFFRACTION_FORCE_TEC_FILE,
                                   custom_config, 'DIFFRACTION_FORCE_TEC_FILE')

    if tec_file:
        save_diffraction_force(result, tec_file)
        logger.info(
            'Diffraction forces successfully saved in tecplot format at ' +
            str(tec_file))
    else:
        logger.info('Diffraction forces tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.EXCITATION_FORCE_TEC_FILE,
                                   custom_config, 'EXCITATION_FORCE_TEC_FILE')
    if tec_file:
        save_excitation_force(result, tec_file)
        logger.info(
            'Excitation forces successfully saved in tecplot format at ' +
            str(tec_file))
    else:
        logger.info('Excitation forces tecplot format generation is disabled')

    irf = get_irf(hdf5_data, result)

    if irf.switch == 1:
        irf = compute_irf(result, irf)
        # Saving to hdf5 file
        dset = utility.require_dataset(
            hdf5_data,
            structure.H5_RESULTS_ADDED_MASS_INFINITE,
            irf.added_mass.shape,
            dtype='f')
        utility.set_hdf5_attributes(
            dset, structure.H5_RESULTS_ADDED_MASS_INFINITE_ATTR)
        dset[:, :] = irf.added_mass

        tec_file = utility.get_setting(settings.IRF_TEC_FILE, custom_config,
                                       'IRF_TEC_FILE')
        if tec_file:
            save_irf(irf, tec_file)
            logger.info('IRF successfully saved in tecplot format at ' +
                        str(tec_file))
        else:
            logger.info('IRF tecplot format generation is disabled')
    else:
        logger.info('IRF computation is disabled')

    raos = np.zeros((result.n_integration, result.n_w, result.n_beta),
                    dtype='F')
    raos = compute_raos(raos, result)

    tec_file = utility.get_setting(settings.WAVE_FIELD_TEC_FILE, custom_config,
                                   'WAVE_FIELD_TEC_FILE')

    dset = hdf5_data.get(structure.H5_SOLVER_USE_HIGHER_ORDER)
    utility.check_dataset_type(
        dset,
        name=str(structure.H5_SOLVER_USE_HIGHER_ORDER_ATTR['description']),
        location=structure.H5_SOLVER_USE_HIGHER_ORDER)
    use_higher_order = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    utility.check_dataset_type(
        dset,
        name=str(structure.
                 H5_SOLVER_USE_DIPOLES_IMPLEMENTATION_ATTR['description']),
        location=structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    use_dipoles_implementation = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    utility.check_dataset_type(
        dset,
        name=str(structure.
                 H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR['description']),
        location=structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    remove_irregular_frequencies = dset[0]

    if tec_file:
        if use_higher_order != 1 and use_dipoles_implementation != 1 and remove_irregular_frequencies != 1:
            res = compute_wave_elevation(hdf5_data, environment, 0, 0, raos,
                                         result)
            save_wave_elevation(res['w'], res['etai'], res["etap"], res["eta"],
                                res["x"], res["y"], tec_file)
            logger.info(
                'Wave elevation successfully saved in tecplot format at ' +
                str(tec_file))
        else:
            logger.info(
                'Wave elevation computation is not supported when the higher order panel method, '
                'the dipoles implementation or irregular frequency removal is enabled. '
                'Disabling it.')
    else:
        logger.info('Wave elevation tecplot format generation is disabled')
Example #45
0
def update_displayed():
    menu_path = xbmc.getInfoLabel("Container.FolderPath")
    if not utility.get_setting("show_profiles") == "true":
        utility.set_setting("selected_profile", None)
        connect.save_session()
    xbmc.executebuiltin("Container.Update(" + menu_path + ")")
Example #46
0
def update_displayed():
    menu_path = xbmc.getInfoLabel('Container.FolderPath')
    if not utility.get_setting('show_profiles') == 'true':
        utility.set_setting('selected_profile', None)
        connect.save_session()
    xbmc.executebuiltin('Container.Update(' + menu_path + ')')
Example #47
0
def read_mesh(hdf5_data, custom_config):
    """
    Read the mesh data from the hdf5 file
    Args:
        hdf5_data: object, the opened hdf5 file
        custom_config: dict, the custom configuration dictionary

    Return:
        the mesh data
    """
    n_points = 0
    n_panels = 0
    bodies = hdf5_data.get(structure.H5_BODIES).values()
    n_bodies = len(bodies)

    interior_mesh_points = np.empty((3, 0))
    interior_mesh_panels = np.empty((4, 0))
    interior_c_panels = np.empty((0))
    interior_n_points = 0
    interior_n_panels = 0
    remove_irregular_frequencies = utility.get_setting(
        settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
        'REMOVE_IRREGULAR_FREQUENCIES')
    for c in range(n_bodies):
        body = bodies[c]
        n_points += body.get(structure.H5_BODY_NUM_POINTS)[0]
        n_panels += body.get(structure.H5_BODY_NUM_PANELS)[0]

    mesh = TMesh(n_points=n_points, n_panels=n_panels, n_bodies=n_bodies)

    n_points = 0
    n_panels = 0

    for c in range(n_bodies):
        body = bodies[c]

        mesh_arr = body.get(structure.H5_BODY_MESH)

        n = mesh_arr[0, 1]

        if c > 0 and (n != mesh.i_sym):
            print(
                ' Error: there is an inconsistency in the mesh files regarding the xOz symmetries'
            )
            sys.exit()
        else:
            mesh.i_sym = int(n)

        m = body.get(structure.H5_BODY_NUM_POINTS)[0]
        n = body.get(structure.H5_BODY_NUM_PANELS)[0]

        for i in range(m):
            mesh.x[:, n_points + i] = np.array(mesh_arr[i + 1, 1:4])

        if remove_irregular_frequencies:
            # If we have to remove frequencies, then we need to discretize the free surface
            int_mesh = generate_mesh(np.asarray(mesh_arr[1:m, 1:4]))
            interior_mesh_points = np.concatenate(
                (interior_mesh_points, int_mesh["x"]), axis=1)
            interior_mesh_panels = np.concatenate(
                (interior_mesh_panels,
                 int_mesh["p"] + mesh.n_points + interior_n_points),
                axis=1)
            interior_c_panels = np.concatenate(
                (interior_c_panels, c * np.ones(int_mesh["n_panels"])), axis=0)
            interior_n_points += int_mesh["n_points"]
            interior_n_panels += int_mesh["n_panels"]

        for i in range(m, m + n):
            mesh.p[:, n_panels + i - m] = np.array(mesh_arr[i + 1, 0:4]) - 1
            for j in range(4):
                mesh.p[j, n_panels + i - m] += n_points
            mesh.c_panel[n_panels + i - m] = c

        n_points += m
        n_panels += n
        mesh.last_panel[c] = n_panels

    if remove_irregular_frequencies:
        # If we have to remove frequencies, then we need to extend the mesh so
        # that it contains the panels of the free surface too
        mesh_interior = TMesh(n_points=n_points + interior_n_points,
                              n_panels=n_panels + interior_n_panels,
                              n_bodies=n_bodies)
        mesh_interior.x[:, 0:n_points] = mesh.x
        mesh_interior.x[:, n_points:] = interior_mesh_points
        mesh_interior.p[:, 0:n_panels] = mesh.p
        mesh_interior.p[:, n_panels:] = interior_mesh_panels
        mesh_interior.last_panel = mesh.last_panel
        mesh_interior.c_panel[0:n_panels] = mesh.c_panel
        mesh_interior.c_panel[n_panels:] = interior_c_panels
        mesh_interior.i_sym = mesh.i_sym
        mesh = mesh_interior

        is_interior_domain = np.zeros((n_panels + interior_n_panels))
        is_interior_domain[n_panels:] = 1

        dset = utility.require_dataset(hdf5_data,
                                       structure.H5_SOLVER_IS_INTERIOR_DOMAIN,
                                       is_interior_domain.shape,
                                       dtype='i')
        utility.set_hdf5_attributes(
            dset, structure.H5_SOLVER_IS_INTERIOR_DOMAIN_ATTR)
        dset[:] = is_interior_domain

        n_panels += interior_n_panels
        n_points += interior_n_points

    for i in range(mesh.n_panels):
        u = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[0, i]]
        v = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[1, i]]
        w1 = np.cross(u, v)
        a1 = 0.5 * np.linalg.norm(w1)

        u = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[2, i]]
        v = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[2, i]]
        w2 = np.cross(u, v)
        a2 = 0.5 * np.linalg.norm(w2)

        mesh.a[i] = a1 + a2

        if mesh.a[i] < utility.EPS:
            print('Error: surface of panel ' + str(i) + ' is too small (' +
                  str(mesh.a[i]) + ')')
            sys.exit()

        mesh.xm[:, i] = (1. / 3) * (mesh.x[:, mesh.p[0, i]] +
                                    mesh.x[:, mesh.p[1, i]] +
                                    mesh.x[:, mesh.p[3, i]]) * a1 / mesh.a[i]

        mesh.xm[:, i] += (1. / 3) * (mesh.x[:, mesh.p[1, i]] +
                                     mesh.x[:, mesh.p[2, i]] +
                                     mesh.x[:, mesh.p[3, i]]) * a2 / mesh.a[i]

        u = w1 + w2

        mesh.n[:, i] = u / np.linalg.norm(u)

    return mesh
Example #48
0
def login():
    login_progress = xbmcgui.DialogProgress()
    login_progress.create('Netflix', utility.get_string(30200) + '...')
    utility.progress_window(login_progress, 25, utility.get_string(30201))
    connect.session.cookies.clear()
    content = utility.decode(connect.load_site(utility.main_url + 'Login'))
    if not 'Sorry, Netflix ' in content:
        match = re.compile('name="authURL" value="(.+?)"', re.DOTALL).findall(content)
        utility.log('Setting authorization url: ' + match[0])
        utility.set_setting('authorization_url', match[0])
        match = re.compile('locale: "(.+?)"', re.DOTALL).findall(content)
        utility.set_setting('language', match[0])
        post_data = {'authURL': utility.get_setting('authorization_url'), 'email': utility.get_setting('username'),
                     'password': utility.get_setting('password'), 'RememberMe': 'on'}
        utility.progress_window(login_progress, 50, utility.get_string(30202))
        content = utility.decode(connect.load_site(utility.main_url + 'Login?locale=' +
                                                   utility.get_setting('language'), post=post_data))
        if 'id="page-LOGIN"' in content:
            utility.notification(utility.get_string(30303))
            return False
        match = re.compile('"apiUrl":"(.+?)",').findall(content)
        utility.set_setting('api_url', match[0])
        post_data = utility.my_list % utility.get_setting('authorization_url')
        content = utility.decode(connect.load_site(utility.evaluator(), post=post_data))
        matches = json.loads(content)['value']
        match = matches['lolomos'].keys()
        utility.set_setting('root_list', match[0])
        match = matches['lists'].keys()
        utility.set_setting('my_list', match[1])
        match = matches['lists'][utility.get_setting('my_list')]['trackIds']['trackId']
        utility.set_setting('track_id', unicode(match))
        connect.save_session()
        utility.progress_window(login_progress, 75, utility.get_string(30203))
        if not (utility.get_setting('selected_profile') or (utility.get_setting('single_profile') == 'true')):
            profiles.choose()
        elif not (utility.get_setting('single_profile') == 'true') and (utility.get_setting('show_profiles') == 'true'):
            profiles.choose()
        elif not ((utility.get_setting('single_profile') and utility.get_setting('show_profiles')) == 'true'):
            profiles.load()
        #if not utility.get_setting('is_kid') == 'true':
            #match = re.compile('"version":{"app":"(.+?)"').findall(content)
            #utility.set_setting('lolomos', match[0])
            #3a5922fa-a4a9-41d8-a08c-9e84c2d32be4_ROOT
        if login_progress:
            if not utility.progress_window(login_progress, 100, utility.get_string(30204)):
                return False
            xbmc.sleep(500)
            login_progress.close()
        return True
    else:
        utility.notification(utility.get_string(30300))
        if login_progress:
            login_progress.close()
        return False
Example #49
0
    else:
        print('  ->  Infinite water depth')

    print('  -> ' + str(n_w) + ' wave frequencies from ' + str(w[0]) + ' to ' +
          str(w[n_w - 1]))
    print('  -> ' + str(n_beta) + ' wave directions from ' +
          str(beta[0]) + ' to ' + str(beta[n_beta - 1]))
    print('  -> ' + str(n_radiation) + ' radiation problems')
    print('  -> ' + str(n_integration) + ' forces')
    print('')

    mesh = read_mesh(hdf5_data, custom_config)
    write_mesh_l12(mesh, hdf5_data)
    write_mesh_l10(mesh, hdf5_data)

    mesh_tec_file = utility.get_setting(settings.MESH_TEC_FILE, custom_config,
                                        'MESH_TEC_FILE')

    if mesh_tec_file:
        write_mesh_tec(mesh, mesh_tec_file)

    fnds = np.zeros((n_integration, mesh.n_panels * 2**mesh.i_sym),
                    settings.NEMOH_FLOAT)

    for j in range(n_integration):
        fnds[j, :] = compute_nds(mesh, int_case[j].body, int_case[j].i_case,
                                 int_case[j].direction, int_case[j].axis)

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_MESH_INTEGRATION,
                                   fnds.shape,
                                   dtype='f')
Example #50
0
def video(name,
          url,
          mode,
          thumb,
          video_type='',
          description='',
          duration='',
          year='',
          mpaa='',
          director='',
          genre='',
          rating=0.0,
          playcount=0,
          remove=False):
    entries = []
    cover_file, fanart_file = utility.cover_fanart(url)
    if xbmcvfs.exists(cover_file):
        thumb = cover_file
    u = sys.argv[0]
    u += '?url=' + urllib.quote_plus(url)
    u += '&mode=' + mode
    u += '&name=' + urllib.quote_plus(utility.encode(name))
    u += '&thumb=' + urllib.quote_plus(thumb)
    list_item = xbmcgui.ListItem(name)
    list_item.setArt({'icon': 'DefaultTVShows.png', 'thumb': thumb})
    list_item.setInfo(type='video',
                      infoLabels={
                          'title': name,
                          'plot': description,
                          'duration': duration,
                          'year': int(year),
                          'mpaa': mpaa,
                          'director': director,
                          'genre': genre,
                          'rating': rating,
                          'playcount': playcount
                      })
    if xbmcvfs.exists(fanart_file):
        list_item.setProperty('fanart_image', fanart_file)
    elif xbmcvfs.exists(cover_file):
        list_item.setProperty('fanart_image', cover_file)
    else:
        list_item.setProperty('fanart_image', utility.addon_fanart())
    if video_type == 'tvshow':
        if utility.get_setting('browse_tv_shows') == 'true':
            entries.append((utility.get_string(
                30151
            ), 'Container.Update(plugin://%s/?mode=play_video_main&url=%s&thumb=%s)'
                            % (utility.addon_id, urllib.quote_plus(url),
                               urllib.quote_plus(thumb))))
        else:
            entries.append((utility.get_string(
                30152
            ), 'Container.Update(plugin://%s/?mode=list_seasons&url=%s&thumb=%s)'
                            % (utility.addon_id, urllib.quote_plus(url),
                               urllib.quote_plus(thumb))))
    if video_type != 'episode':
        entries.append(
            (utility.get_string(30153),
             'RunPlugin(plugin://%s/?mode=play_trailer&url=%s&type=%s)' %
             (utility.addon_id, urllib.quote_plus(
                 utility.encode(name)), video_type)))
        if remove:
            entries.append(
                (utility.get_string(30154),
                 'RunPlugin(plugin://%s/?mode=remove_from_queue&url=%s)' %
                 (utility.addon_id, urllib.quote_plus(url))))
        else:
            entries.append(
                (utility.get_string(30155),
                 'RunPlugin(plugin://%s/?mode=add_to_queue&url=%s)' %
                 (utility.addon_id, urllib.quote_plus(url))))
        entries.append((
            utility.get_string(30156),
            'Container.Update(plugin://%s/?mode=list_videos&url=%s&type=movie)'
            % (utility.addon_id,
               urllib.quote_plus(utility.main_url + 'WiMovie/' + url))))
        entries.append(
            (utility.get_string(30157),
             'Container.Update(plugin://%s/?mode=list_videos&url=%s&type=tv)' %
             (utility.addon_id,
              urllib.quote_plus(utility.main_url + 'WiMovie/' + url))))
    if video_type == 'tvshow':
        entries.append((
            utility.get_string(30150),
            'RunPlugin(plugin://%s/?mode=add_series_to_library&url=&name=%s&series_id=%s)'
            % (utility.addon_id, urllib.quote_plus(utility.encode(
                name.strip())), urllib.quote_plus(url))))
    elif video_type == 'movie':
        entries.append(
            (utility.get_string(30150),
             'RunPlugin(plugin://%s/?mode=add_movie_to_library&url=%s&name=%s)'
             % (utility.addon_id, urllib.quote_plus(url),
                urllib.quote_plus(utility.encode(name.strip())) + ' (' +
                unicode(year) + ')')))
    list_item.addContextMenuItems(entries)
    directory_item = xbmcplugin.addDirectoryItem(handle=plugin_handle,
                                                 url=u,
                                                 listitem=list_item,
                                                 isFolder=True)
    return directory_item
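The plugin URL above is assembled by concatenating quote_plus-encoded parameters by hand; the same query string can be produced with urllib.urlencode from the Python 2 standard library. This is only an equivalent sketch, not a helper the add-on itself defines:

import sys
import urllib

def build_plugin_url(url, mode, name, thumb):
    # Equivalent to the manual '&' concatenation above (parameter order may differ).
    # name should already be a byte string; the add-on runs it through utility.encode first.
    query = urllib.urlencode({'url': url, 'mode': mode, 'name': name, 'thumb': thumb})
    return sys.argv[0] + '?' + query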
Example #51
0
from __future__ import unicode_literals

import base64
import urllib

import list
import resources.lib.tmdbsimple as tmdbsimple
import utility

tmdbsimple.API_KEY = base64.b64decode(
    'NDc2N2I0YjJiYjk0YjEwNGZhNTUxNWM1ZmY0ZTFmZWM=')
language = utility.get_setting('language').split('-')[0]


def netflix(video_type):
    search_string = utility.keyboard()
    if search_string:
        list.search(search_string, video_type)


def tmdb(video_type, title, year=None):
    search = tmdbsimple.Search()
    if video_type.startswith('tv'):
        content = search.tv(query=utility.encode(title),
                            first_air_date_year=year,
                            language=language,
                            include_adult='true')
        if content['total_results'] == 0:
            content = search.tv(query=utility.encode(title),
                                language=language,
                                include_adult='true')
Example #52
0
def videos(url, video_type, run_as_widget=False):
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    # The next part is necessary during the transition phase; otherwise the data are not available.
    if 'recently-added' in url:
        postdata = utility.recently_added % utility.get_setting('authorization_url')
        content = utility.decode(connect.load_site(utility.evaluator(), post=postdata))
    else:
        content = utility.decode(connect.load_site(url))
    if not 'id="page-LOGIN"' in content or url == 'recently-added':
        if utility.get_setting('single_profile') == 'true' and 'id="page-ProfilesGate"' in content:
            profiles.force_choose()
        else:
            if '<div id="queue"' in content:
                content = content[content.find('<div id="queue"'):]
            if not 'recently-added' in url:
                content = utility.clean_content(content)
            match = None
            if not match: match = re.compile('"\$type":"leaf",.*?"id":([0-9]+)', re.DOTALL).findall(content)
            print '1: ' + str(match)
            if not match: match = re.compile('<a href="\/watch\/([0-9]+)', re.DOTALL).findall(content)
            print '2: ' + str(match)
            if not match: match = re.compile('<span id="dbs(.+?)_.+?alt=".+?"', re.DOTALL).findall(content)
            print '3: ' + str(match)
            if not match: match = re.compile('<span class="title.*?"><a id="b(.+?)_', re.DOTALL).findall(content)
            print '4: ' + str(match)
            if not match: match = re.compile('"boxart":".+?","titleId":(.+?),', re.DOTALL).findall(content)
            print '5: ' + str(match)
            if not match: match = re.compile('WiPlayer\?movieid=([0-9]+?)&', re.DOTALL).findall(content)
            print '6: ' + str(match)
            if 'recently-added' in url:
                matches = json.loads(content)['value']['videos']
                for video_id in matches:
                    match.append(unicode(video_id))
            print '7: ' + str(match)
            print len(match)
            i = 1
            for video_id in match:
                if int(video_id) > 10000000 or 'recently-added' in url:
                    if not run_as_widget:
                        utility.progress_window(loading_progress, i * 100 / len(match), '...')
                    video(video_id, '', '', False, False, video_type, url)
                i += 1
            match1 = re.compile('&pn=(.+?)&', re.DOTALL).findall(url)
            match2 = re.compile('&from=(.+?)&', re.DOTALL).findall(url)
            match_api_root = re.compile('"API_ROOT":"(.+?)"', re.DOTALL).findall(content)
            match_api_base = re.compile('"API_BASE_URL":"(.+?)"', re.DOTALL).findall(content)
            match_identifier = re.compile('"BUILD_IDENTIFIER":"(.+?)"', re.DOTALL).findall(content)
            if 'agid=' in url and match_api_root and match_api_base and match_identifier:
                genre_id = url[url.find('agid=') + 5:]
                add.directory(utility.get_string(30110),
                              match_api_root[0] + match_api_base[0] + '/' + match_identifier[0] +
                              '/wigenre?genreId=' + genre_id + '&full=false&from=51&to=100&_retry=0',
                              'list_videos', '', video_type)
            elif match1:
                current_page = match1[0]
                next_page = str(int(current_page) + 1)
                add.directory(utility.get_string(30110),
                              url.replace('&pn=' + current_page + '&', '&pn=' + next_page + '&'), 'list_videos', '',
                              video_type)
            elif match2:
                current_from = match2[0]
                next_from = str(int(current_from) + 50)
                current_to = str(int(current_from) + 49)
                next_to = str(int(current_from) + 99)
                add.directory(utility.get_string(30110),
                              url.replace('&from=' + current_from + '&', '&from=' + next_from + '&').replace(
                                  '&to=' + current_to + '&', '&to=' + next_to + '&'), 'list_videos', '', video_type)
            if utility.get_setting('force_view') == 'true' and not run_as_widget:
                xbmc.executebuiltin('Container.SetViewMode(' + utility.get_setting('view_id_videos') + ')')
        xbmcplugin.endOfDirectory(plugin_handle)
    else:
        delete.cookies()
        utility.log('User is not logged in.', loglevel=xbmc.LOGERROR)
        utility.notification(utility.get_string(30303))
Example #53
0
def run(hdf5_data, custom_config):
    """
    This function runs the postprocessor
    Args:
        hdf5_data: object, the opened hdf5 file
        custom_config: dict, the custom configuration dictionary
    """
    print('\n  -> Initialisation ...')

    try:
        environment = utility.read_environment(hdf5_data)
    except Exception:
        print('It looks like your hdf5 file is not correct. Please run '
              'the preprocessor and the solver before running the postprocessor.')
        sys.exit(1)

    result = read_results(hdf5_data)

    print('. Initialisation Done !\n')

    # Saving to hdf5 file
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS, result.added_mass.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_ATTR)
    dset[:, :, :] = result.added_mass

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_RADIATION_DAMPING, result.radiation_damping.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_RADIATION_DAMPING_ATTR)
    dset[:, :, :] = result.radiation_damping

    excitation_forces = result.diffraction_force + result.froudkrylov_force
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_EXCITATION_FORCES, excitation_forces.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_EXCITATION_FORCES_ATTR)
    dset[:, :, :] = excitation_forces
    

    tec_file = utility.get_setting(settings.RADIATION_COEFFICIENTS_TEC_FILE, custom_config,
                                   'RADIATION_COEFFICIENTS_TEC_FILE')
    if tec_file:
        save_radiation_coefficients(result, tec_file)
        print('Radiation coefficients successfully saved.\n')

    tec_file = utility.get_setting(settings.DIFFRACTION_FORCE_TEC_FILE, custom_config,
                                   'DIFFRACTION_FORCE_TEC_FILE')
    if tec_file:
        save_diffraction_force(result, tec_file)
        print('Diffraction forces successfully saved.\n')

    tec_file = utility.get_setting(settings.EXCITATION_FORCE_TEC_FILE, custom_config,
                                   'EXCITATION_FORCE_TEC_FILE')
    if tec_file:
        save_excitation_force(result, tec_file)
        print('Excitation forces successfully saved.\n')

    
    irf = get_irf(hdf5_data, result)
    if not irf:
        print('It looks like your hdf5 file is not correct. Please run '
              'the preprocessor and the solver before running the postprocessor.')
        sys.exit(1)
    if irf.switch == 1:
        irf = compute_irf(result, irf)
        # Saving to hdf5 file
        dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS_INFINITE, irf.added_mass.shape, dtype='f')
        utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_INFINITE_ATTR)
        dset[:, :] = irf.added_mass

        tec_file = utility.get_setting(settings.IRF_TEC_FILE, custom_config,
                                       'IRF_TEC_FILE')
        if tec_file:
            save_irf(irf, tec_file)
            print('IRF successfully saved.\n')

    raos = np.zeros((result.n_integration, result.n_w, result.n_beta), dtype='F')
    raos = compute_raos(raos, result)

    

    tec_file = utility.get_setting(settings.WAVE_FIELD_TEC_FILE, custom_config,
                                   'WAVE_FIELD_TEC_FILE')
    if (tec_file and hdf5_data.get(structure.H5_SOLVER_USE_HIGHER_ORDER)[0] != 1
            and hdf5_data.get(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)[0] != 1
            and hdf5_data.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)[0] != 1):
        res = compute_wave_elevation(hdf5_data, environment, 0, 0, raos, result)
        save_wave_elevation(res['w'], res['etai'], res["etap"], res["eta"], res["x"], res["y"],
                            tec_file)
        print('Wave elevation successfully saved.\n')

    print(' -> All results successfully saved.\n')
Example #54
0
def videos(url, video_type, run_as_widget=False):
    loading_progress = None
    if not run_as_widget:
        loading_progress = xbmcgui.DialogProgress()
        loading_progress.create('Netflix', utility.get_string(30205) + '...')
        utility.progress_window(loading_progress, 0, '...')
    xbmcplugin.setContent(plugin_handle, 'movies')
    if not xbmcvfs.exists(utility.session_file()):
        login.login()
    content = utility.decode(connect.load_site(url))
    if not 'id="page-LOGIN"' in content:
        if utility.get_setting(
                'single_profile'
        ) == 'true' and 'id="page-ProfilesGate"' in content:
            profiles.force_choose()
        else:
            if '<div id="queue"' in content:
                content = content[content.find('<div id="queue"'):]
            content = utility.clean_content(content)
            match = None
            if not match:
                match = re.compile('"\$type":"leaf",.*?"id":([0-9]+)',
                                   re.DOTALL).findall(content)
            print '1: ' + str(match)
            if not match:
                match = re.compile('<a href="\/watch\/([0-9]+)',
                                   re.DOTALL).findall(content)
            print '2: ' + str(match)
            if not match:
                match = re.compile('<span id="dbs(.+?)_.+?alt=".+?"',
                                   re.DOTALL).findall(content)
            print '3: ' + str(match)
            if not match:
                match = re.compile('<span class="title.*?"><a id="b(.+?)_',
                                   re.DOTALL).findall(content)
            print '4: ' + str(match)
            if not match:
                match = re.compile('"boxart":".+?","titleId":(.+?),',
                                   re.DOTALL).findall(content)
            print '5: ' + str(match)
            if not match:
                match = re.compile('WiPlayer\?movieid=([0-9]+?)&',
                                   re.DOTALL).findall(content)
            print '6: ' + str(match)
            print len(match)
            i = 1
            for video_id in match:
                if int(video_id) > 10000000:
                    if not run_as_widget:
                        utility.progress_window(loading_progress,
                                                i * 100 / len(match), '...')
                    video(video_id, '', '', False, False, video_type, url)
                i += 1
            match1 = re.compile('&pn=(.+?)&', re.DOTALL).findall(url)
            match2 = re.compile('&from=(.+?)&', re.DOTALL).findall(url)
            match_api_root = re.compile('"API_ROOT":"(.+?)"',
                                        re.DOTALL).findall(content)
            match_api_base = re.compile('"API_BASE_URL":"(.+?)"',
                                        re.DOTALL).findall(content)
            match_identifier = re.compile('"BUILD_IDENTIFIER":"(.+?)"',
                                          re.DOTALL).findall(content)
            if 'agid=' in url and match_api_root and match_api_base and match_identifier:
                genre_id = url[url.find('agid=') + 5:]
                add.directory(
                    utility.get_string(30110),
                    match_api_root[0] + match_api_base[0] + '/' +
                    match_identifier[0] + '/wigenre?genreId=' + genre_id +
                    '&full=false&from=51&to=100&_retry=0', 'list_videos', '',
                    video_type)
            elif match1:
                current_page = match1[0]
                next_page = str(int(current_page) + 1)
                add.directory(
                    utility.get_string(30110),
                    url.replace('&pn=' + current_page + '&',
                                '&pn=' + next_page + '&'), 'list_videos', '',
                    video_type)
            elif match2:
                current_from = match2[0]
                next_from = str(int(current_from) + 50)
                current_to = str(int(current_from) + 49)
                next_to = str(int(current_from) + 99)
                add.directory(
                    utility.get_string(30110),
                    url.replace('&from=' + current_from + '&',
                                '&from=' + next_from + '&').replace(
                                    '&to=' + current_to + '&',
                                    '&to=' + next_to + '&'), 'list_videos', '',
                    video_type)
            if utility.get_setting(
                    'force_view') == 'true' and not run_as_widget:
                xbmc.executebuiltin('Container.SetViewMode(' +
                                    utility.get_setting('view_id_videos') +
                                    ')')
        xbmcplugin.endOfDirectory(plugin_handle)
    else:
        delete.cookies()
        utility.log('User is not logged in.', loglevel=xbmc.LOGERROR)
        utility.notification(utility.get_string(30303))
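videos() above tries six regex patterns one after another and keeps the first that matches anything. A loop over a pattern list expresses the same fallback chain more compactly; this sketch reuses the exact patterns from the function and is only an illustration, not the add-on's actual code.

import re

# The same six fallback patterns videos() tries in order (first non-empty result wins).
VIDEO_ID_PATTERNS = [
    '"\$type":"leaf",.*?"id":([0-9]+)',
    '<a href="\/watch\/([0-9]+)',
    '<span id="dbs(.+?)_.+?alt=".+?"',
    '<span class="title.*?"><a id="b(.+?)_',
    '"boxart":".+?","titleId":(.+?),',
    'WiPlayer\?movieid=([0-9]+?)&',
]


def find_video_ids(content):
    # Return the matches of the first pattern that finds anything, else an empty list.
    for pattern in VIDEO_ID_PATTERNS:
        match = re.compile(pattern, re.DOTALL).findall(content)
        if match:
            return match
    return []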
Example #55
0
def video(video_id, title, thumb_url, is_episode, hide_movies, video_type, url):
    added = False
    director = ''
    genre = ''
    playcount = 0
    video_details = get.video_info(video_id)
    match = json.loads(video_details)['value']['videos'][video_id]
    if not title:
        title = match['title']
    year = match['releaseYear']
    if not thumb_url:
        try:
            thumb_url = match['boxarts']['_665x375']['jpg']['url']
        except Exception:
            try:
                thumb_url = match['boxarts']['_342x192']['jpg']['url']
            except Exception:
                thumb_url = utility.addon_fanart()
    mpaa = match['maturity']['rating']['value']
    duration = match['runtime']
    offset = match['bookmarkPosition']
    try:
        if duration > 0 and float(offset) / float(duration) >= 0.9:
            playcount = 1
    except Exception:
        pass
    type = match['summary']['type']
    if type == 'movie':
        video_type_temp = type
    else:
        video_type_temp = 'tv'
        if is_episode:
            type = 'episode'
        else:
            type = 'tvshow'
            duration = ''
    if utility.get_setting('use_tmdb') == 'true':
        year_temp = year
        title_temp = title
        if ' - ' in title_temp:
            title_temp = title_temp[title_temp.index(' - '):]
        filename = video_id + '.jpg'
        filename_none = video_id + '.none'
        cover_file = xbmc.translatePath(utility.cover_cache_dir() + filename)
        cover_file_none = xbmc.translatePath(utility.cover_cache_dir() + filename_none)
        if not (xbmcvfs.exists(cover_file) or xbmcvfs.exists(cover_file_none)):
            utility.log('Downloading cover art. type: %s, video_id: %s, title: %s, year: %s' % (video_type_temp,
                                                                                                video_id, title_temp,
                                                                                                year_temp))
            get.cover(video_type_temp, video_id, title_temp, year_temp)
    description = match['details']['synopsis']
    try:
        director = match['details']['directors'][0]['name']
    except Exception:
        pass
    try:
        genre = match['details']['genres'][0]['name']
    except Exception:
        pass
    rating = match['userRating']['average']
    next_mode = 'play_video_main'
    if utility.get_setting('browse_tv_shows') == 'true' and type == 'tvshow':
        next_mode = 'list_seasons'
    if '/my-list' in url and video_type_temp == video_type:
        add.video(title, video_id, next_mode, thumb_url, type, description, duration, year, mpaa,
                  director, genre, rating, playcount, remove=True)
        added = True
    elif type == 'movie' and hide_movies:
        pass
    elif video_type_temp == video_type or video_type == 'both':
        add.video(title, video_id, next_mode, thumb_url, type, description, duration, year, mpaa,
                  director, genre, rating, playcount)
        added = True
    return added
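video() above marks a title as watched when the saved bookmark position has reached 90% or more of its runtime. A minimal, standalone version of that heuristic, with is_watched as an illustrative name:

def is_watched(offset, duration, threshold=0.9):
    # Illustrative helper: an item counts as watched once the stored bookmark
    # offset has reached the given fraction of the total runtime.
    try:
        return duration > 0 and float(offset) / float(duration) >= threshold
    except (TypeError, ValueError):
        return False


# e.g. a 3600 s title with a bookmark at 3300 s is ~92% played:
playcount = 1 if is_watched(3300, 3600) else 0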
Example #56
0
def video(video_id, title, thumb_url, is_episode, hide_movies, video_type,
          url):
    added = False
    year = ''
    mpaa = ''
    duration = ''
    description = ''
    director = ''
    genre = ''
    rating = 0.0
    video_details = get.video_info(video_id)
    match = re.compile('<span class="title.*?>(.+?)</span',
                       re.DOTALL).findall(video_details)
    if not title:
        title = match[0].strip()
    match = re.compile('<span class="year.*?>(.+?)</span',
                       re.DOTALL).findall(video_details)
    if match:
        year = match[0].partition('-')[0]
    if not thumb_url:
        match = re.compile('src="(.+?)"', re.DOTALL).findall(video_details)
        thumb_url = match[0].replace('/webp/',
                                     '/images/').replace('.webp', '.jpg')
    match = re.compile('<span class="mpaaRating.*?>(.+?)</span',
                       re.DOTALL).findall(video_details)
    if match:
        mpaa = match[0].strip()
    match = re.compile('<span class="duration.*?>(.+?)</span',
                       re.DOTALL).findall(video_details)
    if match:
        duration = match[0].lower()
    if duration.split(' ')[-1].startswith('min'):
        type = 'movie'
        video_type_temp = type
        duration = duration.split(' ')[0]
    else:
        video_type_temp = 'tv'
        if is_episode:
            type = 'episode'
        else:
            type = 'tvshow'
        duration = ''
    if utility.get_setting('use_tmdb') == 'true':
        year_temp = year
        title_temp = title
        if ' - ' in title_temp:
            title_temp = title_temp[title_temp.index(' - '):]
        if '-' in year_temp:
            year_temp = year_temp.split('-')[0]
        filename = utility.clean_filename(video_id) + '.jpg'
        filename_none = utility.clean_filename(video_id) + '.none'
        cover_file = xbmc.translatePath(utility.cover_cache_dir() + filename)
        cover_file_none = xbmc.translatePath(utility.cover_cache_dir() +
                                             filename_none)
        if not (xbmcvfs.exists(cover_file) or xbmcvfs.exists(cover_file_none)):
            utility.log(
                'Downloading cover art. type: %s, video_id: %s, title: %s, year: %s'
                % (video_type_temp, video_id, title_temp, year_temp))
            get.cover(video_type_temp, video_id, title_temp, year_temp)
    match = re.compile('src=".+?">.*?<.*?>(.+?)<',
                       re.DOTALL).findall(video_details)
    if match:
        description_temp = match[0]
        # replace literal 'u2013'/'u2026' remnants with the real en dash and ellipsis (seen in Norwegian descriptions)
        description_temp = description_temp.replace('u2013',
                                                    unicode('\u2013')).replace(
                                                        'u2026',
                                                        unicode('\u2026'))
        description = utility.unescape(description_temp)
    match = re.compile('Director:</dt><dd>(.+?)<',
                       re.DOTALL).findall(video_details)
    if match:
        director = match[0].strip()
    match = re.compile('<span class="genre.*?>(.+?)</span',
                       re.DOTALL).findall(video_details)
    if match:
        genre = match[0]
    match = re.compile('<span class="rating">(.+?)</span',
                       re.DOTALL).findall(video_details)
    if len(match) > 0:
        rating = float(match[0])
    title = utility.unescape(title)
    next_mode = 'play_video_main'
    if utility.get_setting('browse_tv_shows') == 'true' and type == 'tvshow':
        next_mode = 'list_seasons'
    if '/my-list' in url and video_type_temp == video_type:
        add.video(title,
                  video_id,
                  next_mode,
                  thumb_url,
                  type,
                  description,
                  duration,
                  year,
                  mpaa,
                  director,
                  genre,
                  rating,
                  remove=True)
        added = True
    elif type == 'movie' and hide_movies:
        pass
    elif video_type_temp == video_type or video_type == 'both':
        add.video(title, video_id, next_mode, thumb_url, type, description,
                  duration, year, mpaa, director, genre, rating)
        added = True
    return added
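The "Norwegian problem" replacement above aims to turn stray u2013/u2026 text back into the real en dash and ellipsis characters. With explicit unicode literals the intent is easier to see; this is only an illustrative sketch, assuming plain Python 2 string handling:

def fix_stray_escapes(text):
    # Illustrative helper: restore characters whose escapes arrived as the literal
    # text 'u2013' (en dash) and 'u2026' (horizontal ellipsis) in descriptions.
    return text.replace('u2013', u'\u2013').replace('u2026', u'\u2026')


# fix_stray_escapes(u'En tittel u2013 del 1u2026') -> u'En tittel \u2013 del 1\u2026'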
Example #57
0
from __future__ import unicode_literals

import base64
import urllib

import list
import resources.lib.tmdbsimple as tmdbsimple
import utility

tmdbsimple.API_KEY = base64.b64decode('NDc2N2I0YjJiYjk0YjEwNGZhNTUxNWM1ZmY0ZTFmZWM=')
language = utility.get_setting('language').split('-')[0]


def netflix(video_type):
    search_string = utility.keyboard()
    if search_string:
        list.search(search_string, video_type)


def tmdb(video_type, title, year=None):
    search = tmdbsimple.Search()
    if video_type.startswith('tv'):
        content = search.tv(query=utility.encode(title), first_air_date_year=year, language=language,
                            include_adult='true')
        if content['total_results'] == 0:
            content = search.tv(query=utility.encode(title), language=language, include_adult='true')
            if content['total_results'] == 0:
                if '(' in title:
                    title = title[:title.find('(')]
                    content = search.tv(query=utility.encode(title), first_air_date_year=year, language=language,
                                        include_adult='true')
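tmdb() above progressively relaxes the query (drop the year, then trim the title at '(' and retry). The same tmdbsimple.Search calls can be exercised on their own, roughly as below; this assumes the standalone tmdbsimple package, and the API key and title are placeholders:

import tmdbsimple

tmdbsimple.API_KEY = 'your-tmdb-api-key'  # placeholder, not the add-on's key

search = tmdbsimple.Search()
# Same call shape as in the function above; 'Example Show' is a placeholder title.
response = search.tv(query='Example Show', first_air_date_year=2015,
                     language='en', include_adult='true')
if response['total_results'] == 0:
    # Relax the query the same way tmdb() does: retry without the year.
    response = search.tv(query='Example Show', language='en', include_adult='true')
for item in search.results:
    print('%s (%s)' % (item.get('name'), item.get('first_air_date')))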
Example #58
0
def update_displayed():
    menu_path = xbmc.getInfoLabel('Container.FolderPath')
    if utility.get_setting('show_profiles') != 'true':
        utility.set_setting('selected_profile', None)
        connect.save_session()
    xbmc.executebuiltin('Container.Update(' + menu_path + ')')
Example #59
0
def login():
    login_progress = xbmcgui.DialogProgress()
    login_progress.create('Netflix', utility.get_string(30200) + '...')
    utility.progress_window(login_progress, 25, utility.get_string(30201))
    connect.session.cookies.clear()
    content = utility.decode(connect.load_site(utility.main_url + '/Login'))
    match = re.compile('"locale":"(.+?)"', re.DOTALL | re.IGNORECASE).findall(content)
    if match and not utility.get_setting('language'):
        utility.log('Setting language: ' + match[0])
        utility.set_setting('language', match[0])
    if 'Sorry, Netflix ' not in content:
        match = re.compile('id="signout".+?authURL=(.+?)"', re.DOTALL).findall(content)
        if match:
            utility.log('Setting authorization url: ' + match[0])
            utility.set_setting('authorization_url', match[0])
        if 'id="page-LOGIN"' in content:
            match = re.compile('name="authURL" value="(.+?)"', re.DOTALL).findall(content)
            utility.log('Setting authorization url: ' + match[0])
            utility.set_setting('authorization_url', match[0])
            post_data = {'authURL': match[0], 'email': utility.get_setting('username'),
                         'password': utility.get_setting('password'), 'RememberMe': 'on'}
            utility.progress_window(login_progress, 50, utility.get_string(30202))
            content = utility.decode(connect.load_site(utility.main_url + '/Login?locale=' +
                                                       utility.get_setting('language'), post=post_data))
            if 'id="page-LOGIN"' in content:
                utility.notification(utility.get_string(30303))
                return False
            match = re.compile('"locale":"(.+?)"', re.DOTALL | re.IGNORECASE).findall(content)
            if match and not utility.get_setting('language'):
                utility.log('Setting language: ' + match[0])
                utility.set_setting('language', match[0])
            match = re.compile('"country":"(.+?)"', re.DOTALL | re.IGNORECASE).findall(content)
            if match:
                utility.log('Setting country code: ' + match[0])
                utility.set_setting('country_code', match[0])
            connect.save_session()
            utility.progress_window(login_progress, 75, utility.get_string(30203))
        if not (utility.get_setting('selected_profile') or (utility.get_setting('single_profile') == 'true')):
            profiles.choose()
        elif not (utility.get_setting('single_profile') == 'true') and (utility.get_setting('show_profiles') == 'true'):
            profiles.choose()
        elif not ((utility.get_setting('single_profile') == 'true') and
                  (utility.get_setting('show_profiles') == 'true')):
            profiles.load()
        else:
            profiles.get_my_list_change_authorisation()
        if utility.get_setting('is_kid') != 'true':
            content = utility.decode(connect.load_site(utility.main_url + '/browse'))
            match = re.compile('"version":{"app":"(.+?)"').findall(content)
            netflix_application, netflix_id = match[0].split('-')
            utility.set_setting('netflix_application', netflix_application)
            utility.set_setting('netflix_id', netflix_id)
        if login_progress:
            if not utility.progress_window(login_progress, 100, utility.get_string(30204)):
                return False
            xbmc.sleep(500)
            login_progress.close()
        return True
    else:
        utility.notification(utility.get_string(30300))
        if login_progress:
            login_progress.close()
        return False
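login() drives the whole flow off two scraped values: the authURL token and the locale. The regexes can be checked in isolation against a made-up fragment (the markup below is purely illustrative, not real Netflix HTML):

import re

# Purely illustrative markup containing the two values login() scrapes.
sample = '<input type="hidden" name="authURL" value="abc123%3D"/> "locale":"nb-NO"'

auth_url = re.compile('name="authURL" value="(.+?)"', re.DOTALL).findall(sample)
locale = re.compile('"locale":"(.+?)"', re.DOTALL | re.IGNORECASE).findall(sample)

print(auth_url)  # ['abc123%3D']
print(locale)    # ['nb-NO']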
Example #60
0
def run(hdf5_data, custom_config):
    """
    Run the postprocessor.
    Args:
        hdf5_data: object, the opened hdf5 file
        custom_config: dict, the custom configuration dictionary
    """

    logger = logging.getLogger(__name__)
    signature = __name__ + '.run(hdf5_data, custom_config)'
    # No need to log the parameter of the method here as it will only be duplicate.
    # This function is never called directly by the user and always call from the postprocess function
    # which already logs the configuration.
    utility.log_entrance(logger, signature, {})

    logger.info('Initialising the post processing steps')

    logger.info('Reading environment data ...')
    environment = utility.read_environment(hdf5_data)
    logger.info('Read environment data' + str(environment))

    logger.info('Reading simulation results')
    result = read_results(hdf5_data)
    logger.info('Read solver result ' + str(result))

    logger.info('Post processing initialisation done!')

    # Saving to hdf5 file
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS, result.added_mass.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_ATTR)
    dset[:, :, :] = result.added_mass
    logger.info('Saved ' + str(structure.H5_RESULTS_ADDED_MASS_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_ADDED_MASS + ' with characteristics ' +
                str(dset))

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_RADIATION_DAMPING, result.radiation_damping.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_RADIATION_DAMPING_ATTR)
    dset[:, :, :] = result.radiation_damping
    logger.info('Saved ' + str(structure.H5_RESULTS_RADIATION_DAMPING_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_RADIATION_DAMPING + ' with characteristics ' +
                str(dset))

    excitation_forces = result.diffraction_force + result.froudkrylov_force
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_EXCITATION_FORCES, excitation_forces.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_EXCITATION_FORCES_ATTR)
    dset[:, :, :] = excitation_forces
    logger.info('Saved ' + str(structure.H5_RESULTS_EXCITATION_FORCES_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_EXCITATION_FORCES + ' with characteristics ' +
                str(dset))

    tec_file = utility.get_setting(settings.RADIATION_COEFFICIENTS_TEC_FILE, custom_config,
                                   'RADIATION_COEFFICIENTS_TEC_FILE')
    if tec_file:
        save_radiation_coefficients(result, tec_file)
        logger.info('Radiation coefficients successfully saved in tecplot format at ' +
                    str(tec_file))
    else:
        logger.info('Radiation coefficients tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.DIFFRACTION_FORCE_TEC_FILE, custom_config,
                                   'DIFFRACTION_FORCE_TEC_FILE')

    if tec_file:
        save_diffraction_force(result, tec_file)
        logger.info('Diffraction forces successfully saved in tecplot format at ' +
                    str(tec_file))
    else:
        logger.info('Diffraction forces tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.EXCITATION_FORCE_TEC_FILE, custom_config,
                                   'EXCITATION_FORCE_TEC_FILE')
    if tec_file:
        save_excitation_force(result, tec_file)
        logger.info('Excitation forces successfully saved in tecplot format at ' +
                    str(tec_file))
    else:
        logger.info('Excitation forces tecplot format generation is disabled')

    irf = get_irf(hdf5_data, result)

    if irf.switch == 1:
        irf = compute_irf(result, irf)
        # Saving to hdf5 file
        dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS_INFINITE, irf.added_mass.shape, dtype='f')
        utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_INFINITE_ATTR)
        dset[:, :] = irf.added_mass

        tec_file = utility.get_setting(settings.IRF_TEC_FILE, custom_config,
                                       'IRF_TEC_FILE')
        if tec_file:
            save_irf(irf, tec_file)
            logger.info('IRF successfully saved in tecplot format at ' +
                        str(tec_file))
        else:
            logger.info('IRF tecplot format generation is disabled')
    else:
        logger.info('IRF computation is disabled')

    raos = np.zeros((result.n_integration, result.n_w, result.n_beta), dtype='F')
    raos = compute_raos(raos, result)

    tec_file = utility.get_setting(settings.WAVE_FIELD_TEC_FILE, custom_config,
                                   'WAVE_FIELD_TEC_FILE')

    dset = hdf5_data.get(structure.H5_SOLVER_USE_HIGHER_ORDER)
    utility.check_dataset_type(dset,
                               name=str(structure.H5_SOLVER_USE_HIGHER_ORDER_ATTR['description']),
                               location=structure.H5_SOLVER_USE_HIGHER_ORDER)
    use_higher_order = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    utility.check_dataset_type(dset,
                               name=str(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION_ATTR['description']),
                               location=structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    use_dipoles_implementation = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    utility.check_dataset_type(dset,
                               name=str(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR['description']),
                               location=structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    remove_irregular_frequencies = dset[0]

    # Wave Elevation computation and tec generation
    if result.n_theta < 1:
        tec_file = None
        logger.info('Wave elevation tecplot format generation is disabled because there are no directions (Kochin)')


    if tec_file:
        if use_higher_order != 1 and use_dipoles_implementation != 1 and remove_irregular_frequencies != 1:
            res = compute_wave_elevation(hdf5_data, environment, 0, 0, raos, result)
            save_wave_elevation(res['w'], res['etai'], res["etap"], res["eta"], res["x"], res["y"],
                                tec_file)
            logger.info('Wave elevation successfully saved in tecplot format at ' +
                        str(tec_file))
        else:
            logger.info('Wave elevation computation is not supported when the higher order panel method, ' +
                        'the dipoles implementation or the removal of irregular frequencies is enabled.' +
                        ' Disabling it.')
    else:
        logger.info('Wave elevation tecplot format generation is disabled')
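run() expects an already opened hdf5 file and a configuration dictionary. A minimal driver sketch, assuming the file is opened with h5py and that an empty dict makes utility.get_setting fall back to the settings module for every key; the file name is a placeholder:

import h5py

# Hypothetical driver: open the results file produced by the preprocessor/solver
# and hand it to the postprocessor together with an (empty) custom configuration.
custom_config = {}  # assumed to fall back to the defaults in the settings module
with h5py.File('simulation_results.h5', 'r+') as hdf5_data:  # placeholder path
    run(hdf5_data, custom_config)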