def convert_subtitles(closedcaption):
    """Fetch a TTML caption file and write it to SUBTITLE as SRT.

    Subtracts one hour from each begin/end timestamp and converts the
    millisecond separator from '.' to ','.  Returns True when done.
    """
    str_output = ''
    subtitle_data = _connection.getURL(closedcaption, connectiontype=0)
    # Parse only the <div> container that holds the <p> cue elements.
    subtitle_data = BeautifulSoup(subtitle_data,
                                  'html.parser',
                                  parse_only=SoupStrainer('div'))
    lines = subtitle_data.find_all('p')
    for i, line in enumerate(lines):
        if line is not None:
            sub = clean_subs(_common.smart_utf8(line))
            start_time_hours, start_time_rest = line['begin'].split(':', 1)
            # NOTE(review): source timestamps appear to be offset by +1 hour;
            # confirm against the feed before changing this adjustment.
            start_time_hours = '%02d' % (int(start_time_hours) - 1)
            start_time = _common.smart_utf8(start_time_hours + ':' +
                                            start_time_rest.replace('.', ','))
            end_time_hours, end_time_rest = line['end'].split(':', 1)
            end_time_hours = '%02d' % (int(end_time_hours) - 1)
            end_time = _common.smart_utf8(end_time_hours + ':' +
                                          end_time_rest.replace('.', ','))
            str_output += str(
                i + 1
            ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
    file = open(SUBTITLE, 'w')
    file.write(str_output)
    file.close()
    return True
def convert_subtitles(closedcaption):
	"""Fetch TTML captions and write them to _common.SUBTITLE as SRT.

	Consecutive <p> cues that share begin/end times are merged into one
	numbered SRT entry instead of being emitted twice.
	"""
	str_output = ''
	subtitle_data = _connection.getURL(closedcaption, connectiontype = 0)
	subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
	srt_output = ''
	lines = subtitle_data.find_all('p')
	i = 0
	last_start_time = ''
	last_end_time = ''
	for line in lines:
		try:
			if line is not None:
				sub = clean_subs(_common.smart_utf8(line))
				# SRT uses ',' (not '.') before the milliseconds.
				start_time = _common.smart_utf8(line['begin'].replace('.', ','))
				end_time = _common.smart_utf8(line['end'].replace('.', ','))
				if start_time != last_start_time and end_time != last_end_time:
					str_output += '\n' + str(i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n'
					i = i + 1
					last_end_time = end_time
					last_start_time = start_time
				else:
					# Same timing as the previous cue: append to that entry.
					str_output +=  sub + '\n\n'
		except:
			# NOTE(review): bare except silently drops malformed cues.
			pass
	file = open(_common.SUBTITLE, 'w')
	file.write(str_output)
	file.close()
# Exemple #3 (scraper artifact; not code)
def convert_subtitles(video_guid):
    """Fetch JSON captions for *video_guid* and write them to SUBTITLE as SRT.

    startTime/endTime are fractional seconds; they are reformatted as
    HH:MM:SS,cc for SRT.  Returns True when done.
    """
    str_output = ''
    subtitle_data = _connection.getURL(CLOSEDCAPTION % video_guid,
                                       connectiontype=0)
    subtitle_data = simplejson.loads(subtitle_data)
    for i, subtitle_line in enumerate(subtitle_data):
        if subtitle_line is not None:
            sub = _common.smart_utf8(subtitle_line['metadata']['Text'])
            # Split whole seconds from the fractional part.
            start_time = _common.smart_utf8(str(
                subtitle_line['startTime'])).split('.')
            start_minutes, start_seconds = divmod(int(start_time[0]), 60)
            start_hours, start_minutes = divmod(start_minutes, 60)
            start_time = '%02d:%02d:%02d,%02d' % (start_hours, start_minutes,
                                                  start_seconds,
                                                  int(start_time[1][0:2]))
            end_time = _common.smart_utf8(str(
                subtitle_line['endTime'])).split('.')
            end_minutes, end_seconds = divmod(int(end_time[0]), 60)
            end_hours, end_minutes = divmod(end_minutes, 60)
            end_time = '%02d:%02d:%02d,%02d' % (
                end_hours, end_minutes, end_seconds, int(end_time[1][0:2]))
            str_output += str(
                i + 1
            ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
    file = open(SUBTITLE, 'w')
    file.write(str_output)
    file.close()
    return True
# Exemple #4 (scraper artifact; not code)
def convert_subtitles(closedcaption):
    """Fetch TTML captions and write them to _common.SUBTITLE as SRT.

    Consecutive <p> cues that share begin/end times are merged into a single
    numbered SRT entry.
    """
    str_output = ''
    subtitle_data = _connection.getURL(closedcaption, connectiontype=0)
    subtitle_data = BeautifulSoup(subtitle_data,
                                  'html.parser',
                                  parse_only=SoupStrainer('div'))
    srt_output = ''
    lines = subtitle_data.find_all('p')
    i = 0
    last_start_time = ''
    last_end_time = ''
    for line in lines:
        try:
            if line is not None:
                sub = clean_subs(_common.smart_utf8(line))
                # SRT uses ',' (not '.') before the milliseconds.
                start_time = _common.smart_utf8(line['begin'].replace(
                    '.', ','))
                end_time = _common.smart_utf8(line['end'].replace('.', ','))
                if start_time != last_start_time and end_time != last_end_time:
                    str_output += '\n' + str(
                        i + 1
                    ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n'
                    i = i + 1
                    last_end_time = end_time
                    last_start_time = start_time
                else:
                    # Same timing as the previous cue: append to that entry.
                    str_output += sub + '\n\n'
        except:
            # NOTE(review): bare except silently drops malformed cues.
            pass
    file = open(_common.SUBTITLE, 'w')
    file.write(str_output)
    file.close()
def convert_subtitles(closedcaption):
	"""Convert (url, duration, index) caption entries to per-part SRT files.

	The caption clock can run ahead of the video: the offset is estimated as
	(last cue end time) - (expected duration) and subtracted from every
	timestamp.  One subtitle-<n>.srt is written to CACHEPATH per entry, with
	cue numbering continuing across files.
	"""
	str_output = ''
	j = 0
	count = 0
	for closedcaption_url, duration, i in closedcaption:
		count = int(i) + 1
		if closedcaption_url is not None:
			subtitle_data = _connection.getURL(closedcaption_url['src'], connectiontype = 0)
			subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
			lines = subtitle_data.find_all('p')
			# Estimate the caption clock offset from the final cue's end time.
			last_line = lines[-1]
			end_time = last_line['end'].split('.')[0].split(':')
			file_duration = int(end_time[0]) * 60 * 60 + int(end_time[1]) * 60 + int(end_time[2])
			delay = int(file_duration) - int(duration)
			for i, line in enumerate(lines):
				if line is not None:
					try:
						sub = clean_subs(_common.smart_utf8(line))
						# Shift each timestamp back by the delay and trim the
						# microseconds to millisecond precision for SRT.
						start_time = _common.smart_utf8(datetime.datetime.strftime(datetime.datetime.strptime(line['begin'], '%H:%M:%S.%f') - datetime.timedelta(seconds = int(delay)),'%H:%M:%S,%f'))[:-4]
						end_time = _common.smart_utf8(datetime.datetime.strftime(datetime.datetime.strptime(line['end'], '%H:%M:%S.%f') - datetime.timedelta(seconds = int(delay)),'%H:%M:%S,%f'))[:-4]
						str_output += str(j + i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
					except:
						# NOTE(review): bare except silently drops bad cues.
						pass
			j = j + i + 1
			file = open(os.path.join(_common.CACHEPATH, 'subtitle-%s.srt' % str(count)), 'w')
			file.write(str_output)
			str_output=''
			file.close()
# Exemple #6 (scraper artifact; not code)
def convert_subtitles(closedcaption):
    """Fetch TTML captions and write them to _common.SUBTITLE as SRT.

    Timestamps arrive as H:MM:SS:mmm; the final ':' is swapped for the ','
    SRT expects.  Cues with an unparseable end time are skipped.
    """
    str_output = ''
    subtitle_data = _connection.getURL(closedcaption, connectiontype=0)
    subtitle_data = BeautifulSoup(subtitle_data,
                                  'html.parser',
                                  parse_only=SoupStrainer('div'))
    lines = subtitle_data.find_all('p')
    for i, line in enumerate(lines):
        if line is not None:
            sub = clean_subs(_common.smart_utf8(line))
            start_time_rest, start_time_msec = line['begin'].rsplit(':', 1)
            start_time = _common.smart_utf8(start_time_rest + ',' +
                                            start_time_msec)
            try:
                end_time_rest, end_time_msec = line['end'].rsplit(':', 1)
                end_time = _common.smart_utf8(end_time_rest + ',' +
                                              end_time_msec)
            except:
                # No usable end time: drop the cue entirely.
                continue
            str_output += str(
                i + 1
            ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
    file = open(_common.SUBTITLE, 'w')
    file.write(str_output)
    file.close()
def convert_subtitles(closedcaption, durations=None):
    """Write one numbered SRT file per caption URL into CACHEPATH.

    Each entry in *closedcaption* is a tag with a 'src' attribute; one
    subtitle-<n>.srt is written per entry, with cue numbering continuing
    across files.  *durations* is accepted for interface compatibility but
    is not used.
    """
    if durations is None:  # avoid the shared mutable-default pitfall
        durations = []
    str_output = ''
    j = 0
    count = 0
    for closedcaption_url in closedcaption:
        count = count + 1
        subtitle_data = _connection.getURL(closedcaption_url['src'],
                                           connectiontype=0)
        subtitle_data = BeautifulSoup(subtitle_data,
                                      'html.parser',
                                      parse_only=SoupStrainer('div'))
        lines = subtitle_data.find_all('p')
        i = -1  # guard: keeps numbering correct when a file has no cues
        for i, line in enumerate(lines):
            if line is not None:
                sub = clean_subs(_common.smart_utf8(line))
                # Trim the trailing character and switch the millisecond
                # separator to the ',' SRT expects.
                start_time = _common.smart_utf8(line['begin'][:-1].replace(
                    '.', ','))
                end_time = _common.smart_utf8(line['end'][:-1].replace(
                    '.', ','))
                str_output += str(
                    j + i + 1
                ) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
        j = j + i + 1
        file = open(
            os.path.join(_common.CACHEPATH, 'subtitle-%s.srt' % int(count)),
            'w')
        file.write(str_output)
        str_output = ''
        file.close()
# Exemple #8 (scraper artifact; not code)
def convert_subtitles(closedcaption):
	"""Fetch TTML captions and write them to _common.SUBTITLE as SRT.

	Straight one-cue-per-entry conversion; only the millisecond separator
	is changed from '.' to ','.
	"""
	str_output = ''
	subtitle_data = _connection.getURL(closedcaption, connectiontype = 0)
	subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
	srt_output = ''
	lines = subtitle_data.find_all('p')
	for i, line in enumerate(lines):
		if line is not None:
			sub = clean_subs(_common.smart_utf8(line))
			start_time = _common.smart_utf8(line['begin'].replace('.', ','))
			end_time = _common.smart_utf8(line['end'].replace('.', ','))
			str_output += str(i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
	file = open(_common.SUBTITLE, 'w')
	file.write(str_output)
	file.close()
def convert_subtitles(closedcaption):
	"""Download a caption file, clean it, and save it to _common.SUBTITLESMI.

	The captions are written verbatim after cleaning; no SRT conversion is
	performed here.  (Removed an unused `str_output` local.)
	"""
	subtitle_data = _connection.getURL(closedcaption, connectiontype = 0)
	subtitle_data = clean_subs(_common.smart_utf8(subtitle_data))
	file = open(_common.SUBTITLESMI, 'w')
	file.write(subtitle_data)
	file.close()
def masterlist():
	"""Page through the COVE programs API and build the master show list.

	Shows must also appear in the SHOWS menu, must not be under the
	'PBS Kids' nola root, and must not be blogs.  Shows resolving to the
	same TVDB name are merged into one comma-separated 'name#season_url'
	string.  Returns (tvdb_name, SITE, 'seasons', season_url) tuples.
	"""
	master_start = 0
	master_count = 200
	master_db = []
	master_dict = {}
	master_check = []
	master_menu = simplejson.loads(_connection.getURL(SHOWS, header = {'X-Requested-With' : 'XMLHttpRequest'}))
	for master_item in master_menu.itervalues():
		for master_item in master_item:
			master_check.append(master_item['title'])
	# Page through the API until the reported total count is reached.
	while master_start < master_count:
		master_data = cove.programs.filter(fields = 'mediafiles', order_by = 'title', limit_start = master_start)
		master_menu = master_data['results']
		master_count = master_data['count']
		master_stop = master_data['stop']
		del master_data
		for master_item2 in master_menu:
			website = master_item2['website']
			if website is None:
				website = ''
			if master_item2['title'] in master_check and ('PBS Kids' !=  master_item2['nola_root']) and ('blog' not in website):
				master_name = _common.smart_utf8(master_item2['title'])
				tvdb_name = _common.get_show_data(master_name, SITE, 'seasons')[-1]
				# The program id is embedded in the resource URI.
				season_url = re.compile('/cove/v1/programs/(.*?)/').findall(master_item2['resource_uri'])[0]
				if tvdb_name not in master_dict.keys():
					master_dict[tvdb_name] = _common.smart_unicode(master_name) + '#' +season_url
				else:
					master_dict[tvdb_name] = master_dict[tvdb_name] + ',' + master_name + '#' + season_url
		master_start = master_stop
	for master_name in master_dict:
		season_url = master_dict[master_name]
		master_db.append((master_name, SITE, 'seasons', season_url))
	return master_db
def masterlist():
	"""Build the master show list: one (name, SITE, 'seasons', url) per item."""
	show_listing = simplejson.loads(_connection.getURL(SHOWS))
	master_db = []
	for show in show_listing['items']:
		title = _common.smart_utf8(show['title'])
		# The URL-quoted title doubles as the seasons lookup key.
		master_db.append((title, SITE, 'seasons', urllib.quote_plus(title)))
	return master_db
def masterlist():
	"""Build the master show list from the <allcollections> XML feed.

	Each entry's season_url encodes the collection id plus '-'-joined season
	numbers for TV episodes and clips.  '[AD]' audio-described duplicates
	are skipped.
	"""
	master_db = []
	master_dict = {}
	master_data = _connection.getURL(SHOWS)
	master_tree = BeautifulSoup(master_data, 'html.parser')
	master_menu = master_tree.allcollections.find_all('collection')
	for master_item in master_menu:
		master_name = _common.smart_utf8(master_item['name'])
		if '[AD]' not in master_name:
			tvdb_name = _common.get_show_data(master_name, SITE, 'seasons')[-1]
			season_url = master_item['id'] 
			season_url = season_url + '#tveepisodes='
			try:
				for season in master_item.tveepisodes.find_all('season'):
					season_url = season_url + '-' + season['number']
			except:
				pass
			season_url = season_url + '#clips='
			try:
				for season in master_item.clips.find_all('season'):
					if season['number'] != '':
						season_url = season_url + '-' + season['number']
				# NOTE(review): this 'else' belongs to the 'for' (for-else),
				# so '-*' is appended after every loop that runs to
				# completion; it may have been intended as the else of the
				# 'if' above — confirm before changing.
				else:
					season_url = season_url + '-' + '*'
			except:
				pass
			master_db.append((master_name,  SITE, 'seasons', season_url))
	return master_db
# Exemple #13 (scraper artifact; not code)
def rootlist():
	"""Page through the COVE programs API and register root shows.

	Shows must also appear in the SHOWS menu and must not be pbskids.org
	content.  Each match is added via _common.add_show.
	"""
	root_start = 0
	root_count = 200
	root_check = []
	root_menu = simplejson.loads(_connection.getURL(SHOWS, header = {'X-Requested-With' : 'XMLHttpRequest'}))
	for root_item in root_menu.itervalues():
		for root_item in root_item:
			root_check.append(root_item['title'])
	# Page through the API until the reported total count is reached.
	while root_start < root_count:
		root_data = cove.programs.filter(fields = 'mediafiles', order_by = 'title', limit_start = root_start)
		root_menu = root_data['results']
		root_count = root_data['count']
		root_stop = root_data['stop']
		del root_data
		for root_item2 in root_menu:
			if root_item2['website'] is None:
				root_website = ''
			else:
				root_website = root_item2['website']
			if (root_item2['title'] in root_check) and ('pbskids.org' not in root_website):
				root_name = _common.smart_utf8(root_item2['title'])
				# The program id is embedded in the resource URI.
				season_url = re.compile('/cove/v1/programs/(.*?)/').findall(root_item2['resource_uri'])[0]
				_common.add_show(root_name,  SITE, 'seasons', season_url)
		root_start = root_stop
	_common.set_view('tvshows')
# Exemple #14 (scraper artifact; not code)
def masterlist():
	"""Page through the COVE programs API and build the master show list.

	Shows must also appear in the SHOWS menu and must not be pbskids.org
	content.  Returns (name, SITE, 'seasons', program_id) tuples.
	"""
	master_start = 0
	master_count = 200
	master_db = []
	master_check = []
	master_menu = simplejson.loads(_connection.getURL(SHOWS, header = {'X-Requested-With' : 'XMLHttpRequest'}))
	for master_item in master_menu.itervalues():
		for master_item in master_item:
			master_check.append(master_item['title'])
	# Page through the API until the reported total count is reached.
	while master_start < master_count:
		master_data = cove.programs.filter(fields = 'mediafiles', order_by = 'title', limit_start = master_start)
		master_menu = master_data['results']
		master_count = master_data['count']
		master_stop = master_data['stop']
		del master_data
		for master_item2 in master_menu:
			if master_item2['website'] is None:
				master_website = ''
			else:
				master_website = master_item2['website']
			if master_item2['title'] in master_check and ('pbskids.org' not in master_website):
				master_name = _common.smart_utf8(master_item2['title'])
				# The program id is embedded in the resource URI.
				season_url = re.compile('/cove/v1/programs/(.*?)/').findall(master_item2['resource_uri'])[0]
				master_db.append((master_name, SITE, 'seasons', season_url))
		master_start = master_stop
	return master_db
def rootlist():
	"""Populate the root show listing from the SHOWS page anchors."""
	page = BeautifulSoup(_connection.getURL(SHOWS), 'html5lib')
	# Every anchor inside the shows-list div is one show entry.
	for anchor in page.find('div', id='shows-list').find_all('a'):
		_common.add_show(_common.smart_utf8(anchor.text), SITE, 'seasons', anchor['href'])
	_common.set_view('tvshows')
# Exemple #16 (scraper artifact; not code)
def masterlist():
    """Return (name, SITE, 'seasons', href) tuples for every show anchor."""
    master_db = []
    master_data = _connection.getURL(SHOWS)
    # Name the parser explicitly ('html5lib', matching the equivalent
    # rootlist) so parsing does not depend on whichever parser bs4 happens
    # to pick on this system.
    master_tree = BeautifulSoup(master_data, 'html5lib')
    master_menu = master_tree.find('div', id='shows-list').find_all('a')
    for master_item in master_menu:
        master_name = _common.smart_utf8(master_item.text)
        master_db.append((master_name, SITE, 'seasons', master_item['href']))
    return master_db
# Exemple #17 (scraper artifact; not code)
def rootlist():
    """Add every footer 'views-row' show to the root listing."""
    root_data = _connection.getURL(SHOWS)
    # Name the parser explicitly ('html5lib', matching the equivalent
    # masterlist) so parsing does not depend on bs4's platform-dependent
    # default parser.
    root_tree = BeautifulSoup(root_data, 'html5lib')
    root_menu = root_tree.footer.find_all('li', class_='views-row')
    for root_item in root_menu:
        root_name = _common.smart_utf8(root_item.text.strip())
        season_url = root_item.a['href']
        _common.add_show(root_name, SITE, 'seasons', season_url)
    _common.set_view('tvshows')
def masterlist():
	"""Build the master show list: one (name, SITE, 'seasons', quoted-name)
	tuple per item in the SHOWS feed.

	(Removed unused `master_start`/`master_count` locals left over from a
	paginating variant of this function.)
	"""
	master_db = []
	master_menu = simplejson.loads(_connection.getURL(SHOWS))
	for master_item in master_menu['items']:
		master_name = _common.smart_utf8(master_item['title'])
		master_db.append((master_name, SITE, 'seasons', urllib.quote_plus(master_name)))
	return master_db
# Exemple #19 (scraper artifact; not code)
def convert_subtitles(closedcaption):
	"""Fetch TTML captions and write them to _common.SUBTITLE as SRT.

	Timestamps missing a fractional part get ',00' appended, and every
	timestamp is truncated to the 11-character HH:MM:SS,mm form.
	"""
	str_output = ''
	subtitle_data = _connection.getURL(closedcaption, connectiontype = 0)
	subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
	lines = subtitle_data.find_all('p')
	for i, line in enumerate(lines):
		if line is not None:
			sub = clean_subs(_common.smart_utf8(line))
			start_time = _common.smart_utf8(line['begin'].replace('.', ','))
			if ',' not in start_time:
				start_time = start_time + ',00'
			end_time = _common.smart_utf8(line['end'].replace('.', ','))
			if ',' not in end_time:
				end_time = end_time + ',00'
			str_output += str(i + 1) + '\n' + start_time[:11] + ' --> ' + end_time[:11] + '\n' + sub + '\n\n'
	file = open(_common.SUBTITLE, 'w')
	file.write(str_output)
	file.close()
def rootlist():
	"""Add every footer 'views-row' show to the root listing."""
	root_data = _connection.getURL(SHOWS)
	# Name the parser explicitly ('html5lib', matching the equivalent
	# masterlist) so parsing does not depend on bs4's platform-dependent
	# default parser.
	root_tree = BeautifulSoup(root_data, 'html5lib')
	root_menu = root_tree.footer.find_all('li', class_ = 'views-row')
	for root_item in root_menu:
		root_name = _common.smart_utf8(root_item.text.strip())
		season_url = root_item.a['href']
		_common.add_show(root_name, SITE, 'seasons', season_url)
	_common.set_view('tvshows')
def masterlist():
	"""Return (name, SITE, 'seasons', href) tuples for every show anchor."""
	master_db = []
	master_data = _connection.getURL(SHOWS)
	# Name the parser explicitly ('html5lib', matching the equivalent
	# rootlist) so parsing does not depend on bs4's platform-dependent
	# default parser.
	master_tree = BeautifulSoup(master_data, 'html5lib')
	master_menu = master_tree.find('div', id= 'shows-list').find_all('a')
	for master_item in master_menu:
		master_name = _common.smart_utf8(master_item.text)
		master_db.append((master_name, SITE, 'seasons', master_item['href']))
	return master_db
# Exemple #22 (scraper artifact; not code)
def masterlist():
    """Return (name, SITE, 'seasons', href) tuples for footer show entries."""
    master_db = []
    master_data = _connection.getURL(SHOWS)
    master_tree = BeautifulSoup(master_data, 'html5lib')
    # Each footer 'views-row' list item holds one show link.
    master_menu = master_tree.footer.find_all('li', class_='views-row')
    for master_item in master_menu:
        master_name = _common.smart_utf8(master_item.text.strip())
        season_url = master_item.a['href']
        master_db.append((master_name, SITE, 'seasons', season_url))
    return master_db
def masterlist():
	"""Return (name, SITE, 'seasons', href) tuples for footer show entries."""
	master_db = []
	master_data = _connection.getURL(SHOWS)
	master_tree = BeautifulSoup(master_data, 'html5lib')
	# Each footer 'views-row' list item holds one show link.
	master_menu = master_tree.footer.find_all('li', class_ = 'views-row')
	for master_item in master_menu:
		master_name = _common.smart_utf8(master_item.text.strip())
		season_url = master_item.a['href']
		master_db.append((master_name, SITE, 'seasons', season_url))
	return master_db
def convert_subtitles(closedcaption):
	"""Fetch TTML captions and write them to SUBTITLE as SRT.

	Timestamps arrive as H:MM:SS:mmm; the final ':' becomes the ',' SRT
	expects.  Cues with an unparseable end time are skipped.
	"""
	str_output = ''
	subtitle_data = _connection.getURL(closedcaption, connectiontype = 0)
	subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
	lines = subtitle_data.find_all('p')
	for i, line in enumerate(lines):
		if line is not None:
			sub = clean_subs(_common.smart_utf8(line))
			start_time_rest, start_time_msec = line['begin'].rsplit(':',1)
			start_time = _common.smart_utf8(start_time_rest + ',' + start_time_msec)
			try:
				end_time_rest, end_time_msec = line['end'].rsplit(':',1)
				end_time = _common.smart_utf8(end_time_rest + ',' + end_time_msec)
			except:
				# No usable end time: drop the cue entirely.
				continue
			str_output += str(i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
	file = open(SUBTITLE, 'w')
	file.write(str_output)
	file.close()
def convert_subtitles(video_guid):
	"""Fetch JSON captions for *video_guid* and write _common.SUBTITLE as SRT.

	startTime/endTime are fractional seconds; they are reformatted as
	HH:MM:SS,cc for SRT.
	"""
	str_output = ''
	subtitle_data = _connection.getURL(CLOSEDCAPTION % video_guid, connectiontype = 0)
	subtitle_data = simplejson.loads(subtitle_data)
	for i, subtitle_line in enumerate(subtitle_data):
		if subtitle_line is not None:
			sub = _common.smart_utf8(subtitle_line['metadata']['Text'])
			# Split whole seconds from the fractional part.
			start_time = _common.smart_utf8(str(subtitle_line['startTime'])).split('.')
			start_minutes, start_seconds = divmod(int(start_time[0]), 60)
			start_hours, start_minutes = divmod(start_minutes, 60)
			start_time = '%02d:%02d:%02d,%02d' % (start_hours, start_minutes, start_seconds, int(start_time[1][0:2]))
			end_time = _common.smart_utf8(str(subtitle_line['endTime'])).split('.')
			end_minutes, end_seconds = divmod(int(end_time[0]), 60)
			end_hours, end_minutes = divmod(end_minutes, 60)
			end_time = '%02d:%02d:%02d,%02d' % (end_hours, end_minutes, end_seconds, int(end_time[1][0:2]))
			str_output += str(i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
	file = open(_common.SUBTITLE, 'w')
	file.write(str_output)
	file.close()
def convert_subtitles(closedcaption, durations=None):
	"""Write one numbered SRT file per caption URL into CACHEPATH.

	Cue numbering continues across files; *durations* is accepted for
	interface compatibility but is not used.
	"""
	if durations is None:  # avoid the shared mutable-default pitfall
		durations = []
	str_output = ''
	j = 0
	count = 0
	for closedcaption_url in closedcaption:
		count = count + 1
		subtitle_data = _connection.getURL(closedcaption_url['src'], connectiontype = 0)
		subtitle_data = BeautifulSoup(subtitle_data, 'html.parser', parse_only = SoupStrainer('div'))
		lines = subtitle_data.find_all('p')
		i = -1  # guard: keeps numbering intact when a file has no cues
		for i, line in enumerate(lines):
			if line is not None:
				sub = clean_subs(_common.smart_utf8(line))
				# Trim the trailing character and switch the millisecond
				# separator to the ',' SRT expects.
				start_time = _common.smart_utf8(line['begin'][:-1].replace('.', ','))
				end_time = _common.smart_utf8(line['end'][:-1].replace('.', ','))
				str_output += str(j + i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
		j = j + i + 1
		file = open(os.path.join(_common.CACHEPATH,'subtitle-%s.srt' % int(count)), 'w')
		file.write(str_output)
		str_output=''
		file.close()
def seasons(SITE, FULLEPISODES, CLIPSSEASON, CLIPS):
	"""List a 'Full Episodes' entry plus one directory per clip season.

	_common.args.url is expected to hold 'master_name#show_id'.
	"""
	show_id = _common.args.url
	master_name = show_id.split('#')[0]
	show_id = show_id.split('#')[1]
	_common.add_directory('Full Episodes',  SITE, 'episodes', master_name + '#' + FULLEPISODES % show_id)
	clips_data = _connection.getURL(CLIPSSEASON % show_id)
	clips_menu = simplejson.loads(clips_data)
	for season in clips_menu:
		clip_name = _common.smart_utf8(season['title'])
		_common.add_directory(clip_name,  SITE, 'episodes', master_name + '#' + CLIPS % (show_id, season['id']))
	_common.set_view('seasons')
def masterlist(NAME, MOVIES, SHOWS, SITE):
	"""Build the master list: a leading Movies entry plus one tuple per show.

	Returns (name, SITE, mode, url) tuples; each show's season_url is
	'title#ID'.  (Removed an unused `master_dict` local.)
	"""
	master_db = []
	master_db.append(('--' + NAME + ' Movies',  SITE, 'episodes', 'Movie#' + MOVIES))
	master_data = _connection.getURL(SHOWS)
	master_menu = simplejson.loads(master_data)
	for master_item in master_menu:
		master_name = _common.smart_utf8(master_item['title'])
		season_url = master_name + '#' + master_item['ID']
		master_db.append((master_name,  SITE, 'seasons', season_url))
	return master_db
# Exemple #29 (scraper artifact; not code)
def masterlist(NAME, MOVIES, SHOWS, SITE):
    """Build the master list: a leading Movies entry plus one tuple per show.

    Returns (name, SITE, mode, url) tuples; each show's season_url is
    'title#ID'.  (Removed an unused `master_dict` local.)
    """
    master_db = []
    master_db.append(
        ('--' + NAME + ' Movies', SITE, 'episodes', 'Movie#' + MOVIES))
    master_data = _connection.getURL(SHOWS)
    master_menu = simplejson.loads(master_data)
    for master_item in master_menu:
        master_name = _common.smart_utf8(master_item['title'])
        season_url = master_name + '#' + master_item['ID']
        master_db.append((master_name, SITE, 'seasons', season_url))
    return master_db
# Exemple #30 (scraper artifact; not code)
def seasons(SITE, FULLEPISODES, CLIPSSEASON, CLIPS):
    """List a 'Full Episodes' entry plus one directory per clip season.

    _common.args.url is expected to hold 'master_name#show_id'.
    """
    show_id = _common.args.url
    master_name = show_id.split('#')[0]
    show_id = show_id.split('#')[1]
    _common.add_directory('Full Episodes', SITE, 'episodes',
                          master_name + '#' + FULLEPISODES % show_id)
    clips_data = _connection.getURL(CLIPSSEASON % show_id)
    clips_menu = simplejson.loads(clips_data)
    for season in clips_menu:
        clip_name = _common.smart_utf8(season['title'])
        _common.add_directory(
            clip_name, SITE, 'episodes',
            master_name + '#' + CLIPS % (show_id, season['id']))
    _common.set_view('seasons')
def convert_subtitles(closedcaption, durations=None):
    """Write one numbered SRT file per caption URL into CACHEPATH.

    Cue numbering continues across files; entries whose URL is None are
    still counted but produce no file I/O.  *durations* is accepted for
    interface compatibility but is not used.
    """
    if durations is None:  # avoid the shared mutable-default pitfall
        durations = []
    str_output = ""
    j = 0
    count = 0
    for closedcaption_url in closedcaption:
        count = count + 1
        if closedcaption_url is not None:
            subtitle_data = _connection.getURL(closedcaption_url["src"], connectiontype=0)
            subtitle_data = BeautifulSoup(subtitle_data, "html.parser", parse_only=SoupStrainer("div"))
            lines = subtitle_data.find_all("p")
            i = -1  # guard: keeps numbering intact when a file has no cues
            for i, line in enumerate(lines):
                if line is not None:
                    sub = clean_subs(_common.smart_utf8(line))
                    try:
                        # Trim the trailing character and switch the
                        # millisecond separator to the ',' SRT expects.
                        start_time = _common.smart_utf8(line["begin"][:-1].replace(".", ","))
                        end_time = _common.smart_utf8(line["end"][:-1].replace(".", ","))
                        str_output += str(j + i + 1) + "\n" + start_time + " --> " + end_time + "\n" + sub + "\n\n"
                    except:
                        pass
            j = j + i + 1
            file = open(os.path.join(_common.CACHEPATH, "subtitle-%s.srt" % int(count)), "w")
            file.write(str_output)
            str_output = ""
            file.close()
# Exemple #32 (scraper artifact; not code)
def masterlist():
	"""Build the master show list from a plist feed.

	Shows that resolve to the same TVDB name are merged into one
	comma-separated show-id string.
	"""
	master_db = []
	master_dict = {}
	master_data = _connection.getURL(SHOWS)
	master_tree = plistlib.readPlistFromString(master_data)
	for master_item in master_tree:
		master_name = _common.smart_utf8(master_item['name'])
		tvdb_name = _common.get_show_data(master_name, SITE, 'seasons')[-1]
		if tvdb_name not in master_dict.keys():
			master_dict[tvdb_name] = master_item['show-id']
		else:
			master_dict[tvdb_name] = master_dict[tvdb_name] + ',' + master_item['show-id']
	for master_name in master_dict:
		season_url = master_dict[master_name]
		master_db.append((master_name,  SITE, 'seasons', season_url))
	return master_db
def rootlist():
	"""Register root shows from the <allcollections> XML feed.

	Collections that resolve to the same TVDB name are merged into one
	comma-separated id string before being added.
	"""
	root_dict = {}
	root_data = _connection.getURL(SHOWS)
	root_tree = BeautifulSoup(root_data, 'html5lib')
	root_menu = root_tree.allcollections.find_all('collection')
	for root_item in root_menu:
		root_name = _common.smart_utf8(root_item['name'])
		tvdb_name = _common.get_show_data(root_name, SITE, 'seasons')[-1]
		if tvdb_name not in root_dict.keys():
			root_dict[tvdb_name] = root_item['id']
		else:
			root_dict[tvdb_name] = root_dict[tvdb_name] + ',' + root_item['id']
	for root_name in root_dict:
		season_url = root_dict[root_name]
		_common.add_show(root_name,  SITE, 'seasons', season_url)
	_common.set_view('tvshows')
def masterlist(NAME, MOVIES, SHOWS, SITE):
	"""Build the master list: a Movies entry plus shows that pass filtering.

	A show is listed when it has on-demand episodes, or when clips are not
	excluded and the user has not hidden clip-only shows.  season_url is
	'title#ID#has_full_eps'.
	"""
	master_db = []
	master_dict = {}
	master_db.append(('--' + NAME + ' Movies',  SITE, 'episodes', 'Movie#' + MOVIES))
	master_data = _connection.getURL(SHOWS)
	master_menu = simplejson.loads(master_data)
	for master_item in master_menu:
		master_name = _common.smart_utf8(master_item['title'])
		if 'ondemandEpisodes' in master_item['excludedSections']:
			has_full_eps = 'false'
		else:
			has_full_eps = 'true'
		if (_addoncompat.get_setting('hide_clip_only') == 'false' and 'clips' not in master_item['excludedSections']) or has_full_eps == 'true':
			season_url = master_name + '#' + master_item['ID'] + '#' + has_full_eps
			master_db.append((master_name,  SITE, 'seasons', season_url))
	return master_db
def masterlist():
    """Build the master show list from a plist feed.

    Shows that resolve to the same TVDB name are merged into one
    comma-separated show-id string.
    """
    master_db = []
    master_dict = {}
    master_data = _connection.getURL(SHOWS)
    master_tree = plistlib.readPlistFromString(master_data)
    for master_item in master_tree:
        master_name = _common.smart_utf8(master_item["name"])
        tvdb_name = _common.get_show_data(master_name, SITE, "seasons")[-1]
        if tvdb_name not in master_dict.keys():
            master_dict[tvdb_name] = master_item["show-id"]
        else:
            master_dict[tvdb_name] = master_dict[tvdb_name] + "," + master_item["show-id"]
    for master_name in master_dict:
        season_url = master_dict[master_name]
        master_db.append((master_name, SITE, "seasons", season_url))
    return master_db
def masterlist():
	"""Build the master show list from the <allcollections> XML feed.

	Collections that resolve to the same TVDB name are merged into one
	comma-separated id string.
	"""
	master_db = []
	master_dict = {}
	master_data = _connection.getURL(SHOWS)
	master_tree = BeautifulSoup(master_data, 'html5lib')
	master_menu = master_tree.allcollections.find_all('collection')
	for master_item in master_menu:
		master_name = _common.smart_utf8(master_item['name'])
		tvdb_name = _common.get_show_data(master_name, SITE, 'seasons')[-1]
		if tvdb_name not in master_dict.keys():
			master_dict[tvdb_name] = master_item['id']
		else:
			master_dict[tvdb_name] = master_dict[tvdb_name] + ',' + master_item['id']
	for master_name in master_dict:
		season_url = master_dict[master_name]
		master_db.append((master_name,  SITE, 'seasons', season_url))
	return master_db
# Exemple #37 (scraper artifact; not code)
def convert_subtitles(video_guid):
	try:
		file = None
		dialog = xbmcgui.DialogProgress()
        	dialog.create(_common.smart_utf8(xbmcaddon.Addon(id = _common.ADDONID).getLocalizedString(39026)))
		dialog.update(0, _common.smart_utf8(xbmcaddon.Addon(id = _common.ADDONID).getLocalizedString(39027)))

		str_output = ''
		subtitle_data = _connection.getURL(CLOSEDCAPTION % video_guid, connectiontype = 0)
		subtitle_data = simplejson.loads(subtitle_data)
		lines_total = len(subtitle_data)

		dialog.update(0, _common.smart_utf8(xbmcaddon.Addon(id = _common.ADDONID).getLocalizedString(39028)))
		for i, subtitle_line in enumerate(subtitle_data):
	       	        if subtitle_line is not None and 'Text' in subtitle_line['metadata']:
				
				if (dialog.iscanceled()):
					return

				if i % 10 == 0:
					percent = int( (float(i*100) / lines_total) )
					dialog.update(percent, _common.smart_utf8(xbmcaddon.Addon(id = _common.ADDONID).getLocalizedString(30929)))

				sub = _common.smart_utf8(subtitle_line['metadata']['Text'])
				start_time = _common.smart_utf8(str(subtitle_line['startTime'])).split('.')
				start_minutes, start_seconds = divmod(int(start_time[0]), 60)
				start_hours, start_minutes = divmod(start_minutes, 60)
				start_time = '%02d:%02d:%02d,%02d' % (start_hours, start_minutes, start_seconds, int(start_time[1][0:2]))
				end_time = _common.smart_utf8(str(subtitle_line['endTime'])).split('.')
				end_minutes, end_seconds = divmod(int(end_time[0]), 60)
				end_hours, end_minutes = divmod(end_minutes, 60)
				end_time = '%02d:%02d:%02d,%02d' % (end_hours, end_minutes, end_seconds, int(end_time[1][0:2]))
				str_output += str(i + 1) + '\n' + start_time + ' --> ' + end_time + '\n' + sub + '\n\n'
		file = open(_common.SUBTITLE, 'w')
		file.write(str_output)
		file.close()
	except Exception, e:
		print "Exception: " + unicode(e)
		_common.show_exception(NAME, xbmcaddon.Addon(id = _common.ADDONID).getLocalizedString(39030))
def rootlist():
	"""Register every show from the SHOWS feed in the root listing."""
	listing = simplejson.loads(_connection.getURL(SHOWS))
	for entry in listing['items']:
		title = _common.smart_utf8(entry['title'])
		# The URL-quoted title is reused as the seasons lookup key.
		_common.add_show(title, SITE, 'seasons', urllib.quote_plus(title))
	_common.set_view('tvshows')