def admin_organization_notes(request, id=None):
    """Post-moderation of organization descriptions.

    POST: store the submitted ``note`` for organization ``id``, mark it
    accepted, then redirect back to the listing (PRG pattern).
    GET with ``id``: render one note with a heuristic injection/link warning.
    GET without ``id``: render a paginated list of not-yet-accepted notes.
    """
    if request.POST:
        note = request.POST.get('note', False)
        # `is not False` distinguishes a missing field from an empty string;
        # an empty note is still saved (same semantics as the old `!= False`).
        if note is not False:
            org = Organization.objects.get(pk=id)
            org.note = note
            org.note_accept = True
            org.save()
        # NOTE(review): redirect on any POST; the flattened original is
        # ambiguous about whether this sat inside the `note` branch — confirm.
        return HttpResponseRedirect(reverse("admin_organization_notes"))
    if id:
        # Suspicious substrings: markup/script injection plus plain,
        # percent-encoded and base64-encoded forms of "http://".
        # BUG FIX: the original list carried u"http" three times; the dead
        # duplicates are removed (matching behavior is unchanged).
        warnings = (u"script", u"javascript", u"src", u"img",
                    u"%68%74%74%70%3A%2F%2F", u"http", u"aHR0cDovLw==",
                    u"href", u"https", u"ftp")
        org = Organization.objects.get(pk=id)
        warning = bool(org.note) and any(w in org.note for w in warnings)
        return render_to_response('organizations/notes_accept.html',
                                  {'org': org, 'warning': warning},
                                  context_instance=RequestContext(request))
    else:
        orgs = Organization.objects.filter(note_accept=False)
        page = request.GET.get('page')
        try:
            page = int(page)
        except (ValueError, TypeError):
            page = 1
        p, page = pagi(page, orgs, 5)
        return render_to_response('organizations/notes_accept.html',
                                  {'p': p, 'page': page},
                                  context_instance=RequestContext(request))
def admin_organization_actions(request):
    """Paginated log of organization-related admin actions.

    Joins each ActionsLog row (object type 1 = organization) with the
    organization's current name; rows whose organization no longer exists
    get an empty name.
    """
    entries = ActionsLog.objects.select_related('profile').filter(object=1).order_by('-dtime')
    # Resolve all referenced organizations in a single query.
    wanted_ids = {entry.object_id for entry in entries}
    by_id = {org.id: org for org in Organization.objects.filter(pk__in=wanted_ids)}
    logs = []
    for entry in entries:
        matched = by_id.get(entry.object_id)
        logs.append({'log': entry, 'org': matched.name if matched else ''})
    raw_page = request.GET.get('page')
    try:
        page_number = int(raw_page)
    except (ValueError, TypeError):
        page_number = 1
    p, page = pagi(page_number, logs, 15)
    return render_to_response('organizations/actions.html',
                              {'p': p, 'page': page},
                              context_instance=RequestContext(request))
def get_user_list(request):
    """List all users (profiles) registered on the current site.

    The template depends on the site's domain; requests from an unknown
    domain are rejected explicitly instead of crashing.
    """
    current_site = request.current_site
    if current_site.domain in ('kinoinfo.ru', 'kinoafisha.in.ua'):
        template = 'api/user_controls.html'
    elif current_site.domain == 'umru.net':
        template = 'panel/umrunet_user_controls.html'
    else:
        # BUG FIX: `template` was left unbound for any other domain, which
        # raised NameError (HTTP 500). Fail with a proper 404 instead.
        raise Http404
    users = Profile.objects.select_related(
        'user', 'person').filter(site=current_site.id).order_by('id')
    result = {'status': 'user_list'}
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, users, 10)
    return render_to_response(template, {
        'result': result,
        'p': p,
        'page': page
    }, context_instance=RequestContext(request))
def organizations(request):
    """Paginated list of all organizations, filtered by one tag.

    The selected tag comes from POST, falls back to the value remembered
    in the session, and defaults to the first tag (alphabetically).
    """
    # Tag ids actually in use by organizations, resolved to tag objects.
    used_tag_ids = set(list(Organization.objects.all().values_list('tags', flat=True)))
    tags = OrganizationTags.objects.filter(pk__in=used_tag_ids).order_by('name')
    if request.POST:
        tag = int(request.POST['tags'])
    else:
        tag = request.session.get('filter_organizations_show__tag', tags[0].id)
    orgs = Organization.objects.filter(tags__id=tag)
    count = Organization.objects.all().count()
    cat_count = orgs.count()
    raw_page = request.GET.get('page')
    try:
        page_number = int(raw_page)
    except (ValueError, TypeError):
        page_number = 1
    p, page = pagi(page_number, orgs, 12)
    # Remember the chosen tag for subsequent GET requests.
    request.session['filter_organizations_show__tag'] = tag
    context = {
        'p': p,
        'page': page,
        'count': count,
        'cat_count': cat_count,
        'tags': tags,
        'tag': tag,
    }
    return render_to_response('organizations/organizations_show.html',
                              context, context_instance=RequestContext(request))
def daniya_films(request):
    """Maintain the list of kinoafisha film ids exported for Daniya.

    The ids live one-per-line in a plain text file. GET renders the
    paginated list; POST either appends an id extracted from a pasted
    kinoafisha URL ('add_url') or removes the checked ids ('del_url'),
    then redirects (PRG pattern).
    """
    path = '%s/daniya_films.txt' % settings.API_EX_PATH
    # BUG FIX: file handles were opened/closed manually and leaked if a
    # line failed to parse; `with` guarantees they are closed.
    with open(path, 'r') as f:
        ids = [int(i) for i in f.readlines()]
    films = [{'id': i, 'url': 'http://www.kinoafisha.ru/?status=1&id1=%s' % i}
             for i in ids]
    if request.POST:
        add_url = request.POST.get('add_url')
        del_url = request.POST.get('del_url')
        if add_url:
            url = request.POST.get('url')
            if url:
                # Pull the `id1=<digits>` parameter out of the pasted URL.
                result = re.findall(r'id1\=\d+', url)
                if result:
                    film_id = result[0].split('=')[1]
                    with open(path, 'a') as f:
                        f.write('%s\n' % film_id)
        elif del_url:
            checker = [int(i) for i in request.POST.getlist('checker')]
            # Rewrite the file keeping only the ids that were NOT checked.
            with open(path, 'w') as f:
                for i in ids:
                    if i not in checker:
                        f.write('%s\n' % i)
        return HttpResponseRedirect(reverse('daniya_films'))
    films = sorted(films, key=operator.itemgetter('id'))
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, films, 15)
    return render_to_response('api/daniya_films.html', {
        'p': p,
        'page': page
    }, context_instance=RequestContext(request))
def torrents_listing(request, source):
    """Admin listing of torrent releases imported from one source.

    Supports three POST actions: hiding checked films (``checker``) and
    setting release/new/rate filters; filter state persists in the session.
    Renders a paginated, release-date-sorted table of films with their
    torrents grouped by quality.
    """
    from slideblok.views import releasedata
    from news.views import cut_description
    from movie_online.IR import check_int_rates_inlist
    from release_parser.func import get_file_modify_datetime
    source = ImportSources.objects.get(pk=source)
    today = datetime.datetime.now().date()
    yesterday = today - datetime.timedelta(days=1)
    # The rutracker parser dumps XML; every other source dumps HTML.
    file_add = 'xml' if source.url == 'http://rutracker.org/' else 'html'
    file_path = '%s__%s__films.xml' % (file_add, source.dump)
    parser_time = get_file_modify_datetime(settings.SUCCESS_LOG_PATH, file_path)
    # NOTE(review): +3h looks like a fixed MSK timezone offset — confirm.
    parser_time = parser_time + datetime.timedelta(hours=3)
    release, new, rate = (None, None, None)
    release_filter = {
        0: {'id': 0, 'name': u'ВСЕ'},
        1: {'id': 1, 'name': u'Кинопрокат'},
        2: {'id': 2, 'name': u'Прочие'},
    }
    new_filter = {
        0: {'id': 0, 'name': u'ВСЕ'},
        1: {'id': 1, 'name': u'Новые'},
    }
    rate_filter = [u'ВСЕ',]
    if request.POST:
        if 'checker' in request.POST:
            # Hide the checked films from future listings, then redirect.
            checker = request.POST.getlist('checker')
            if checker:
                SourceFilms.objects.filter(source_obj=source, pk__in=checker).update(rel_ignore=True)
            return HttpResponseRedirect(reverse('torrents_listing', kwargs={'source': source.pk}))
        else:
            # Filter form submission (values arrive as strings).
            release = request.POST.get('release')
            new = request.POST.get('new')
            rate = request.POST.get('rate')
    kids = list(SourceFilms.objects.filter(source_obj=source, rel_ignore=False).values_list('kid', flat=True))
    # Extend the rate filter choices with every interest rate seen.
    rates = check_int_rates_inlist(kids)
    rates_tmp = set([i['int_rate'] for i in rates.values() if i['int_rate']])
    for i in rates_tmp:
        rate_filter.append(i)
    # Normalize POSTed strings to ints; a non-numeric rate means "ALL".
    if rate:
        try:
            rate = int(rate)
        except ValueError:
            rate = u'ВСЕ'
    if release:
        release = int(release)
    if new:
        new = int(new)
    sess_filter = request.session.get('torrents_listing_filter', {})
    # No filters posted at all: restore the last session state, or default
    # to showing only new films.
    if rate == None and release == None and new == None:
        sess_filter = request.session.get('torrents_listing_filter', {})
        if sess_filter:
            rate = sess_filter['rate']
            release = sess_filter['release']
            new = sess_filter['new']
        else:
            new = 1
    if not new:
        source_films = SourceFilms.objects.filter(source_obj=source, rel_ignore=False)
    elif new == 1:
        source_films = SourceFilms.objects.filter(source_obj=source, extra='new', rel_ignore=False)
    # Placeholder release date sorts films without a date to the top.
    tmp_date = datetime.datetime(3000, 1, 1)
    films = {}
    for i in source_films:
        # NOTE(review): this rebinds the `new` filter variable to a per-film
        # bool; the session write below stores that bool — confirm intended.
        new = True if i.extra == 'new' else False
        films[i.kid] = {'source_id': i.source_id, 'new': new, 'pk': i.id, 'release': tmp_date, 'kid': i.kid}
    # Fill in real release dates from the afisha database.
    for i in list(Film.objects.using('afisha').filter(pk__in=films.keys(), date__gte=datetime.datetime(1900, 1, 1)).values('id', 'date')):
        films[i['id']]['release'] = i['date']
    films_sorted = sorted(films.values(), key=operator.itemgetter('release'), reverse=True)
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, films_sorted, 100)
    # Only enrich the films on the current page.
    films_ids = {}
    for i in p.object_list:
        films_ids[i['kid']] = {}
    # Group each film's torrents by average quality bucket ('0'/'1'/'2').
    torrents = {}
    for i in Torrents.objects.filter(film__in=films_ids.keys()).exclude(path=None):
        if not torrents.get(i.film):
            torrents[i.film] = {'0': None, '1': None, '2': None}
        q = i.quality_avg if i.quality_avg else '1'
        torrents[i.film][q] = i
    data_tmp = releasedata(films_ids, {}, persons=False, likes=False, trailers=False, reviews=False, poster_size='small')
    upd = []
    data = []
    for i in data_tmp:
        torrent = torrents.get(i['id'])
        f = films.get(i['id'])
        txt_cut = cut_description(i['descript'], True, 150)
        i['descript_cut'] = txt_cut
        i['descript'] = ''
        i['new'] = f['new']
        i['source_id'] = f['pk']
        i['torrent'] = torrent
        if not i['release_date']:
            i['release_date'] = datetime.datetime(3000, 1, 1)
        # Per-source deep links back to the original torrent page.
        if source.url == 'http://cinemate.cc/':
            i['source_url'] = u'http://cinemate.cc/movie/%s/' % f['source_id']
        elif source.url == 'http://rutracker.org/':
            i['source_url'] = u'http://rutracker.org/forum/viewtopic.php?t=%s' % f['source_id']
        # Films shown once lose their "new" flag (cleared below).
        if f['new']:
            upd.append(f['pk'])
        # release filter: 1 = theatrical (has a real date), 2 = other.
        next = True
        if release == 1 and i['release_date'].year == 3000:
            next = False
        elif release == 2 and i['release_date'].year < 3000:
            next = False
        if next:
            if rate:
                if rate == u'ВСЕ' or rate == int(i['rate']):
                    data.append(i)
            else:
                data.append(i)
    films = sorted(data, key=operator.itemgetter('release_date'), reverse=True)
    SourceFilms.objects.filter(pk__in=upd).update(extra=None)
    request.session['torrents_listing_filter'] = {'rate': rate, 'new': new, 'release': release}
    return render_to_response('kinoafisha/torrents_listing.html', {'data': films, 'source': source, 'p': p, 'page': page, 'release_filter': release_filter.values(), 'release': release, 'new_filter': new_filter.values(), 'new': new, 'rate_filter': rate_filter, 'rate': rate, 'parser_time': parser_time, 'today': today, 'yesterday': yesterday}, context_instance=RequestContext(request))
def new_torrents(request):
    """Paid-access catalogue of films that have downloadable torrents.

    Access is granted to superusers or profiles with balance >= ``price``.
    Films the user rated badly are excluded; year/genre/country/rate
    filters persist via POST. Without access an empty page is rendered.
    """
    from slideblok.views import releasedata
    from movie_online.IR import check_int_rates_inlist
    price = 100
    interface = request.profile.personinterface
    year, genre, country, rate = (None, None, None, None)
    rate_filter = [u'ВСЕ',]
    year_filter = [u'ВСЕ',]
    genre_filter = {0: {'id': 0, 'name': u'ВСЕ'}}
    country_filter = {0: {'id': 0, 'name': u'ВСЕ'}}
    set_filter = False
    access = False
    if interface.money >= price or request.user.is_superuser:
        access = True
        # Every film that has at least one torrent with a file path.
        torrents_kids = list(Torrents.objects.exclude(path=None).distinct('film').values('id', 'film'))
        kids = [i['film'] for i in torrents_kids]
        # Split the user's own visible reviews into liked (>=4) / disliked.
        opinions = {'good': [], 'bad': []}
        for i in NewsFilms.objects.filter(kid__in=kids, message__visible=True, message__autor=request.profile, rate_1__gt=0).values('kid', 'rate'):
            if i['rate'] >= 4:
                opinions['good'].append(i['kid'])
            else:
                opinions['bad'].append(i['kid'])
        # Films the user disliked are dropped from the catalogue entirely.
        kids = set(kids) - set(opinions['bad'])
        # Build the year/genre/country filter choices from the films found.
        exist_kids = []
        for i in list(Film.objects.using('afisha').filter(pk__in=kids, date__gte=datetime.date(1900, 1, 1)).values('year', 'genre1', 'genre2', 'genre3', 'genre1__name', 'genre2__name', 'genre3__name', 'country', 'country2', 'country__name', 'country2__name', 'id')):
            if i['year']:
                year_filter.append(int(i['year']))
            for j in ((i['genre1'], i['genre1__name']), (i['genre2'], i['genre2__name']), (i['genre3'], i['genre3__name'])):
                if j[0] and not genre_filter.get(j[0]):
                    genre_filter[int(j[0])] = {'id': int(j[0]), 'name': j[1]}
            for j in ((i['country'], i['country__name']), (i['country2'], i['country2__name'])):
                if j[0] and not country_filter.get(j[0]):
                    country_filter[int(j[0])] = {'id': int(j[0]), 'name': j[1]}
            exist_kids.append(i['id'])
        year_filter = sorted(set(year_filter), reverse=True)
        rates = check_int_rates_inlist(exist_kids)
        rates_tmp = set([i['int_rate'] for i in rates.values() if i['int_rate']])
        for i in rates_tmp:
            rate_filter.append(i)
        # Filter form submission; non-numeric values are silently ignored.
        if request.POST:
            if 'filter' in request.POST:
                try:
                    year = int(request.POST.get('year'))
                except ValueError:
                    pass
                try:
                    genre = int(request.POST.get('genre'))
                except ValueError:
                    pass
                try:
                    country = int(request.POST.get('country'))
                except ValueError:
                    pass
                try:
                    rate = int(request.POST.get('rate'))
                except ValueError:
                    pass
        # Normalize: anything outside the valid choices falls back to "ALL".
        if year not in year_filter:
            year = year_filter[0]
        if genre not in genre_filter:
            genre = genre_filter[0]['id']
        if country not in country_filter:
            country = country_filter[0]['id']
        if rate not in rate_filter:
            rate = rate_filter[0]
        if year != u'ВСЕ' or rate != u'ВСЕ' or genre != 0 or country != 0:
            set_filter = True
        genre_filter = sorted(genre_filter.values(), key=operator.itemgetter('name'))
        country_filter = sorted(country_filter.values(), key=operator.itemgetter('name'))
        filter = {'pk__in': kids, 'date__gte': datetime.date(1900, 1, 1)}
        if year != u'ВСЕ':
            filter['year__exact'] = year
        # Genre/country may match any of the film's genre/country slots,
        # so the clauses are OR-ed together.
        queries = []
        if genre:
            for q in [Q(genre1__pk=genre), Q(genre2__pk=genre), Q(genre3__pk=genre)]:
                queries.append(q)
        if country:
            for q in [Q(country__pk=country), Q(country2__pk=country)]:
                queries.append(q)
        if queries:
            query = queries.pop()
            for item in queries:
                query |= item
            torrents_kids = list(Film.objects.using('afisha').filter(query, **filter).values('pk', 'date'))
        else:
            torrents_kids = list(Film.objects.using('afisha').filter(**filter).values('pk', 'date'))
        kids = [i['pk'] for i in torrents_kids]
        # Films whose torrent the user already downloaded.
        tusers = list(TorrentsUsers.objects.filter(profile=request.profile, torrent__film__in=kids, got=True).values_list('torrent__film', flat=True))
        # Placeholder date sorts undated films first.
        for i in torrents_kids:
            if not i['date']:
                i['date'] = datetime.datetime(3000, 1, 1)
        torrents_kids = sorted(torrents_kids, key=operator.itemgetter('date'), reverse=True)
        page = request.GET.get('page')
        try:
            page = int(page)
        except (ValueError, TypeError):
            page = 1
        p, page = pagi(page, torrents_kids, 20)
        # Enrich only the current page of films.
        torrents_films_dict = {}
        for i in p.object_list:
            torrents_films_dict[i['pk']] = {}
        data = []
        for i in releasedata(torrents_films_dict, {}, persons=True, likes=True, trailers=True, reviews=True, poster_size='big'):
            i['got'] = True if i['id'] in tusers else False
            if not i['release_date']:
                i['release_date'] = datetime.datetime(3000, 1, 1)
            if i['id'] in opinions['good']:
                i['opinion'] = True
            if rate == u'ВСЕ' or int(rate) == int(i['rate']):
                data.append(i)
        data = sorted(data, key=operator.itemgetter('release_date'), reverse=True)
    else:
        # No access: render the page shell with no data.
        p = None
        page = 1
        data = []
    tmplt = 'kinoafisha/new_torrents.html'
    url_name = resolve(request.path_info).url_name
    return render_to_response(tmplt, {'data': data, 'page': page, 'p': p, 'url_name': url_name, 'year_filter': year_filter, 'genre_filter': genre_filter, 'country_filter': country_filter, 'year': year, 'genre': genre, 'country': country, 'access': access, 'rate_filter': rate_filter, 'rate': rate, 'set_filter': set_filter}, context_instance=RequestContext(request))
def api_users_2(request):
    """Admin user browser: list users by group, search, or join-date range.

    Group and search selections persist in the session. Group '5' exists
    only while a search is active; group '4' ("others") supports a custom
    date range posted via 'date_btn'.
    """
    groups = {
        '1': {'id': '1', 'name': 'Суперюзеры', 'filter': {'user__is_superuser': True}},
        '2': {'id': '2', 'name': 'API клиенты', 'filter': {'user__groups': 1}},
        '3': {'id': '3', 'name': 'Aвторизованные', 'filter': 'exclude'},
        '4': {'id': '4', 'name': 'Остальные', 'filter': 'other'},
        '5': {'id': '5', 'name': 'Результат поиска', 'filter': {}},
    }
    group = None
    search = None
    date_now, date_past = (None, None)
    if request.POST:
        if 'search_btn' in request.POST:
            search = request.POST.get('user_search').strip()
        if 'date_btn' in request.POST:
            # Date-range browse of the "others" group; dates arrive as
            # DD-MM-YYYY strings.
            group = '4'
            date_now = request.POST.get('date_to', '').split('-')
            date_past = request.POST.get('date_from', '').split('-')
            if date_now and date_past:
                date_now = datetime.datetime(int(date_now[2]), int(date_now[1]), int(date_now[0]), 23, 59, 59)
                date_past = datetime.date(int(date_past[2]), int(date_past[1]), int(date_past[0]))
            else:
                date_now, date_past = (None, None)
        else:
            group = request.POST.get('user_group')
    # Nothing selected this request: restore the remembered search.
    if not group and not search:
        search = request.session.get('filter_api_users_2__search')
    # The "search results" pseudo-group only exists while searching.
    if search:
        group = '5'
    else:
        del groups['5']
    groups_list = groups.values()
    if not group:
        group = request.session.get('filter_api_users_2__group', groups_list[0]['id'])
    if not search and group == '5':
        group = groups_list[0]['id']
    filter = groups.get(group)
    filter = filter['filter']
    if search:
        # Search across account credentials and person names (max 100 hits).
        users = Profile.objects.select_related('user').only('user', 'id').filter(Q(accounts__login__icontains=search) | Q(accounts__nickname__icontains=search) | Q(accounts__email__icontains=search) | Q(accounts__fullname__icontains=search) | Q(person__name__name__icontains=search, person__name__status=1, person__name__language__id=1)).distinct('user__id').order_by('user__id')[:100]
    else:
        if filter == 'exclude':
            # "Authorized": profiles that have at least one linked account.
            users = Profile.objects.select_related('user').exclude(accounts=None).distinct('user__id').order_by('user__id')
        elif filter == 'other':
            # "Others": account-less non-superusers; default window = 2 days.
            if not date_past:
                date_now = datetime.datetime.now()
                date_past = date_now - datetime.timedelta(days=2)
            users = Profile.objects.select_related('user').filter(accounts=None, user__is_superuser=False, user__date_joined__gte=date_past, user__date_joined__lte=date_now).distinct('user__id').order_by('user__id')
        else:
            users = Profile.objects.select_related('user').filter(**filter).distinct('user__id').order_by('user__id')
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, users, 15)
    peoples = set([i for i in p.object_list])
    # `groups` is rebound here: now maps user id -> is member of "API" group.
    groups = {}
    for i in list(Group.objects.filter(name='API', user__profile__in=peoples).values_list('user', flat=True)):
        groups[i] = True
    users_x = []
    for i in org_peoples(peoples):
        i['api_client'] = groups.get(i['id'], False)
        users_x.append(i)
    request.session['filter_api_users_2__group'] = group
    request.session['filter_api_users_2__search'] = search
    mlist_count = len(request.session.get('users_merge_list', []))
    tmplt = 'music/admin_users.html' if request.subdomain else 'api/user_controls_2.html'
    return render_to_response(tmplt, {'groups': groups_list, 'group': group, 'p': p, 'page': page, 'search': search, 'users_x': users_x, 'date_now': date_now, 'date_past': date_past, 'mlist_count': mlist_count}, context_instance=RequestContext(request))
def answers_admin(request):
    """Admin listing of Q&A answers (News reader_type '23').

    Pages over all original (non-translation) answers, resolves each
    answer's parent question, its author, and a per-language translation
    availability map.
    """
    lang = get_language()
    # answer pk -> its qanswers id, for ALL original answers (pre-pagination).
    all_answers_ids = {}
    for i in list(
            News.objects.filter(
                reader_type='23', translation_for=None).order_by('-dtime').values(
                    'pk', 'qanswers')):
        all_answers_ids[i['pk']] = i['qanswers']
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, all_answers_ids.keys(), 8)
    # answer pk -> parent (question) pk, for the current page only.
    answers_ids = {}
    for i in list(
            News.objects.filter(pk__in=p.object_list, reader_type='23').order_by('-dtime').values(
                'pk', 'parent')):
        answers_ids[i['pk']] = i['parent']
    # question pk -> questionanswer id.
    q_ids = {}
    for i in list(
            News.objects.filter(
                pk__in=answers_ids.values(), reader_type='22',
                translation_for=None).distinct('pk').order_by('-dtime').values(
                    'questionanswer', 'pk')):
        if i['questionanswer']:
            q_ids[i['pk']] = i['questionanswer']
    answers = News.objects.select_related(
        'autor', 'language').filter(pk__in=answers_ids.keys()).order_by('-dtime')
    langs = [l[0] for l in settings.LANGUAGES]
    profiles = []
    ids = []
    # answer pk -> {lang code: has translation?}; the answer's own language
    # counts as covered.
    translation = {}
    for i in answers:
        profiles.append(i.autor)
        ids.append(i.id)
        default_langs = {}
        for lg in langs:
            default_langs[lg] = False
        translation[i.id] = default_langs
        translation[i.id][i.language.code] = True
    for i in list(
            News.objects.filter(translation_for__in=ids, reader_type='23').values(
                'translation_for', 'language__code')):
        translation[i['translation_for']][i['language__code']] = True
    peoples = org_peoples(set(profiles), dic=True)
    data = []
    for i in answers:
        author = peoples.get(i.autor.user_id)
        text = cut_description(i.text, True, 60)
        qid = q_ids.get(i.parent_id)
        aid = all_answers_ids.get(i.id)
        translation_languages = translation.get(i.id, [])
        data.append({
            'id': aid,
            'qid': qid,
            'dtime': i.dtime,
            'text': text,
            'author': author,
            'lang': i.language.code,
            'translation': translation_languages,
            'parent': i.parent_id
        })
    # NOTE(review): vid 99 appears to be a dev-server marker ('0.0.1:8000'),
    # 95 production — confirm against the template.
    vid = 99 if request.domain == '0.0.1:8000' else 95
    return render_to_response('imiagroup/question_answer.html', {
        'vid': vid,
        'data': data,
        'p': p,
        'page': page,
        'lang': lang,
        'qtype': 'admin',
        'qa_list_type': 'answers'
    }, context_instance=RequestContext(request))
def question_answer(request, tag=None, qtype=None):
    """Public Q&A question listing (News reader_type '22').

    Modes: filter by ``tag`` (any language), or by ``qtype``
    ('with'/'without' an answer in the current language), or default:
    all questions in the current language. Unknown qtype -> 404.
    """
    lang = get_language()
    do_query = True
    filter = {'reader_type': '22', 'language__code': lang}
    if tag:
        # Tag filtering ignores the language restriction.
        tag = tag.encode('utf-8')
        filter['tags__name'] = tag
        del filter['language__code']
    elif qtype:
        if qtype in ('with', 'without'):
            if qtype == 'with':
                # Questions answered in `lang`, either directly or via a
                # translated original.
                #query_result = list(News.objects.filter(Q(parent_rel__language__code=lang) | Q( translation_for__parent_rel__language__code=lang), language__code=lang, reader_type='22').distinct('pk').values('questionanswer', 'pk'))
                query_result_1 = list(
                    News.objects.filter(
                        parent_rel__language__code=lang,
                        language__code=lang,
                        reader_type='22').distinct('pk').values(
                            'questionanswer', 'pk'))
                query_result_2 = list(
                    News.objects.filter(
                        translation_for__parent_rel__language__code=lang,
                        language__code=lang,
                        reader_type='22').distinct('pk').values(
                            'questionanswer', 'pk'))
                query_result = query_result_1 + query_result_2
                do_query = False
            elif qtype == 'without':
                # Questions with no answer in `lang`: exclude translated
                # answers first, then direct ones.
                query_result_tmp = list(
                    News.objects.filter(
                        language__code=lang,
                        reader_type='22').exclude(
                            translation_for__parent_rel__language__code=lang).
                    distinct('pk').values_list('pk', flat=True))
                query_result = list(
                    News.objects.filter(pk__in=query_result_tmp).exclude(
                        parent_rel__language__code=lang).distinct('pk').values(
                            'questionanswer', 'pk'))
                do_query = False
        else:
            raise Http404
    if do_query:
        query_result = list(
            News.objects.filter(**filter).distinct('pk').values(
                'questionanswer', 'pk'))
    # question pk -> questionanswer id (skip questions without one).
    q_ids = {}
    for i in query_result:
        if i['questionanswer']:
            q_ids[i['pk']] = i['questionanswer']
    questions = News.objects.select_related(
        'autor', 'language').filter(pk__in=q_ids.keys()).order_by('-dtime')
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, questions, 8)
    profiles = []
    ids = []
    questionanswers = []
    for i in p.object_list:
        profiles.append(i.autor)
        ids.append(i.id)
        qas = q_ids.get(i.id)
        if qas:
            questionanswers.append(qas)
    # questionanswer id -> number of answers in the current language.
    answers = {}
    for i in list(
            News.objects.filter(parent__questionanswer__id__in=questionanswers,
                                reader_type='23', language__code=lang).values(
                                    'pk', 'parent__questionanswer')):
        if not answers.get(i['parent__questionanswer']):
            answers[i['parent__questionanswer']] = 0
        answers[i['parent__questionanswer']] += 1
    peoples = org_peoples(set(profiles), dic=True)
    tags_list = set(
        list(
            NewsTags.objects.filter(news__reader_type='22').values_list(
                'name', flat=True)))
    # question pk -> list of its tag names.
    tags_all = {}
    for i in list(
            NewsTags.objects.filter(news__pk__in=ids).values(
                'name', 'news__pk')):
        if not tags_all.get(i['news__pk']):
            tags_all[i['news__pk']] = []
        tags_all[i['news__pk']].append(i['name'])
    data = []
    for i in p.object_list:
        author = peoples.get(i.autor.user_id)
        tags = tags_all.get(i.id)
        text = cut_description(i.text, True, 60)
        qid = q_ids.get(i.id)
        answers_count = answers.get(qid, 0)
        data.append({
            'id': qid,
            'dtime': i.dtime,
            'subject': i.title,
            'text': text,
            'author': author,
            'tags': tags,
            'lang': i.language.code,
            'answers': answers_count,
            'views': i.views,
        })
    # NOTE(review): vid 99 appears to be a dev-server marker — confirm.
    vid = 99 if request.domain == '0.0.1:8000' else 95
    return render_to_response('imiagroup/question_answer.html', {
        'vid': vid,
        'data': data,
        'p': p,
        'page': page,
        'tags_list': tags_list,
        'lang': lang,
        'qtype': qtype,
        'qa_list_type': 'questions'
    }, context_instance=RequestContext(request))
def question_answer_admin(request):
    """Admin listing of Q&A questions (News reader_type '22').

    Pages over original (non-translation) questions and shows, for each,
    its author, tags, and a per-language translation availability map.
    """
    lang = get_language()
    # question pk -> questionanswer id (skip questions without one).
    q_ids = {}
    for i in list(
            News.objects.filter(
                reader_type='22',
                translation_for=None).distinct('pk').order_by('-dtime').values(
                    'questionanswer', 'pk')):
        if i['questionanswer']:
            q_ids[i['pk']] = i['questionanswer']
    questions = News.objects.select_related(
        'autor', 'language').filter(pk__in=q_ids.keys()).order_by('-dtime')
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, questions, 8)
    langs = [l[0] for l in settings.LANGUAGES]
    profiles = []
    ids = []
    # question pk -> {lang code: has translation?}; the question's own
    # language counts as covered.
    translation = {}
    for i in p.object_list:
        profiles.append(i.autor)
        ids.append(i.id)
        default_langs = {}
        for lg in langs:
            default_langs[lg] = False
        translation[i.id] = default_langs
        translation[i.id][i.language.code] = True
    for i in list(
            News.objects.filter(translation_for__in=ids).values(
                'translation_for', 'language__code')):
        translation[i['translation_for']][i['language__code']] = True
    peoples = org_peoples(set(profiles), True)
    tags_list = set(
        list(
            NewsTags.objects.filter(news__reader_type='22').values_list(
                'name', flat=True)))
    # question pk -> list of its tag names.
    tags_all = {}
    for i in list(
            NewsTags.objects.filter(news__pk__in=ids).values(
                'name', 'news__pk')):
        if not tags_all.get(i['news__pk']):
            tags_all[i['news__pk']] = []
        tags_all[i['news__pk']].append(i['name'])
    data = []
    for i in p.object_list:
        author = peoples.get(i.autor.user_id)
        tags = tags_all.get(i.id)
        text = cut_description(i.text, True, 60)
        qid = q_ids.get(i.id)
        translation_languages = translation.get(i.id, [])
        data.append({
            'id': qid,
            'dtime': i.dtime,
            'subject': i.title,
            'text': text,
            'author': author,
            'tags': tags,
            'lang': i.language.code,
            'translation': translation_languages,
        })
    # NOTE(review): vid 99 appears to be a dev-server marker — confirm.
    vid = 99 if request.domain == '0.0.1:8000' else 95
    return render_to_response('imiagroup/question_answer.html', {
        'vid': vid,
        'data': data,
        'p': p,
        'page': page,
        'tags_list': tags_list,
        'lang': lang,
        'qtype': 'admin',
        'qa_list_type': 'questions'
    }, context_instance=RequestContext(request))
def index(request):
    """City-news front page for the yalta/yalta2/orsk/memoirs subdomains.

    Filters news by an optional ``tag`` GET parameter (mapped to Russian
    word stems and, for some tags, a narrower reader_type set), paginates,
    and builds a truncated plain-text teaser for each item.
    """
    current_site = request.current_site
    subdomain = request.subdomain
    if subdomain not in ('yalta', 'yalta2', 'orsk', 'memoirs'):
        raise Http404
    filter = {'site': current_site}
    # Non-admins only see published items.
    if not request.user.is_superuser and not request.is_admin:
        filter['visible'] = True
    reader_type = (8, 11, 12)
    # Map the tag slug to a Russian word stem used for icontains matching;
    # some tags also narrow the reader_type set.
    tag = request.GET.get('tag')
    if tag == 'news':
        tag = 'новост'
    elif tag == 'recomm':
        tag = 'рекомендац'
    elif tag == 'review':
        tag = 'отзыв'
        reader_type = (8,)
    elif tag == 'announce':
        tag = 'анонс'
    elif tag == 'offer':
        tag = 'предло'
        reader_type = (11,)
    elif tag == 'advert':
        tag = 'спрос'
        reader_type = (12,)
    else:
        tag = None
    if tag:
        filter['tags__name__icontains'] = tag
    if subdomain == 'memoirs':
        news = News.objects.select_related('autor').filter(Q(subdomain=subdomain, reader_type=None), **filter).order_by('-dtime')
    else:
        # Other subdomains also include world-published items.
        news = News.objects.select_related('autor').filter(Q(subdomain=subdomain) | Q(world_pub=True), Q(reader_type__in=reader_type) | Q(reader_type=None), **filter).order_by('-dtime')
    page = request.GET.get('page')
    try:
        page = int(page)
    except (ValueError, TypeError):
        page = 1
    p, page = pagi(page, news, 8)
    news_data = []
    for ind, i in enumerate(p.object_list):
        # Older word-count-based truncation, kept for reference:
        ''' description = BeautifulSoup(i.text, from_encoding='utf-8').text.strip().split()[:20] description = ' '.join(description) description = '%s ...' % description '''
        # Teaser: first 130 chars of the stripped plain text, extended to
        # the end of the word that straddles the cut.
        description_orig = BeautifulSoup(i.text, from_encoding='utf-8').text.strip()
        description = description_orig[:130]
        try:
            last_word = description_orig[130:]
            if last_word[0] == ' ':
                last_word = ' ' + last_word.split()[0]
            else:
                last_word = last_word.split()[0]
        except IndexError:
            # Text is exactly <= 130 chars; nothing to append.
            last_word = ''
        description = '%s%s ...' % (description, last_word)
        even = True if ind % 2 == 0 else False
        video = True if i.video else False
        if i.autor:
            autor = org_peoples([i.autor])[0]
            # autor_nick: 1 = show first name if set, 2 = fully anonymous.
            if i.autor_nick == 1:
                if i.autor.user.first_name:
                    autor['fio'] = i.autor.user.first_name
                    autor['show'] = '2'
            elif i.autor_nick == 2:
                autor['fio'] = ''
                autor['short_name'] = ''
        else:
            autor = {'fio': '', 'short_name': ''}
        news_data.append({'obj': i, 'description': description, 'even': even, 'video': video, 'autor': autor})
    city_name = ''
    if subdomain in ('yalta', 'yalta2'):
        city_name = 'Ялта'
    elif subdomain == 'orsk':
        city_name = 'Орск'
    elif subdomain == 'memoirs':
        city_name = 'Мои Истории'
    main_description = '%s в сети интернет: новости, анонсы, рекомендации, отзывы, предложения и спрос' % city_name
    if subdomain == 'memoirs':
        main_description = ''
    return render_to_response('news/main.html', {'news_data': news_data, 'p': p, 'page': page, 'main_description': main_description, 'city_name': city_name, 'tag': tag}, context_instance=RequestContext(request))