    def post(self, request, format=None):
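        """Star or un-star a page for the current user, creating the Page record if it does not exist yet."""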
        url = request.data['url']
        star = request.data['star']

        short_url = shorten_url(url)

        try:
            p = Page.objects.get(url=short_url, owned_by=request.user)
        except Page.DoesNotExist:
            page_title = request.data['title']

            if page_title == '':
                page_title = 'No Title'

            base_url = urlparse(url).netloc

            p = Page(title=page_title,
                     url=short_url,
                     domain=base_url,
                     owned_by=request.user)

        p.star = star
        p.save()

        serializer = PageSerializer(p)
        return Response(serializer.data)
    def post(self, request, format=None):
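        """Remove a category from a page and subtract the page's keywords from that category's keyword counts."""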
        cat = request.data['category']
        url = request.data['url']

        short_url = shorten_url(url)

        try:
            p = Page.objects.get(url=short_url, owned_by=request.user)
        except Page.DoesNotExist:
            raise Http404

        c = Category.objects.get(title=cat, owned_by=request.user)

        p.categories.remove(c)

        # update category keywords
        if p.keywords != '{}':
            page_keywords = Counter(json.loads(p.keywords))
            cat_keywords = Counter(json.loads(c.keywords))

            new_cat_keywords = cat_keywords - page_keywords
            c.keywords = json.dumps(new_cat_keywords)
            c.num_pages = c.num_pages - 1
            c.save()

        pv = p.pagevisit_set.last()
        if pv:
            setattr(p, 'last_visited', pv.visited)
            setattr(p, 's3', pv.s3)
            setattr(p, 'preview', pv.preview)
        else:
            setattr(p, 'last_visited', None)
            setattr(
                p, 's3',
                'https://s3.us-east-2.amazonaws.com/hindsite-production/404_not_found.html'
            )
            setattr(
                p, 'preview',
                'https://s3.us-east-2.amazonaws.com/hindsite-production/default-image.jpg'
            )

        serializer = PageInfoSerializer(p)
        return Response(serializer.data)
    def post(self, request, format=None):
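        """Save a note on a page, creating the Page record if needed, and return the page with its latest visit info."""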
        url = request.data['url']
        note = request.data['note']

        short_url = shorten_url(url)

        try:
            p = Page.objects.get(url=short_url, owned_by=request.user)
        except Page.DoesNotExist:
            page_title = request.data['title']

            if page_title == '':
                page_title = 'No Title'

            base_url = urlparse(url).netloc

            p = Page(title=page_title,
                     url=short_url,
                     domain=base_url,
                     owned_by=request.user)
            p.save()

        p.note = note
        p.save()

        pv = p.pagevisit_set.last()
        if pv:
            setattr(p, 'last_visited', pv.visited)
            setattr(p, 's3', pv.s3)
            setattr(p, 'preview', pv.preview)
        else:
            setattr(p, 'last_visited', None)
            setattr(
                p, 's3',
                'https://s3.us-east-2.amazonaws.com/hindsite-production/404_not_found.html'
            )
            setattr(
                p, 'preview',
                'https://s3.us-east-2.amazonaws.com/hindsite-production/default-image.jpg'
            )

        serializer = PageInfoSerializer(p)
        return Response(serializer.data)
Example 4
    def post(self, request, format=None):
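        """Build the popup payload: the user's categories scored against the requested page, plus tracking state."""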
        cu = request.user

        url = request.data['url']

        base_url = urlparse(url).netloc

        if is_blacklisted(cu, base_url):
            return Response(
                {
                    'status': 'Blacklist',
                    'message': 'This page is blacklisted.'
                },
                status=status.HTTP_201_CREATED)

        short_url = shorten_url(url)

        c = cu.category_set.all()

        holder = {'categories': c, 'tracking': request.user.tracking_on}

        try:
            p = Page.objects.get(url=short_url, owned_by=cu)
        except Page.DoesNotExist:
            holder['page'] = None
            send = PopupInfoSerializer(holder)
            return Response(send.data, status=status.HTTP_404_NOT_FOUND)

        checked = p.categories.all()
        ordered_score = calc_cat_score(c, p, checked)

        holder['categories'] = ordered_score
        holder['page'] = p

        send = PopupInfoSerializer(holder)

        return Response(send.data)
    def post(self, request, format=None):
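        """Look up a page by URL for the current user and return its serialized data, unless the domain is blacklisted."""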
        url = request.data['url']

        base_url = urlparse(url).netloc

        if is_blacklisted(request.user, base_url):
            return Response(
                {
                    'status': 'Blacklist',
                    'message': 'This page is blacklisted.'
                },
                status=status.HTTP_204_NO_CONTENT)

        short_url = shorten_url(url)

        try:
            p = Page.objects.get(url=short_url, owned_by=request.user)
        except Page.DoesNotExist:
            raise Http404

        page = PageSerializer(p)

        return Response(page.data)
Example 6
def create_page_login(user, url, base_url, t_id, page_title, domain_title,
                      favicon, html, image, prev_tab, active):
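    """Record a page visit for a logged-in user: track the tab, domain and active
    time, create or update the Page, upload the captured HTML and preview image to
    S3 (SSE-C encrypted), index the visit for search, and update usage stats.
    Returns a PageSerializer for the page, or False for ignored/internal URLs."""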

    if favicon == '':
        fav_d = Domain.objects.filter(base_url=base_url).exclude(
            favicon='').last()
        if fav_d:
            favicon = fav_d.favicon

    # Get the currently active TimeActive (at most one can exist)
    ta = TimeActive.objects.filter(end__isnull=True, owned_by=user)

    # Check if a tab exists with this id that is open in this session
    t = Tab.objects.filter(tab_id=t_id, closed__isnull=True, owned_by=user)
    if t.exists():
        t = t[0]
    else:
        if ta.exists() and active:
            ta = ta.first()
            ta.end = timezone.now()
            ta.save()

        if ('https://goo.gl/' not in url and 'hindsite-local' not in url
                and 'hindsite-production' not in url and 'chrome://' not in url
                and 'file:///' not in url
                and 'chrome-extension://' not in url):
            t = Tab(tab_id=t_id, owned_by=user)
            t.save()
        else:
            return False

    domains = t.domain_set.all()

    if domains.filter(base_url=base_url, closed__isnull=True).exists():
        d = domains.get(base_url=base_url, closed__isnull=True)
        if favicon != '' and favicon != d.favicon:
            d.favicon = favicon
            d.save()
    else:
        close_domain = domains.filter(closed__isnull=True)

        if close_domain.exists():
            close_domain = close_domain[0]
            if ta.exists():
                ta = ta.first()
                ta.end = timezone.now()
                ta.save()
            close_domain.closed = timezone.now()
            close_domain.save()

        if ('https://goo.gl/' not in url and 'hindsite-local' not in url
                and 'hindsite-production' not in url and 'chrome://' not in url
                and 'file:///' not in url
                and 'chrome-extension://' not in url):
            created = False
            if t_id != prev_tab:
                prev_t = Tab.objects.filter(tab_id=prev_tab,
                                            closed__isnull=True,
                                            owned_by=user)
                if prev_t.exists():
                    prev_t = prev_t.first()
                    prev_d = prev_t.domain_set.filter(closed__isnull=True)
                    if prev_d.exists():
                        prev_d = prev_d.first()
                        d = Domain(title=domain_title,
                                   tab=t,
                                   base_url=base_url,
                                   favicon=favicon,
                                   opened_from_domain=prev_d,
                                   opened_from_tabid=prev_tab,
                                   owned_by=user)
                        d.save()
                        created = True

            if not created:
                d = Domain(title=domain_title,
                           tab=t,
                           base_url=base_url,
                           favicon=favicon,
                           owned_by=user)
                d.save()
            if active:
                new_ta = TimeActive(owned_by=user)
                new_ta.save()
                d.active_times.add(new_ta)
        else:
            return False

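    # Look up the Page for this URL, creating it on first visit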
    short_url = shorten_url(url)

    p = Page.objects.filter(url=short_url, owned_by=user)

    if p.exists():
        p = p[0]
        if p.title != page_title:
            p.title = page_title
            p.save()
    else:
        p = Page(title=page_title,
                 url=short_url,
                 domain=base_url,
                 owned_by=user)
        p.save()

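    # Create the visit record and attach it to the user's active session if one applies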
    pv = PageVisit(page=p, domain=d, owned_by=user)

    session = user.session_set.filter(active=True)

    if session.exists():
        session = session.first()
        if session.end:
            if session.end < timezone.now():
                pv.session = session
            else:
                session.active = False
                session.save()
        else:
            pv.session = session

    pv.save()

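    # Upload the captured page HTML to S3, encrypted with the user's key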
    if len(html) > 0:
        aws_loc = str(user.pk) + '/' + str(pv.pk) + '.html'

        # No encryption
        # settings.S3_CLIENT.put_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME,
        #                             Key=aws_loc, Body=html, ContentType='text/html')

        # Encryption
        settings.S3_CLIENT.put_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME,
                                      Key=aws_loc,
                                      Body=html,
                                      SSECustomerKey=user.key,
                                      SSECustomerAlgorithm='AES256',
                                      ContentType='text/html')

        pv.s3 = settings.AWS_BUCKET_URL + aws_loc

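    # Upload the base64-encoded preview image the same way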
    if len(image) > 0:
        img_loc = str(user.pk) + '/' + str(pv.pk) + '.jpg'

        bits = base64.b64decode(image)

        settings.S3_CLIENT.put_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME,
                                      Key=img_loc,
                                      Body=bits,
                                      SSECustomerKey=user.key,
                                      SSECustomerAlgorithm='AES256',
                                      ContentType='image/jpeg')

        pv.preview = settings.AWS_BUCKET_URL + img_loc

    pv.save()

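    # Strip the markup and push the visit text to the search index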
    content = strip_tags(html)

    data = create_data(pv, content)

    uri = settings.SEARCH_BASE_URI + 'pagevisits/pagevisit/' + str(pv.id)

    requests.put(uri, data=data)

    update_stats(user, pv)

    page = PageSerializer(p)

    return page
    def post(self, request, format=None):
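        """Add a page to a category, creating either record if needed, and merge the page's keywords into the category's counts."""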
        cat = request.data['category']
        url = request.data['url']

        color = request.data.get('color', '#F8A055')

        short_url = shorten_url(url)

        try:
            p = Page.objects.get(url=short_url, owned_by=request.user)
        except Page.DoesNotExist:
            page_title = request.data['title']

            if page_title == '':
                page_title = 'No Title'

            base_url = urlparse(url).netloc

            p = Page(title=page_title,
                     url=short_url,
                     domain=base_url,
                     owned_by=request.user)
            p.save()

        # Find an existing category (case-insensitive) or create a new one
        c = Category.objects.filter(title__iexact=cat,
                                    owned_by=request.user).first()

        if c is None:
            c = Category(title=cat, owned_by=request.user, color=color)
            c.save()

        p.categories.add(c)

        # update category keywords with the page's keyword counts
        if p.keywords != '{}':
            page_keywords = Counter(json.loads(p.keywords))
            cat_keywords = Counter(json.loads(c.keywords))

            new_cat_keywords = page_keywords + cat_keywords
            c.keywords = json.dumps(new_cat_keywords)
            c.num_pages = c.num_pages + 1
            c.save()

        pv = p.pagevisit_set.last()
        if pv:
            setattr(p, 'last_visited', pv.visited)
            setattr(p, 's3', pv.s3)
            setattr(p, 'preview', pv.preview)
        else:
            setattr(p, 'last_visited', None)
            setattr(
                p, 's3',
                'https://s3.us-east-2.amazonaws.com/hindsite-production/404_not_found.html'
            )
            setattr(
                p, 'preview',
                'https://s3.us-east-2.amazonaws.com/hindsite-production/default-image.jpg'
            )

        serializer = PageInfoSerializer(p)
        return Response(serializer.data)