Example #1
def update_feed_scrap( request, user, scrap_id ):
    feed_scrap = FeedScrap.get_by_id( int(scrap_id) )
    project = feed_scrap.project
    
    if project.user != user:
        return HttpResponseForbidden( "<html><body>That doesn't belong to you</body></html>" ) 
    
    logging.info( feed_scrap )
    
    # re-parse the stored feed and add any entries we have not stored yet
    parse_attempt = feedparser.parse(feed_scrap.content)
    for entry in parse_attempt.entries:
        if 'guid' in entry and 'link' in entry and ('updated' in entry or 'published' in entry):
            if 'published' in entry:
                created = datetime.datetime( *entry.published_parsed[:6] )
            elif 'updated' in entry:
                created = datetime.datetime( *entry.updated_parsed[:6] )
                
            # only store entries whose guid is not already in this project
            if FeedItemScrap.all().filter("project =", project).filter("guid =", entry.guid).count()==0:
                feed_item_scrap = FeedItemScrap( content=entry.link,
                                                 project=project,
                                                 created=created,
                                                 creator=user,
                                                 icon=None,
                                                 feed=feed_scrap,
                                                 guid=entry.guid )
                feed_item_scrap.put()
                logging.info( feed_item_scrap )
    
    return HttpResponseRedirect( "/" )
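Both examples turn feedparser's `published_parsed`/`updated_parsed` fields, which are `time.struct_time` tuples, into `datetime` objects by unpacking the first six fields (year, month, day, hour, minute, second). A minimal standalone sketch of that idiom, using a made-up RSS string:

import datetime
import feedparser

# Hypothetical two-item-free RSS document, used only to illustrate the conversion.
sample_rss = """<?xml version="1.0"?>
<rss version="2.0"><channel><title>demo</title>
<item><guid>item-1</guid><link>http://example.com/1</link>
<pubDate>Mon, 06 Sep 2010 00:01:00 GMT</pubDate></item>
</channel></rss>"""

entry = feedparser.parse(sample_rss).entries[0]

# published_parsed is a time.struct_time; its first six elements are
# exactly the positional arguments datetime.datetime() expects.
created = datetime.datetime(*entry.published_parsed[:6])
print(created)  # 2010-09-06 00:01:00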
Example #2
def add_scrap(request, user):
    
    if not ('content' in request.POST and 'projectid' in request.POST):
        return HttpResponseServerError( "You must include both content and a project id" )
    
    logging.info( str( request.POST ) )
    
    # get project_id
    projectid = int(request.POST['projectid'])
    
    # get project
    project = Project.get_by_id( projectid )
    
    if project is None:
        return HttpResponseNotFound( "Could not find project with id %s"%projectid )
    
    # project needs to be owned by the current user
    if project.user != user:
        return HttpResponseForbidden( "This project is owned by %s. You are %s. They're not the same."%(project.user, user) )
    
    # scrap content needs to be non-blank
    scrap_content = request.POST['content']
    if scrap_content.strip()=="":
        return HttpResponseServerError( "The scrap content needs to have <i>characters</i>" )
    
    # if it's a URL, file it away as a LinkScrap
    parsed_url = urlparse.urlparse( scrap_content )
    if parsed_url[0]!="" and parsed_url[1]!="":
        # get favicon, if possible
        favicon_url = parsed_url[0]+"://"+parsed_url[1]+"/favicon.ico"
        try:
            favicon_resp = urlfetch.fetch(favicon_url)
            
            if favicon_resp.status_code == 200:
                favicon = favicon_resp.content
            else:
                favicon = None
        except DownloadError:
            favicon = None
            
        # if it parses as a feed, file it away as a feed scrap
        parse_attempt = feedparser.parse(scrap_content)
        if parse_attempt.version != "":
            # bail out if this feed has already been added to this project
            if FeedScrap.all().filter("content =", scrap_content).filter("project =", project).count()!=0:
                return HttpResponseServerError( "This feed has already been added to this project." )
            
            scrap = FeedScrap( content = scrap_content, project=project, created=datetime.datetime.now(), creator=user, icon=favicon )
            scrap.put()
            
            for entry in parse_attempt.entries:
                if 'guid' in entry and 'link' in entry and ('updated' in entry or 'published' in entry):
                    if 'published' in entry:
                        created = datetime.datetime( *entry.published_parsed[:6] )
                    elif 'updated' in entry:
                        created = datetime.datetime( *entry.updated_parsed[:6] )
                        
                    # skip entries already stored for this project
                    if FeedItemScrap.all().filter("project =", project).filter("guid =", entry.guid).count()==0:
                        feed_item_scrap = FeedItemScrap( content=entry.link,
                                                         project=project,
                                                         created=created,
                                                         creator=user,
                                                         icon=favicon,
                                                         feed=scrap,
                                                         guid=entry.guid )
                        feed_item_scrap.put()
                        logging.info( feed_item_scrap )
                                                     
        else:
            scrap = LinkScrap( content = scrap_content, project=project, created=datetime.datetime.now(), creator=user, icon=favicon )
            scrap.put()
    else:
        scrap = Scrap( content = scrap_content, project=project, creator=user, created=datetime.datetime.now() )
        scrap.put()
    
    project.updated = datetime.datetime.now()
    project.put()
        
    return render_to_response( "includes/scrap_div.html", {'scrap':scrap} )
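The branching in Example #2 hinges on two checks: urlparse must report both a scheme and a network location before the content is treated as a URL, and feedparser.parse(...).version is an empty string whenever feedparser cannot recognize the document as RSS/Atom. A standalone sketch of that classification (Python 2 naming, to match the example; the URLs are placeholders):

import feedparser
import urlparse  # urllib.parse on Python 3

def classify(content):
    # Treat the content as a URL only if it has both a scheme and a host.
    parsed_url = urlparse.urlparse(content)
    if parsed_url[0] == "" or parsed_url[1] == "":
        return "plain scrap"
    # feedparser sets .version to e.g. 'rss20' or 'atom10' when it
    # recognizes a feed, and leaves it as '' otherwise.
    if feedparser.parse(content).version != "":
        return "feed scrap"
    return "link scrap"

print(classify("just a note"))                  # plain scrap
print(classify("http://example.com/about"))     # link scrap (assuming it is not a feed)
print(classify("http://example.com/feed.xml"))  # feed scrap, if the URL really serves one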