Example #1
0
 def __init__(self,
              title,
              startDate,
              endDate,
              creator,
              text=u'',
              location=u'',
              attendees=None,
              contact_name=u'',
              contact_email=u'',
              calendar_category=u''):
     """Create a calendar event.

     ``attendees`` previously defaulted to a shared mutable ``[]``
     literal, so every event created without an explicit attendee list
     mutated the same list object (classic mutable-default bug).  It now
     defaults to None and a fresh list is built per instance; passing an
     explicit list behaves exactly as before.
     """
     Folder.__init__(self)
     self.title = unicode(title)
     self.startDate = startDate
     self.endDate = endDate
     self.location = location
     # Fresh list per event instead of a default shared across calls.
     self.attendees = [] if attendees is None else attendees
     self.contact_name = contact_name
     self.contact_email = contact_email
     self.creator = unicode(creator)
     self.modified_by = self.creator
     # ``text`` may be passed explicitly as None; normalize to u''.
     if text is None:
         self.text = u''
     else:
         self.text = unicode(text)
     self.calendar_category = calendar_category
     self['attachments'] = AttachmentsFolder()
Example #2
0
def evolve(context):
    """
    Upgrades required for new Image Drawer functionality.
    """
    catalog = find_catalog(context)
    search = ICatalogSearch(context)

    # Pass 1: mark ICommunityFile objects that are images so the image
    # drawer can find them, and reindex each one.
    _count, doc_ids, resolve = search(interfaces=[ICommunityFile])
    for doc_id in doc_ids:
        content = resolve(doc_id)
        if content is None:
            continue  # Work around catalog bug
        content._init_image()
        if content.is_image:
            print ("Image: %s" % resource_path(content))
            catalog.reindex_doc(content.docid, content)

    # Pass 2: convert wiki pages to folders so they can hold attachments.
    _count, doc_ids, resolve = search(interfaces=[IWikiPage])
    for doc_id in doc_ids:
        page = resolve(doc_id)
        if page is None:
            continue  # Work around catalog bug
        print ("Convert wiki page to folder: %s" % resource_path(page))
        Folder.__init__(page)
        catalog.reindex_doc(page.docid, page)
Example #3
0
def evolve(context):
    """
    Upgrades required for new Image Drawer functionality.
    """
    catalog = find_catalog(context)
    search = ICatalogSearch(context)

    # First sweep: flag image files and reindex them.
    total, docid_list, lookup = search(interfaces=[ICommunityFile])
    for one_docid in docid_list:
        item = lookup(one_docid)
        if item is None:
            continue  # Work around catalog bug
        item._init_image()
        if item.is_image:
            print ("Image: %s" % resource_path(item))
            catalog.reindex_doc(item.docid, item)

    # Second sweep: wiki pages become folders (attachment support).
    total, docid_list, lookup = search(interfaces=[IWikiPage])
    for one_docid in docid_list:
        item = lookup(one_docid)
        if item is None:
            continue  # Work around catalog bug
        print ("Convert wiki page to folder: %s" % resource_path(item))
        Folder.__init__(item)
        catalog.reindex_doc(item.docid, item)
Example #4
0
 def __init__(self, title, description, creator):
     """Initialize metadata; the creator is also the initial editor."""
     Folder.__init__(self)
     self.title = unicode(title)
     self.description = unicode(description)
     creator_name = unicode(creator)
     self.creator = creator_name
     self.modified_by = creator_name
     self.ordering = Ordering()
Example #5
0
 def __init__(self, title, text, description, creator):
     """Set up the page; all four arguments must be non-None."""
     Folder.__init__(self)
     # Guard against accidental None arguments, in argument order.
     assert title is not None
     assert text is not None
     assert description is not None
     assert creator is not None
     self.title = unicode(title)
     self.text = unicode(text)
     self.description = unicode(description)
     author = unicode(creator)
     self.creator = author
     self.modified_by = author
     # We might choose to make this more article-ish in KARL3
     self["attachments"] = AttachmentsFolder()
Example #6
0
File: page.py Project: iotest3/new
 def __init__(self, title, text, description, creator):
     """Initialize the page's text and metadata; no argument may be None."""
     Folder.__init__(self)
     assert title is not None
     assert text is not None
     assert description is not None
     assert creator is not None
     # Everything user-visible is stored as unicode.
     self.title = unicode(title)
     self.text = unicode(text)
     self.description = unicode(description)
     self.creator = unicode(creator)
     # The creator starts out as the last editor as well.
     self.modified_by = self.creator
     # We might choose to make this more article-ish in KARL3
     self['attachments'] = AttachmentsFolder()
Example #7
0
    def test_adding_with_subobject(self):
        """Adding a folder with nested children fires one added event per
        folder in the subtree."""
        root = Folder()
        folder = Folder()
        folder['1'] = Folder()
        folder['1']['2'] = Folder()

        # Record every IObjectAddedEvent fired for an IFolder.
        events = []

        def record(context, event):
            events.append([context, event])

        self.config.add_subscriber(record, [IFolder, IObjectAddedEvent])
        root['f'] = folder
        # folder itself + its two nested subfolders.
        self.assertEqual(len(events), 3)
Example #8
0
    def test_removing_with_subobject(self):
        """Deleting a folder fires a will-be-removed event for it and for
        every folder nested beneath it."""
        root = Folder()
        root['1'] = Folder()
        root['1']['2'] = Folder()
        root['1']['2']['3'] = Folder()

        # Capture every IObjectWillBeRemovedEvent for an IFolder.
        events = []

        def record(context, event):
            events.append([context, event])

        self.config.add_subscriber(record,
                                   [IFolder, IObjectWillBeRemovedEvent])
        del root['1']
        # '1', '2' and '3' each produce one event.
        self.assertEqual(len(events), 3)
Example #9
0
    def add_folder(self):
        """Render the add-folder form; on a valid submit, create the
        folder under the current context and answer 'ok'."""
        POST = self.request.POST
        if 'form.submitted' in POST:
            title = POST.get('title')
            author = POST.get('author')
            # Both fields are required for a valid submission.
            if title and author:
                folder = Folder()
                folder.title = title
                folder.author = author
                folder.type = 'folder'
                folder.modified = _now()
                name = _title_to_name(title, self.context)
                self.context[name] = folder
                return Response('ok')

        # Not submitted, or invalid: (re-)render the form.
        return render_to_response('templates/add_folder.pt', {})
Example #10
0
    def add_file(self):
        """Render the add-file form; on a valid submit, store a new object
        under the current context and answer 'ok'."""
        #XXX this needs to change to use a real File type, with an uploaded
        #    body.
        POST = self.request.POST
        if 'form.submitted' in POST:
            title = POST.get('title')
            author = POST.get('author')
            if title and author:
                # NOTE(review): type stays 'folder' as in the original —
                # presumably a placeholder until the real File type above
                # exists; confirm before changing.
                item = Folder()
                item.title = title
                item.author = author
                item.type = 'folder'
                item.modified = _now()
                name = _title_to_name(title, self.context)
                self.context[name] = item
                return Response('ok')

        # Fall through to the form for GET or invalid input.
        return render_to_response('templates/add_file.pt', {})
Example #11
0
    def make_story(self, key, data=None, links=True):
        """Build a Story for ``key`` from the Jira server and, when link
        ``data`` is supplied, wire up its inward/outward issue links.

        Bug fixed: the previous version executed ``return Story(...)`` as
        its first statement, which made every line below unreachable dead
        code (and that code referenced an unbound ``story`` name).  The
        Story is now bound to a local so the link-building logic can run;
        when ``links`` is false or no ``data`` was provided the story is
        still returned immediately, matching the old observable behavior.
        """
        story = Story(self.server.issue(key, expand='changelog'))

        # Without link data there is nothing more we can do.
        if not links or data is None:
            return story
        transaction.begin()
        for link in data['fields']['issuelinks']:
            if link.has_key('outwardIssue'):
                type = link['type']['name']
                key = link['outwardIssue']['key']
                if not type in story['links']['out'].keys():
                    # First link of this type: create its folder, then
                    # commit before the (slow) story lookup.
                    story['links']['out'][type] = Folder()
                    story['links']['out'][type].key = type
                    transaction.commit()
                    s = self.get_story(key)
                    if not s:
                        continue
                    story['links']['out'][type][key] = s
                else:
                    if not key in story['links']['out'][type].keys():
                        s = self.get_story(key)
                        if not s:
                            continue
                        story['links']['out'][type][key] = s
            elif link.has_key('inwardIssue'):
                type = link['type']['name']
                key = link['inwardIssue']['key']
                if not type in story['links']['in'].keys():
                    story['links']['in'][type] = Folder()
                    story['links']['in'][type].key = type
                    transaction.commit()
                    s = self.get_story(key)
                    if not s:
                        continue
                    story['links']['in'][type][key] = s
                else:
                    if not key in story['links']['in'][type].keys():
                        s = self.get_story(key)
                        if not s:
                            continue
                        story['links']['in'][type][key] = s
        transaction.commit()
        return story
Example #12
0
 def __init__(self, title, startDate, endDate, creator,
              text=u'', location=u'', attendees=None,
              contact_name = u'', contact_email = u'',
              calendar_category=u''):
     """Create a calendar event.

     ``attendees`` previously defaulted to a shared mutable ``[]``, so
     all events built without an explicit attendee list mutated one
     common list.  Default to None and create a fresh list per instance;
     callers passing an explicit list see no change.
     """
     Folder.__init__(self)
     self.title = unicode(title)
     self.startDate = startDate
     self.endDate = endDate
     self.location = location
     # Fresh list per event instead of the shared default.
     self.attendees = [] if attendees is None else attendees
     self.contact_name = contact_name
     self.contact_email = contact_email
     self.creator = unicode(creator)
     self.modified_by = self.creator
     # ``text`` may be explicitly None; store u'' in that case.
     if text is None:
         self.text = u''
     else:
         self.text = unicode(text)
     self.calendar_category = calendar_category
     self['attachments'] = AttachmentsFolder()
Example #13
0
    def __init__(self):
        """Create the people directory: category storage, a catalog with
        its document map, and a default 'All' section/report."""
        super(PeopleDirectory, self).__init__()
        self['categories'] = Folder()  # {id: PeopleCategory}
        self.catalog = CachingCatalog()
        self.catalog.document_map = DocumentMap()
        self.update_indexes()

        # Default configuration: one section holding one report that
        # lists everybody.
        section = PeopleSection('All')
        self['all'] = section
        report = PeopleReport('All')
        section['all'] = report
        report.columns = ('name', 'organization', 'location', 'email')
        self.order = ['all']
Example #14
0
    def __init__(self, url):
        """Initialize the topic and its timestamp/content.
        Verification happens afterward.
        """
        # Reject anything that is not an absolute URL with a path.
        pieces = urlparse(url)
        if not (pieces.scheme and pieces.netloc and pieces.path):
            raise ValueError

        self.url = url

        # Content bookkeeping: nothing fetched yet.
        self.timestamp = None
        self.content_type = ''
        self.content = None
        self.changed = False

        # Subscriber bookkeeping.
        self.subscribers = Folder()
        self.subscriber_count = 0

        self.last_pinged = None
        self.failed = False
        # Treat creation as the first publisher ping.
        self.ping()
Example #15
0
def generate_metrics(root, year, month):
    """Collect one month's metrics and store them as OOBTrees on the
    ``<site>/metrics/<year>/<month>`` folder, creating folders as needed."""
    contenttype = metrics.collect_contenttype_metrics(root, year, month)
    profiles = metrics.collect_profile_metrics(root, year, month)
    users = metrics.collect_user_metrics(root, year, month)
    communities = metrics.collect_community_metrics(root, year, month)

    metrics_folder = find_or_create_metrics_container(root)

    # Ensure the year folder exists and carries its marker interface.
    year_key = str(year)
    year_folder = metrics_folder.get(year_key, None)
    if year_folder is None:
        year_folder = Folder()
        alsoProvides(year_folder, IMetricsYearFolder)
        metrics_folder[year_key] = year_folder

    # Same for the month folder beneath it.
    month_folder = year_folder.get(month_string(month), None)
    if month_folder is None:
        month_folder = Folder()
        alsoProvides(month_folder, IMetricsMonthFolder)
        year_folder[month_string(month)] = month_folder

    # Persist each metric set as its own BTree attribute.
    month_folder.contenttypes = OOBTree(contenttype)
    month_folder.profiles = OOBTree(profiles)
    month_folder.users = OOBTree(users)
    month_folder.communities = OOBTree(communities)
Example #16
0
 def test_edit_w_date_w_editor_w___parent__(self):
     """Editing a parented post records editor/date, awards points and a
     badge, and refreshes the parent's cached 'hotness'."""
     from datetime import datetime
     from datetime import timedelta
     from repoze.folder import Folder
     from .. import models
     parent = Folder()
     created_at = datetime.now()
     edited_at = created_at + timedelta(1)
     author = _User()
     editor = _User()
     # Create the post "at" created_at by pinning models._NOW.
     with _Monkey(models, _NOW=created_at):
         parent['testing'] = post = self._makeOne(author, 'TEXT')
     post.edit('NEW TEXT', editor, edited_at)
     self.failUnless(post.editor is editor)
     self.assertEqual(post.modified, edited_at)
     self.assertEqual(author.points, 50)
     self.assertEqual(editor.points, 20)
     self.assertEqual(editor.badges, {'editor': [None]})
     # The edit must also recompute the parent's cached hotness.
     self.failUnless('hotness' in parent.__dict__)
Example #17
0
def sample_folder(multiplier):
    """Build a fake IMetricsMonthFolder full of sample data; even
    multipliers use one fixture identity, odd multipliers the other."""
    from repoze.folder import Folder
    from zope.interface import alsoProvides
    from osi.interfaces import IMetricsMonthFolder
    folder = Folder()
    alsoProvides(folder, IMetricsMonthFolder)
    folder.contenttypes = sample_content_types(multiplier)
    if multiplier % 2 == 0:
        username = u'marty'
        commid, commtitle = 'delorean', 'Delorean'
    else:
        username = u'mcfly'
        commid, commtitle = 'flux', 'Capacitor'
    folder.profiles = sample_profiles(username, multiplier)
    folder.communities = sample_communities(commid, commtitle, multiplier)
    folder.users = sample_users(multiplier)
    return folder
Example #18
0
    def __init__(self, url):
        """Initialize the topic and its timestamp/content.
        Verification happens afterward.
        """
        # Sanity check: require scheme, host and path in the URL.
        pieces = urlparse(url)
        if not (pieces.scheme and pieces.netloc and pieces.path):
            raise ValueError

        self.url = url
        # No content has been fetched yet.
        self.timestamp = None
        self.content_type = ''
        self.content = None
        self.changed = False
        # No subscribers yet either.
        self.subscribers = Folder()
        self.subscriber_count = 0
        self.last_pinged = None
        self.failed = False
        # Creation counts as the first ping from the publisher.
        self.ping()
Example #19
0
 def __init__(self):
     """Create the root Plungyr object with an empty 'profiles' subfolder."""
     super(Plungyr, self).__init__()
     self['profiles'] = Folder()
Example #20
0
 def add(self, registry, name, obj):
     """Add ``obj`` under ``name``, notifying through the given Pyramid
     registry rather than the global ZCA one."""
     ## We fire events ourselves because 'repoze.folder' uses the
     ## global ZCA registry. Here we use Pyramid registry instead.
     result = BaseFolder.add(self, name, obj, send_events=False)
     registry.notify(ObjectAddedEvent(obj, self, name))
     return result
Example #21
0
 def make_story(self, key, data, links=True):
     """Build, persist and index a Story from raw Jira issue ``data``.

     Returns None for issues without a fix version; otherwise returns
     the Story after filing it under its project/release and, when
     ``links`` is true, wiring up its inward/outward issue links.
     NOTE(review): commits happen at several intermediate points, so a
     failure partway through can leave partially-persisted state.
     """
     # Stories with no fix version are not tracked at all.
     if not data['fields']['fixVersions']:
         return None
     transaction.begin()
     story = Story(key)
     story.id = int(data['id'])
     story.history = History(data['changelog'])
     story.url = data['self']
     story.title = data['fields']['summary']
     story.fix_versions = PersistentList()
     for version in data['fields']['fixVersions']:
         story.fix_versions.append(version['name'])
     story.fix_version = data['fields']['fixVersions']
     # Jira timestamps look like 2014-01-02T03:04:05.678+0000; only the
     # first 23 chars (through milliseconds) are parsed.
     story.created = datetime.datetime.fromtimestamp(time.mktime(
         time.strptime(data['fields']['created'][:23],
         '%Y-%m-%dT%H:%M:%S.%f')))
     story.type = data['fields']['issuetype']['id']
     story.assignee = data['fields']['assignee']
     # Custom fields: presumably developer / rank — confirm against the
     # Jira instance's field configuration.
     story.developer = data['fields']['customfield_13435']
     story.rank = data['fields']['customfield_12242']
     # Optional root-cause custom fields.
     if 'customfield_10722' in data['fields'] and data['fields'][
         'customfield_10722']:
         story.root_cause = data['fields']['customfield_10722'][
             'value'].strip()
     else:
         story.root_cause = ''
     if 'customfield_13330' in data['fields'] and data['fields'][
         'customfield_13330']:
         story.root_cause_details = data['fields']['customfield_13330']
     else:
         story.root_cause_details = ''
     story.scrum_team = None
     if data['fields'].has_key('customfield_11261'):
         if data['fields']['customfield_11261']:
             story.scrum_team = data['fields']['customfield_11261'][
                 'value'].strip()
     else:
         story.scrum_team = None
     # Story points (optional, stored as int when present).
     story.points = None
     if data['fields'].has_key('customfield_10792'):
         story.points = data['fields']['customfield_10792']
     if story.points:
         story.points = int(story.points)
     story.status = int(data['fields']['status']['id'])
     story.project = data['fields']['project']['key']
     # Make sure the project and each release folder exist, then file
     # the story under every one of its fix versions.
     if not story.project in self.cache.data:
         self.cache.data[story.project] = Project(story.project,
             data['fields']['project']['name'])
     project = self.cache.data[story.project]
     for version in story.fix_versions:
         if version in project.keys():
             if not project[version].has_key(story.key):
                 project[version][story.key] = story
         else:
             release = Release()
             release.version = version
             release[story.key] = story
             project[version] = release
     transaction.commit()
     # Index the story in a separate transaction.
     transaction.begin()
     docid = self.cache.document_map.add(
         ['jira', story.project, story.fix_versions[0], story.key])
     self.cache.catalog.index_doc(docid, story)
     transaction.commit()
     if not links:
         return story
     # Link wiring: create per-type folders on demand and attach the
     # linked stories; committed per new type before the slow lookup.
     transaction.begin()
     for link in data['fields']['issuelinks']:
         if link.has_key('outwardIssue'):
             type = link['type']['name']
             key = link['outwardIssue']['key']
             if not type in story['links']['out'].keys():
                 story['links']['out'][type] = Folder()
                 story['links']['out'][type].key = type
                 transaction.commit()
                 s = self.get_story(key)
                 if not s:
                     continue
                 story['links']['out'][type][key] = s
             else:
                 if not key in story['links']['out'][type].keys():
                     s = self.get_story(key)
                     if not s:
                         continue
                     story['links']['out'][type][key] = s
         elif link.has_key('inwardIssue'):
             type = link['type']['name']
             key = link['inwardIssue']['key']
             if not type in story['links']['in'].keys():
                 story['links']['in'][type] = Folder()
                 story['links']['in'][type].key = type
                 transaction.commit()
                 s = self.get_story(key)
                 if not s:
                     continue
                 story['links']['in'][type][key] = s
             else:
                 if not key in story['links']['in'][type].keys():
                     s = self.get_story(key)
                     if not s:
                         continue
                     story['links']['in'][type][key] = s
     transaction.commit()
     return story
Example #22
0
 def __init__(self):
     """Create the two link-direction subfolders ('out' and 'in'), each
     tagged with its own name via a ``key`` attribute."""
     super(Links, self).__init__()
     for direction in ('out', 'in'):
         self[direction] = Folder()
     for direction in ('out', 'in'):
         self[direction].key = direction
Example #23
0
 def pull_issues(self, boards, links, time_range, all_issues, add_only,
     issue_types):
     import pdb; pdb.set_trace()
     store = self.cache.data['issues']
     if all_issues:
         issues = self.server.search_issues('', maxResults=0)
         print 'Refreshing all issues...'
     elif time_range:
         issues = self.server.search_issues('updated > "%s"' % time_range
             , maxResults=0)
         print 'Pulled stories from %s previous' % time_range
     count = len(issues)
     print 'Importing', len(issues), 'stories...'
     for issue in issues:
         if store.has_key(issue.key):
             if add_only:
                 continue
             print count, 'Updating:', issue.key
             transaction.begin()
             story = store[issue.key]
             story.initialize(self.server.issue(
                 issue.key, expand='changelog'))
             transaction.commit()
         else:
             print count, 'Adding:  ', issue.key
             story = Story(self.server.issue(issue.key, expand='changelog'))
             transaction.begin()
             store[story.key] = story
             transaction.commit()
             docid = self.cache.document_map.add(
                 ['jira', 'issues', story.key])
             transaction.begin()
             self.cache.catalog.index_doc(docid, story)
             transaction.commit()
         count = count - 1
         for link in issue.fields.issuelinks:
             transaction.begin()
             if hasattr(link, 'outwardIssue') and link.outwardIssue:
                 type = link.type.name
                 key = link.outwardIssue.key
                 if not type in story['links']['out'].keys():
                     story['links']['out'][type] = Folder()
                     story['links']['out'][type].key = type
                     transaction.commit()
                     if self.cache.data['issues'].has_key(key):
                         s = self.cache.data['issues'][key]
                     else:
                         transaction.begin()
                         s = Story(self.server.issue(key,expand='changelog'))
                         self.cache.data['issues'][key] = s
                         transaction.commit()
                         transaction.begin()
                         docid = self.cache.document_map.add(
                             ['jira', 'issues', key])
                         self.cache.catalog.index_doc(docid, s)
                         transaction.commit()
                     if not s:
                         continue
                     story['links']['out'][type][key] = s
                 else:
                     if not key in story['links']['out'][type].keys():
                         if self.cache.data['issues'].has_key(key):
                             s = self.cache.data['issues'][key]
                         else:
                             transaction.begin()
                             s = Story(self.server.issue(key,expand='changelog'))
                             self.cache.data['issues'][key] = s
                             transaction.commit()
                             transaction.begin()
                             docid = self.cache.document_map.add(
                                 ['jira', 'issues', key])
                             self.cache.catalog.index_doc(docid, s)
                             transaction.commit()
                         if not s:
                             continue
                         story['links']['out'][type][key] = s
             elif hasattr(link, 'inwardIssue') and link.inwardIssue:
                 type = link.type.name
                 key = link.inwardIssue.key
                 if not type in story['links']['in'].keys():
                     story['links']['in'][type] = Folder()
                     story['links']['in'][type].key = type
                     transaction.commit()
                     if self.cache.data['issues'].has_key(key):
                         s = self.cache.data['issues'][key]
                     else:
                         transaction.begin()
                         s = Story(self.server.issue(key,expand='changelog'))
                         self.cache.data['issues'][key] = s
                         transaction.commit()
                         transaction.begin()
                         docid = self.cache.document_map.add(
                             ['jira', 'issues', key])
                         self.cache.catalog.index_doc(docid, s)
                         transaction.commit()
                     if not s:
                         continue
                     story['links']['in'][type][key] = s
                 else:
                     if not key in story['links']['in'][type].keys():
                         if self.cache.data['issues'].has_key(key):
                             s = self.cache.data['issues'][key]
                         else:
                             transaction.begin()
                             s = Story(self.server.issue(key,expand='changelog'))
                             self.cache.data['issues'][key] = s
                             transaction.commit()
                             transaction.begin()
                             docid = self.cache.document_map.add(
                                 ['jira', 'issues', key])
                             self.cache.catalog.index_doc(docid, s)
                             transaction.commit()
                         if not s:
                             continue
                         story['links']['in'][type][key] = s
             transaction.commit()
Example #24
0
class Topic(Persistent):
    """A PubSubHubbub topic: a feed URL whose content the hub fetches,
    stores, and republishes to its subscribers.
    """
    implements(ITopic)

    def __repr__(self):
        return "<Topic %s>" % self.url

    def __init__(self, url):
        """Initialize the topic and its timestamp/content.
        Verification happens afterward.
        """

        # Do some basic sanity checks: require scheme, host and path.

        pieces = urlparse(url)

        if not (pieces.scheme and pieces.netloc and pieces.path):
            raise ValueError

        self.url = url
        # Nothing has been fetched yet.
        self.timestamp = None
        self.content_type = ''
        self.content = None
        self.changed = False
        # Subscribers are stored keyed by their callback URL.
        self.subscribers = Folder()
        self.subscriber_count = 0
        self.last_pinged = None
        self.failed = False
        # Creation counts as the first publisher ping.
        self.ping()

    def fetch(self, hub_url):
        """Fetches the content from the publisher's provided URL"""

        # Identify the hub (and its subscriber count) to the publisher.
        user_agent = "PuSH Hub (+%s; %s)" % (hub_url, self.subscriber_count)

        headers = {'User-Agent': user_agent}

        try:
            response = requests.get(self.url, headers=headers)
            self.failed = False
        except ConnectionError:
            logger.warning('Could not connect to topic URL %s' % self.url)
            self.failed = True
            return

        parsed = self.parse(response.content)

        if not parsed or parsed.bozo:
            # Should probably set a flag or log something here, too.
            raise ValueError

        if not self.content:
            # First fetch: everything is new.
            newest_entries = parsed
            self.changed = True
        else:
            parsed_old = self.parse(self.content)
            # assemble_newest_entries will set changed flag if this isn't
            # the first fetch
            newest_entries = self.assemble_newest_entries(parsed, parsed_old)

        if not self.content_type:
            self.content_type = parsed.version

        if self.changed and self.content:
            self.content = self.generate_feed(newest_entries)
        else:
            self.content = response.content

        self.timestamp = datetime.now()
        logger.info('Fetched content for topic %s', self.url)

    def parse(self, content):
        """Parses a feed into a Python object"""
        if not content:
            return None
        parsed = parse(content)

        return parsed

    def ping(self):
        """Registers the last time a publisher pinged the hub for this topic.
        """
        self.last_pinged = datetime.now()

    def add_subscriber(self, subscriber):
        """Increment subscriber count so reporting on content fetch is easier.
        """
        self.subscriber_count += 1
        self.subscribers.add(subscriber.callback_url, subscriber)

    def remove_subscriber(self, subscriber):
        """Sanely remove subscribers from the count
        """
        # Remove first; a count already at/below zero is a logic error.
        self.subscribers.remove(subscriber.callback_url)
        if self.subscriber_count <= 0:
            raise ValueError
        self.subscriber_count -= 1

    def assemble_newest_entries(self, parsed, parsed_old):
        """Diff the new fetch against the stored feed; returns changed
        metadata with all new/updated entries, newest first, and sets
        ``self.changed`` when anything differs."""
        if not parsed or not parsed_old:
            return None
        compare = FeedComparator(parsed, parsed_old)
        new_entries = compare.new_entries()
        updated_entries = compare.updated_entries()
        metadata = compare.changed_metadata()

        if new_entries or updated_entries or metadata:
            self.changed = True

        all_entries = new_entries + updated_entries
        all_entries.sort(reverse=True, key=lambda entry: entry.updated_parsed)

        metadata['entries'] = all_entries

        return metadata

    def generate_feed(self, parsed_feed):
        """Serialize a parsed feed structure back into an Atom string."""
        # Prefer the feed's rel="self" link; fall back to its main link.
        self_links = [link['href'] for link
                     in parsed_feed['feed']['links']
                     if link['rel'] == u'self']
        if len(self_links) > 0:
            self_link = self_links[0]
        else:
            self_link = parsed_feed['feed']['link']

        new_feed = Atom1FeedKwargs(
            title=parsed_feed['feed']['title'],
            link=self_link,
            description=parsed_feed['feed']['link'],
            author=parsed_feed['feed'].get('author', u'Hub Aggregator')
        )
        for entry in parsed_feed.entries:
            updated = datetime.fromtimestamp(mktime(entry['updated_parsed']))

            # Entries without a title cannot be serialized; skip them.
            try:
                entry['title']
            except KeyError:
                continue

            # pop() the known fields so **entry passes only the leftovers.
            new_feed.add_item(
                entry.pop('title'),
                entry.pop('link'),
                entry.pop('summary', ''),
                pubdate=updated,
                unique_id=entry.pop('id', ''),
                author_name=entry.pop('author', ''),
                category=entry.pop('tags', []),
                **entry
            )
        string = new_feed.writeString(parsed_feed['encoding'])
        return string

    def get_request_data(self):
        """
        Return headers and body content useful for sending to a
        subscriber or listener
        """
        # NOTE(review): this content-type mapping is duplicated in
        # notify_subscribers below — candidate for a shared helper.
        c_type = None
        if 'atom' in self.content_type:
            c_type = 'application/atom+xml'
        elif 'rss' in self.content_type:
            c_type = 'application/rss+xml'

        if c_type is None:
            raise ValueError(
                'Invalid content type. Only Atom or RSS are supported'
            )

        headers = {'Content-Type': c_type}
        body = self.content

        return (headers, body)

    def notify_subscribers(self):
        """
        Notify subscribers to this topic that the feed has been updated.

        This will put the following data into a queue:
            Subscriber callback URL
            The feed content type
            The updated feed entries

        The queue can process the requests as long as it has this information.
        """

        # Nothing to do without subscribers or without a change.
        if not self.subscribers:
            return

        if not self.changed:
            return

        c_type = None
        if 'atom' in self.content_type:
            c_type = 'application/atom+xml'
        elif 'rss' in self.content_type:
            c_type = 'application/rss+xml'

        if c_type is None:
            raise ValueError(
                'Invalid content type. Only Atom or RSS are supported'
            )

        q = Queue(connection=Redis())

        headers = {'Content-Type': c_type}
        body = self.content

        # One queued job per subscriber callback URL.
        for url, subscriber in self.subscribers.items():
            q.enqueue('ucla.jobs.hub.post', url, body, headers)
            logger.debug('Item placed on subscriber queue %s' % (url))

        # We've notified all of our subscribers,
        # so we can set the flag to not notify them again
        # until another change
        self.changed = False
Example #25
0
 def __init__(self, **kw):
     """Initialize folder storage first, then the Content mixin chain."""
     Folder.__init__(self)
     super(Content, self).__init__(**kw)
Example #26
0
 def __init__(self, **kw):
     """Set up Folder storage, then delegate keyword args to Content."""
     Folder.__init__(self)
     super(Content, self).__init__(**kw)
Example #27
0
 def __init__(self, **kw):
     """Initialize an ordered, folderish content object."""
     # An empty tuple turns on order tracking in Folder; setting _order
     # to None would disable stored ordering entirely. See Folder.
     self._order = ()
     Folder.__init__(self)
     super(Content, self).__init__(**kw)
Example #28
0
def create_metrics_container(context):
    """Create the site-level 'metrics' folder, mark it with the container
    interface, and return it."""
    site = find_site(context)
    container = Folder()
    alsoProvides(container, IMetricsContainerFolder)
    site['metrics'] = container
    return container
Example #29
0
 def remove(self, registry, name):
     """Remove the child ``name``, notifying through the given Pyramid
     registry instead of the global ZCA one."""
     ## Cf. add() about firing events ourselves.
     child = self[name]
     registry.notify(ObjectWillBeRemovedEvent(child, self, name))
     return BaseFolder.remove(self, name, send_events=False)
Example #30
0
 def __init__(self, *args, **kwargs):
     """Initialize both bases: content state first, then folder storage
     (which receives all positional/keyword arguments)."""
     BaseContent.__init__(self)
     BaseFolder.__init__(self, *args, **kwargs)
Example #31
0
class Topic(Persistent):
    implements(ITopic)

    def __repr__(self):
        """Debug representation showing the topic's URL."""
        return "<Topic {0}>".format(self.url)

    def __init__(self, url):
        """Initialize the topic and its timestamp/content.
        Verification happens afterward.

        ``url`` must be an absolute URL with a scheme, host and path;
        otherwise ValueError is raised.
        """

        # Do some basic sanity checks before storing any state.
        pieces = urlparse(url)

        if not (pieces.scheme and pieces.netloc and pieces.path):
            raise ValueError(
                'Topic URL must include a scheme, host and path: %r' % url)

        self.url = url
        self.timestamp = None        # set by fetch() on success
        self.content_type = ''       # feed version reported by the parser
        self.content = None          # raw feed body from the last fetch
        self.changed = False         # True when new/updated entries were seen
        self.subscribers = Folder()  # callback URL -> subscriber object
        self.subscriber_count = 0
        self.last_pinged = None      # set by ping() below
        self.failed = False          # True when the last fetch failed to connect
        self.ping()

    def fetch(self, hub_url):
        """Fetches the content from the publisher's provided URL.

        On a connection error, sets ``self.failed`` and returns quietly.
        On an unparseable feed, logs a warning and raises ValueError.
        On success, updates ``content``, ``content_type``, ``changed``
        and ``timestamp``.
        """

        # Identify the hub and advertise the audience size to the publisher.
        user_agent = "PuSH Hub (+%s; %s)" % (hub_url, self.subscriber_count)

        headers = {'User-Agent': user_agent}

        try:
            response = requests.get(self.url, headers=headers)
            self.failed = False
        except ConnectionError:
            # Lazy %-args: only formatted when the record is emitted.
            logger.warning('Could not connect to topic URL %s', self.url)
            self.failed = True
            return

        parsed = self.parse(response.content)

        if not parsed or parsed.bozo:
            # Log which topic is broken so operators can track it down.
            logger.warning('Could not parse feed for topic %s', self.url)
            raise ValueError('Could not parse feed for topic %s' % self.url)

        if not self.content:
            # First fetch for this topic: everything is new.
            newest_entries = parsed
            self.changed = True
        else:
            parsed_old = self.parse(self.content)
            # assemble_newest_entries will set changed flag if this isn't
            # the first fetch
            newest_entries = self.assemble_newest_entries(parsed, parsed_old)

        if not self.content_type:
            self.content_type = parsed.version

        if self.changed and self.content:
            # Re-serialize only the new/updated entries for subscribers.
            self.content = self.generate_feed(newest_entries)
        else:
            self.content = response.content

        self.timestamp = datetime.now()
        logger.info('Fetched content for topic %s', self.url)

    def parse(self, content):
        """Parse a raw feed body into a Python object.

        Returns None when *content* is empty or falsy.
        """
        return parse(content) if content else None

    def ping(self):
        """Registers the last time a publisher pinged the hub for this topic.

        Called once on creation and again whenever the publisher notifies
        the hub.
        """
        # NOTE(review): naive local time, consistent with fetch(); confirm
        # whether UTC is intended before changing either.
        self.last_pinged = datetime.now()

    def add_subscriber(self, subscriber):
        """Register *subscriber* under its callback URL and bump the count.

        The count is cached separately so reporting on content fetch is
        easier (it is advertised in fetch()'s User-Agent header).
        """
        # Store first: if add() raises (e.g. a duplicate callback URL),
        # the count is not left out of sync with the folder contents.
        self.subscribers.add(subscriber.callback_url, subscriber)
        self.subscriber_count += 1

    def remove_subscriber(self, subscriber):
        """Sanely remove subscribers from the count.

        Raises ValueError when the count is already zero -- and does so
        *before* touching the subscriber folder, so a bogus removal does
        not leave the folder and the count inconsistent (the original
        removed the entry first, then raised).
        """
        if self.subscriber_count <= 0:
            raise ValueError('No subscribers left to remove')
        self.subscribers.remove(subscriber.callback_url)
        self.subscriber_count -= 1

    def assemble_newest_entries(self, parsed, parsed_old):
        """Compare the newly fetched feed with the previous one.

        Returns the changed feed metadata with an ``'entries'`` key
        holding new + updated entries sorted newest-first, or None if
        either feed is missing.  Sets ``self.changed`` when anything
        (entries or metadata) differs.
        """
        if not parsed or not parsed_old:
            return None
        compare = FeedComparator(parsed, parsed_old)
        new_entries = compare.new_entries()
        updated_entries = compare.updated_entries()
        # Lazy %-args: only formatted when DEBUG logging is enabled.
        logger.debug('%s new entries, %s updated entries in %s',
                     len(new_entries), len(updated_entries), self.url)
        metadata = compare.changed_metadata()

        if new_entries or updated_entries or metadata:
            self.changed = True

        all_entries = new_entries + updated_entries
        all_entries.sort(reverse=True, key=lambda entry: entry.updated_parsed)

        metadata['entries'] = all_entries

        return metadata

    def generate_feed(self, parsed_feed):
        """Serialize *parsed_feed* back into an Atom document string.

        Assumes *parsed_feed* has a feedparser-style structure: a 'feed'
        mapping with 'title'/'link'/'links', an 'entries' list and an
        'encoding' -- TODO confirm against assemble_newest_entries().
        """
        # Prefer the feed's rel="self" link; fall back to its main link.
        self_links = [
            link['href'] for link in parsed_feed['feed']['links']
            if link['rel'] == u'self'
        ]
        if len(self_links) > 0:
            self_link = self_links[0]
        else:
            self_link = parsed_feed['feed']['link']

        new_feed = Atom1FeedKwargs(title=parsed_feed['feed']['title'],
                                   link=self_link,
                                   description=parsed_feed['feed']['link'],
                                   author=parsed_feed['feed'].get(
                                       'author', u'Hub Aggregator'))
        for entry in parsed_feed.entries:
            # updated_parsed is a time-struct; convert it to a datetime.
            updated = datetime.fromtimestamp(mktime(entry['updated_parsed']))

            # Entries without a title cannot be serialized; skip them.
            try:
                entry['title']
            except KeyError:
                continue

            # pop() the known fields so whatever remains in the entry can
            # be forwarded as extra keyword arguments.
            new_feed.add_item(entry.pop('title'),
                              entry.pop('link'),
                              entry.pop('summary', ''),
                              pubdate=updated,
                              unique_id=entry.pop('id', ''),
                              author_name=entry.pop('author', ''),
                              category=entry.pop('tags', []),
                              **entry)
        string = new_feed.writeString(parsed_feed['encoding'])
        return string

    def get_request_data(self):
        """
        Return headers and body content useful for sending to a
        subscriber or listener.

        Returns a ``(headers, body)`` tuple; raises ValueError when the
        stored content type is neither Atom nor RSS.
        """
        if 'atom' in self.content_type:
            content_type = 'application/atom+xml'
        elif 'rss' in self.content_type:
            content_type = 'application/rss+xml'
        else:
            raise ValueError(
                'Invalid content type. Only Atom or RSS are supported')

        return ({'Content-Type': content_type}, self.content)

    def notify_subscribers(self):
        """
        Notify subscribers to this topic that the feed has been updated.

        This will put the following data into a queue:
            Subscriber callback URL
            The feed content type
            The updated feed entries

        The queue can process the requests as long as it has this information.
        """

        # Nothing to do without subscribers or without a change.
        if not self.subscribers:
            return

        if not self.changed:
            return

        # Delegate to get_request_data() instead of duplicating the
        # content-type negotiation; it raises ValueError for unsupported
        # feed types, exactly as the inline copy did.
        headers, body = self.get_request_data()

        q = Queue(connection=Redis())

        for url, subscriber in self.subscribers.items():
            q.enqueue('ucla.jobs.hub.post', url, body, headers)
            # Lazy %-args: only formatted when DEBUG logging is enabled.
            logger.debug('Item placed on subscriber queue %s', url)

        # We've notified all of our subscribers,
        # so we can set the flag to not notify them again
        # until another change
        self.changed = False