Ejemplo n.º 1
0
    def roster_updated(self, item=None):
        '''Rebuild the contact-list group tree from self.roster.

        Empties the root group, re-creates one Group per roster group with a
        contact object per roster item, then prunes filtered groups and
        contacts.  *item* is unused as a parameter; it is reused as the loop
        variable below.
        '''
        with traceguard:
            roster = self.roster
            from util.primitives.structures import oset
            with self.root_group.frozen():
                jcontact   = self.contact_class
                buddies    = self.buddies
                root_group = self.root_group

                # Start from an empty root; groups are re-attached below.
                del root_group[:]

                # None keys the root (default) group.
                groups = odict({None: self.root_group})
    #
                for item in roster:
                    #roster groups
                    i_groups = oset(filter(None, item.groups))
                    #profile group
                    p_groups = oset(filter(None, (profile.get_contact_info(self.buddies[item.jid], 'group'),)))
                    #plus default group: profile override wins, then the
                    #roster's group, then the root group (None)
                    group = (p_groups | i_groups | oset([None]))[0]
                    #CAS: optimize this to not create a group every time.
                    g = groups.setdefault(group, Group(group, self, group))
                    contact = jcontact(self.buddies[item.jid](item), group)
                    g.append(contact)

                # Attach unfiltered groups to the root and drop filtered
                # contacts from every group (including the root itself).
                for _gid, g in groups.iteritems():
                    if not self.filter_group(g):
                        if g is not root_group:
                            root_group.append(g)
                    g[:] = [c for c in g if not self.filter_contact(c)]
Ejemplo n.º 2
0
    def roster_updated(self, item=None):
        '''Rebuild the contact-list group tree from self.roster.

        Empties the root group, re-creates one Group per roster group with a
        contact object per roster item, then prunes filtered groups and
        contacts.  *item* is unused as a parameter; it is reused as the loop
        variable below.
        '''
        with traceguard:
            roster = self.roster
            from util.primitives.structures import oset
            with self.root_group.frozen():
                jcontact = self.contact_class
                buddies = self.buddies
                root_group = self.root_group

                # Start from an empty root; groups are re-attached below.
                del root_group[:]

                # None keys the root (default) group.
                groups = odict({None: self.root_group})
                #
                for item in roster:
                    #roster groups
                    i_groups = oset(filter(None, item.groups))
                    #profile group
                    p_groups = oset(
                        filter(None, (profile.get_contact_info(
                            self.buddies[item.jid], 'group'), )))
                    #plus default group: profile override wins, then the
                    #roster's group, then the root group (None)
                    group = (p_groups | i_groups | oset([None]))[0]
                    #CAS: optimize this to not create a group every time.
                    g = groups.setdefault(group, Group(group, self, group))
                    contact = jcontact(self.buddies[item.jid](item), group)
                    g.append(contact)

                # Attach unfiltered groups to the root and drop filtered
                # contacts from every group (including the root itself).
                for _gid, g in groups.iteritems():
                    if not self.filter_group(g):
                        if g is not root_group:
                            root_group.append(g)
                    g[:] = [c for c in g if not self.filter_contact(c)]
Ejemplo n.º 3
0
    def on_data_changed(self, *args):
        'Invoked when the observable data list changes.'
        # The widget may already be torn down by the time the observer fires.
        if wx.IsDestroyed(self):
            return

        with self.Frozen():
            sz = self.Sizer
            rows = self.rows

            oldrowcount = len(rows)
            scrolly = self.GetScrollPos(wx.VERTICAL)

            # Diff incoming data against the rows currently displayed.
            dataset = oset(self.data)
            prevset = oset(row.data for row in rows)

            oldset = prevset - dataset  # rows to destroy
            newset = dataset - prevset  # rows to create

            # Detach every row from the sizer; destroy the stale ones.
            for i_, row in list(enumerate(rows)):
                sz.Detach(row)
                if row.data in oldset:
                    if i_ == self.Hovered:
                        self._hovered = -1
                    rows.remove(row)
                    row.ReleaseAllCapture()
                    row.on_close()
                    row.Destroy()

            # Create a new row control for each data element, and add it to
            # sizer.
            for elem in newset:
                control = self.row_control(self, elem)
                rows.append(control)

            # Re-sort the row controls to match the order of self.data.
            idxs = {}
            for i, d in enumerate(self.data):
                idxs[d] = i
            rows.sort(key=lambda r: idxs.get(r.data))

            for row in rows:
                sz.Add(row, *self.sizer_args)

            self.Layout()

            # Restore the old scroll position.
            newrowcount = len(rows)
            if oldrowcount != 0 and oldrowcount != newrowcount:
                self.SetupScrolling(False, True, *self.scroll_sizes)

                # on MSW, scrolling immediately doesn't work.
                wx.CallAfter(self.Scroll, 0, scrolly)
Ejemplo n.º 4
0
    def on_data_changed(self, *args):
        'Invoked when the observable data list changes.'
        # The widget may already be torn down by the time the observer fires.
        if wx.IsDestroyed(self):
            return

        with self.Frozen():
            sz = self.Sizer
            rows = self.rows

            oldrowcount = len(rows)
            scrolly = self.GetScrollPos(wx.VERTICAL)

            # Diff incoming data against the rows currently displayed.
            dataset = oset(self.data)
            prevset = oset(row.data for row in rows)

            oldset = prevset - dataset  # rows to destroy
            newset = dataset - prevset  # rows to create

            # Detach every row from the sizer; destroy the stale ones.
            for i_, row in list(enumerate(rows)):
                sz.Detach(row)
                if row.data in oldset:
                    if i_ == self.Hovered:
                        self._hovered = -1
                    rows.remove(row)
                    row.ReleaseAllCapture()
                    row.on_close()
                    row.Destroy()

            # Create a new row control for each data element, and add it to
            # sizer.
            for elem in newset:
                control = self.row_control(self, elem)
                rows.append(control)

            # Re-sort the row controls to match the order of self.data.
            idxs = {}
            for i, d in enumerate(self.data): idxs[d] = i
            rows.sort(key = lambda r: idxs.get(r.data))

            for row in rows:
                sz.Add(row, *self.sizer_args)

            self.Layout()

            # Restore the old scroll position.
            newrowcount = len(rows)
            if oldrowcount != 0 and oldrowcount != newrowcount:
                self.SetupScrolling(False, True, *self.scroll_sizes)

                # on MSW, scrolling immediately doesn't work.
                wx.CallAfter(self.Scroll, 0, scrolly)
Ejemplo n.º 5
0
    def fix_group_c8(self):
        '''
        Sometimes SSI group index ids (0xc8) are little endian instead of the
        expected big endian.

        This function attempts to guess when that is happening, and fix them.

        Keys of this mapping are (group_id, item_id) pairs; groups have
        item_id == 0 and (0, 0) is the root group.  The 0xc8 TLV of each
        group is rewritten in place; nothing is returned.
        '''
        groups = [key for key in self if key[1] == 0]
        # Bypass any __getitem__ override on this dict subclass.
        dgi = dict.__getitem__

        for g_id, i_id in groups:
            if not g_id:  #root group
                #find all group ssis
                members = [key for key in groups if key != (0, 0)]
                #extract group ids
                m_ids = set(x[0] for x in members)
                assert (g_id, i_id) == (0, 0)
                #get group ids from root group
                gm_ids = dgi(self, (0, 0)).tlvs.get(0xc8, [])
            else:
                #find all the ssis which match this group id
                members = [
                    key for key in self if key[0] == g_id and key[1] != 0
                ]
                #extract member item ids
                m_ids = set(x[1] for x in members)
                #grab the member ids the group thinks it has
                gm_ids = dgi(self, (g_id, i_id)).tlvs.get(0xc8, [])
                #if they're the same, move on.
            if m_ids == set(gm_ids) and len(m_ids) == len(gm_ids):
                continue

            #map the group's ids to their position in the known list.
            known_locs = dict((y, x) for x, y in enumerate(oset(gm_ids)))
            locations = {}
            for possible in m_ids:
                #for each real id:
                if possible in known_locs:
                    #if the group has a location for it, use that one.
                    locations[possible] = known_locs[possible]
                    continue
                #otherwise, see if we have an inverted location for it.
                inverted = byteswap(possible)
                #even if somehow there was a collision, they'll just be put next to each other.
                #close enough.
                if inverted in known_locs:
                    locations[possible] = known_locs[inverted]
                    continue
                #otherwise, throw it at the end.
                locations[possible] = len(m_ids)
            new_gm_ids = sorted(m_ids, key=locations.__getitem__)
            #setting this should do no harm, since the only thing that can happen to an
            #ssi is deletion or modification.  deleted doesn't matter, modified fixes this
            #on the server as well.
            dgi(self, (g_id, i_id)).tlvs[0xc8] = new_gm_ids
Ejemplo n.º 6
0
    def __init__(self, frame):
        '''Attach this handler to *frame* and set up tab-notification state.'''
        wx.EvtHandler.__init__(self)

        # Remember the owning frame and its notebook widget.
        self.frame = frame
        self.notebook = frame.notebookPanel.notebook

        # No merge pending yet; notified tabs keep notification order.
        self.mergetimer = None
        notified = oset()
        self.notifiedtabs = notified
        self.titletimer = TitleBarTimer(frame, notified)

        self.BindEventsToFrame()
Ejemplo n.º 7
0
    def __init__(self, frame):
        '''Bind this event handler to *frame* and initialize tab-notification
        bookkeeping (ordered set of notified tabs plus the title-bar timer).'''
        wx.EvtHandler.__init__(self)

        self.frame = frame
        self.notebook = self.frame.notebookPanel.notebook

        # No merge pending yet; notifiedtabs preserves notification order.
        self.mergetimer = None
        self.notifiedtabs = oset()
        self.titletimer = TitleBarTimer(self.frame, self.notifiedtabs)

        self.BindEventsToFrame()
Ejemplo n.º 8
0
    def fix_group_c8(self):
        """
        Sometimes SSI group index ids (0xc8) are little endian instead of the
        expected big endian.

        This function attempts to guess when that is happening, and fix them.
        """
        groups = [key for key in self if key[1] == 0]
        dgi = dict.__getitem__

        for g_id, i_id in groups:
            if not g_id:  # root group
                # find all group ssis
                members = [key for key in groups if key != (0, 0)]
                # extract group ids
                m_ids = set(x[0] for x in members)
                assert (g_id, i_id) == (0, 0)
                # get group ids from root group
                gm_ids = dgi(self, (0, 0)).tlvs.get(0xC8, [])
            else:
                # find all the ssis which match this group id
                members = [key for key in self if key[0] == g_id and key[1] != 0]
                # extract member item ids
                m_ids = set(x[1] for x in members)
                # grab the member ids the group thinks it has
                gm_ids = dgi(self, (g_id, i_id)).tlvs.get(0xC8, [])
                # if they're the same, move on.
            if m_ids == set(gm_ids) and len(m_ids) == len(gm_ids):
                continue

            # map the group's ids to their position in the known list.
            known_locs = dict((y, x) for x, y in enumerate(oset(gm_ids)))
            locations = {}
            for possible in m_ids:
                # for each real id:
                if possible in known_locs:
                    # if the group has a location for it, use that one.
                    locations[possible] = known_locs[possible]
                    continue
                # otherwise, see if we have an inverted location for it.
                inverted = byteswap(possible)
                # even if somehow there was a collision, they'll just be put next to each other.
                # close enough.
                if inverted in known_locs:
                    locations[possible] = known_locs[inverted]
                    continue
                # otherwise, throw it at the end.
                locations[possible] = len(m_ids)
            new_gm_ids = sorted(m_ids, key=locations.__getitem__)
            # setting this should do no harm, since the only thing that can happen to an
            # ssi is deletion or modification.  deleted doesn't matter, modified fixes this
            # on the server as well.
            dgi(self, (g_id, i_id)).tlvs[0xC8] = new_gm_ids
Ejemplo n.º 9
0
    def UpdateAccountItems(self):
        '''Sync the cached menu items with the currently connected accounts
        and return them as a list.'''
        # Lazily create the ordered cache on first use.
        if not hasattr(self, 'acctitems'):
            self.acctitems = oset()

        items = self.acctitems

        connected = oset(profile.account_manager.connected_accounts)
        current_ids = set(item.id for item in items)

        added = connected - current_ids
        removed = current_ids - connected

        # Drop items whose account went away (iterate a copy while mutating).
        for item in set(items):
            if item.id in removed:
                items.remove(item)

        # Add a menu item for each new account that allows contact adds.
        for account in added:
            if account.allow_contact_add:
                items.add(SimpleMenuItem([account.serviceicon.Resized(16), account.username], id=account))

        return list(items)
Ejemplo n.º 10
0
def run(args):
    '''Build per-locale translation zips and publish the yaml indexes
    (index.yaml per template version, update.yaml, site.yaml) under dist.
    '''
    src = path(args.src)
    revs = enumerate_revisions(src)
    dist = path(args.dist)

    feature_pth = dist / args.feature

    from StringIO import StringIO
    from collections import defaultdict
    from util.primitives.structures import oset
    # Group catalogs by the template version they were generated from,
    # preserving discovery order.
    versions = oset()
    groups = defaultdict(list)
    for domain, locale, pofile, catalog, template_version in versioned_pos('.'):
        versions.add(template_version)
        groups[template_version].append((domain, locale, pofile, catalog))

    for template_version in versions:
        plugins = {}
        template_root = feature_pth / template_version
        for domain, locale, pofile, catalog in groups[template_version]:
            revid, revno = revs[src.relpathto(pofile).expand()]
            out_zip = template_root / locale / '-'.join([domain, template_version, locale, str(revno)]) + '.zip'
            if not out_zip.parent.isdir():
                out_zip.parent.makedirs()
            # Compile the catalog to .mo and zip it together with info.yaml.
            mobuf = StringIO()
            write_mo(mobuf, catalog)
            zbuf = StringIO()
            z = zipfile.ZipFile(zbuf, 'w', zipfile.ZIP_DEFLATED)
            z.writestr('-'.join([domain, locale]) + '.mo', mobuf.getvalue())
            infoyaml = info_yaml(args.feature, domain, locale)
            try:
                # "English name (native name)", e.g. "German (Deutsch)".
                infoyaml['name'] = u'%s (%s)' % (babel.Locale(locale).get_display_name('en'),
                                                 babel.Locale(locale).get_display_name(locale))
            except Exception:
                pass
            infoyaml['pot_version'] = template_version
            infoyaml['bzr_revno'] = revno
            infoyaml['bzr_revid'] = revid
            infoyaml['catalog_format'] = 'mo'
            infoyaml_bin = syck.dump(infoyaml)
            z.writestr(INFOYAML, infoyaml_bin)
            z.close()
            zout = zbuf.getvalue()
            with out_zip.open('wb') as out:
                out.write(zout)
            # Also publish info.yaml beside the zip for metadata fetches.
            infoyaml_pth =(out_zip.parent/INFOYAML)
            with infoyaml_pth.open('wb') as infoyaml_out:
                infoyaml_out.write(infoyaml_bin)
            plugins[infoyaml['shortname']] = dict(
                                                  meta = httprelpath(template_root.relpathto(infoyaml_pth)),
                                                  dist_types = ZIP_DIST,
                                                  zip = dict(
                                                             location = httprelpath(template_root.relpathto(out_zip))
                                                             )
                                                  )
        idxyaml = template_root / 'index.yaml'
        idxbin = syck.dump(dict(plugins=plugins))
        with idxyaml.open('wb') as idx_out:
            idx_out.write(idxbin)
    update_pth = feature_pth / 'update.yaml'
    # NOTE(review): idxyaml leaks out of the loop above -- update.yaml points
    # at the *last* template version's index.yaml and this raises NameError
    # when there are no versions at all.  Presumably "latest wins" is
    # intended; confirm before changing.
    with open(update_pth, 'wb') as update_out:
        update_out.write(syck.dump({'all':{'release':httprelpath(feature_pth.relpathto(idxyaml))}}))
    try:
        # Merge into the existing published site.yaml if it can be fetched.
        site_d = syck.load(wget('http://s3.amazonaws.com/update.digsby.com/' + dist.name + '/site.yaml'))
    except Exception:
        traceback.print_exc()
        site_d = {}
    try:
        featurs = site_d['features']
    except KeyError:
        featurs = site_d['features'] = {}
    featurs[args.feature]= {
                           'name':args.name,
                           'url': httprelpath(dist.relpathto(update_pth)),
                           }
    with open(dist / 'site.yaml', 'wb') as site_out:
        site_out.write(syck.dump(site_d))
Ejemplo n.º 11
0
def get_accts_text(type_=''):
    '''Return a comma-separated list of nice protocol names for the accounts
    of the given kind (attribute "<type_>accounts" on the account manager).'''
    from common import profile, protocolmeta

    accounts = getattr(profile.account_manager, type_ + 'accounts')
    found = oset(acct.protocol for acct in accounts)
    # Preferred protocols (protocol_ordering) come first, then the rest.
    ordered = (protocol_ordering & found) | found
    return ', '.join(protocolmeta.nice_name_for_proto(proto) for proto in ordered)
Ejemplo n.º 12
0
 def _filtered_contacts(self):
     '''Return {groupname: ordering} with duplicates removed (order kept)
     and guest accounts (…guest.digsby.org) filtered out.'''
     result = {}
     for groupname, ordering in self.order['contacts'].iteritems():
         deduped = list(oset(ordering))
         result[groupname] = [c for c in deduped if not c.endswith('guest.digsby.org')]
     return result
Ejemplo n.º 13
0
    def update_data(self, data):
        """
        Updates this store's current state with incoming data from the network.

        data should be a mapping containing 'metacontacts', 'order', and 'info'
        structures (see comment at top of file)
        """
        rebuild = False

        # This method needs to substitute some defaultdicts for the normal
        # dictionaries that come back from the server.

        # Metacontact information

        #if data['metacontacts']
        mc_dict = data.get('metacontacts', {})
        if not isinstance(mc_dict, dict):
            log.critical('invalid metacontacts dictionary')
            mc_dict = {}

        # Contact information like SMS numbers and email addresses.
        self.info = defaultdict(dict)

        si = self.info
        if 'info' in data:
            for (k, v) in data['info'].iteritems():
                if isinstance(k, str):
                    cmpk = k.decode('utf8')
                else:
                    cmpk = k

                if not isinstance(cmpk, unicode):
                    continue

                if cmpk.startswith('Meta') or any((cmpk.endswith('_' + prot)
                                                   for prot in protocols.iterkeys())):
                    if any(v.values()):
                        si[k] = v

            for c, v in si.iteritems():
                for attr in ('email', 'sms'):
                    if attr in v:
                        self.contact_info_changed(c, attr, v[attr])

        self.metacontacts = MetaContactManager(self, mc_dict)
        if hasattr(self, 'new_sorter'):
            on_thread('sorter').call(self.new_sorter.removeAllContacts)
        rebuild = True

        # Manual ordering of groups
        try:
            self.order = deepcopy(data['order'])
            self.order['groups'] = list(oset(self.order['groups']))
            contacts = self._filtered_contacts()
            self.order['contacts'] = defaultdict(list)
            self.order['contacts'].update(contacts)
        except Exception:
            log.critical('error receiving order')
            self._init_order()

        # note: loading tofrom data from the network is deprecated. this data
        # now goes out to disk. see save/load_local_data
        if 'tofrom' in data and isinstance(data['tofrom'], dict) and \
            'im' in data['tofrom'] and 'email' in data['tofrom']:
            self.dispatch.set_tofrom(deepcopy(data['tofrom']))

        if rebuild:
            self.rebuild()

        self.update_order()
Ejemplo n.º 14
0
    def collect(self, *roots):
        '''
        For contacts which are in metacontacts, remove them from the original
        protocol groups and add them to a new group.

        Returns that new group full of DGroups holding MetaContacts.
        '''

        # Remove meta contacts
        mc_root = DGroup('Root', protocols = [], ids = [])

        b2m = self.buddies_to_metas# =  = defaultdict(set) #map buddy description to set of metas

        groupnames = oset()

        # For each protocol root group
        cs = defaultdict(list)
        mc_gnames = self.groupnames
        metacontact_objs = self.metacontact_objs

        def maybe_remove_contact(contact, group):
            # If this buddy belongs to any metacontact, collect it under each
            # of those metas and pull it out of its protocol group.
            if (contact.name.lower(), contact.service) in b2m:
                for meta in b2m[(contact.name.lower(), contact.service)]:
                    cs[meta.id].append(contact)

                group.remove(contact)
                return True

            return False

        from contacts.Group import GroupTypes

        for root in roots:
            # Find the corresponding group
            for group in list(root):
                gflag = False

                if group is root:
                    continue

                if isinstance(group, GroupTypes):
                    for elem in list(group):
                        gflag |= maybe_remove_contact(elem, group)

                    if gflag and (group.name not in groupnames):
                        groupnames.add(group.name)
                else:
                    # contact sitting directly under the root
                    elem = group
                    if maybe_remove_contact(elem, root):
                        groupnames.add(get_fakeroot_name())

        assert not set(cs.keys()) - set(self.keys())

        for id in self.iterkeys():
            elems = cs[id]
            # Arrange the collected buddies in this meta's stored buddy order.
            order = [b.tag for b in self[id].buddies]
            elems = list(sorted(elems, key = lambda elem: order.index((elem.name.lower(), elem.service))))

            out = []
            hidden = []
            for tag in order:
                online = False
                while elems and (elems[0].name.lower(), elems[0].service) == tag:
                    b = elems.pop(0)
                    if not online:
                        out.append(b)
                        online = True
                    else:
                        hidden.append(b)
                if not online:
                    # No live buddy for this slot: reuse the meta's previous
                    # OfflineBuddy if one exists, otherwise make a fresh one.
                    old = [o for o in metacontact_objs[id] if
                           (isinstance(o, OfflineBuddy) and (o.name.lower(), o.service) == tag)]
                    if old:
                        out.append(old[0])
                    else:
                        out.append(OfflineBuddy(*tag))

            metacontact_objs[id].set_new(out, hidden)

        groups = {}
        for m in metacontact_objs.itervalues():
            # Only place metas that have at least one non-offline buddy.
            if any(not isinstance(b, OfflineBuddy) for b in m):
                for gname in self[m.id].groups:
                    try:
                        g = groups[gname[0]]
                    except KeyError:
                        groups[gname[0]] = g = DGroup(gname[0])
                    g.append(m)

        glen = len(groups)
        nextroot = DGroup('Root')
        # Emit groups in encounter order first, then any remaining
        # metacontact groups not seen during collection.
        for gname in groupnames:
            if gname in groups:
                nextroot.append(groups.pop(gname))

        for gname in set(g[0] for g in mc_gnames) - set(groupnames):
            if gname in groups:
                nextroot.append(groups.pop(gname))

        mc_root.extend(nextroot)
#        assert len(nextroot) == glen
        return mc_root
Ejemplo n.º 15
0
def get_accts_text(type_=''):
    '''Comma-separated pretty protocol names for accounts of kind *type_*.'''
    from common import profile, protocolmeta

    attr = type_ + 'accounts'
    found = oset(account.protocol for account in getattr(profile.account_manager, attr))
    # Preferred protocols first, remaining ones keep their order.
    found = (protocol_ordering & found) | found
    names = [protocolmeta.nice_name_for_proto(name) for name in found]
    return ', '.join(names)
Ejemplo n.º 16
0
    return UrlQuery(NEWSFEED_CAMPAIGN(), utm_source='merged')


def ACCT_BASE(protocol):
    '''Campaign URL tagged with *protocol* as the traffic source.'''
    source = str(protocol)
    return UrlQuery(NEWSFEED_CAMPAIGN(), utm_source=source)


def COUNT_BASE(type_):
    '''Campaign URL whose utm_source is the given count type.'''
    base = NEWSFEED_CAMPAIGN()
    return UrlQuery(base, utm_source=type_)


def clicksrc(base, source):
    '''Tag *base* with utm_term=source to record where the click came from.'''
    tagged = UrlQuery(base, utm_term=source)
    return tagged


# Preferred protocol display order: 'fbchat' before 'facebook'; all other
# protocols follow in their natural order (see get_accts_text).
protocol_ordering = oset(['fbchat', 'facebook'])


def get_acct_name(protocol):
    '''Return the human-readable display name for *protocol*.'''
    from common import protocolmeta
    nice_name = protocolmeta.nice_name_for_proto
    return nice_name(protocol)


def get_accts_text(type_=''):
    '''Return a comma-separated list of nice protocol names for the accounts
    of the given kind (attribute "<type_>accounts" on the account manager).'''
    from common import profile, protocolmeta
    protos = oset(a.protocol for a in getattr(profile.account_manager, type_ +
                                              'accounts'))
    # Preferred protocols (protocol_ordering) first, then the rest in order.
    protos = (protocol_ordering & protos) | protos
    return ', '.join(protocolmeta.nice_name_for_proto(name) for name in protos)

Ejemplo n.º 17
0
    def get_stream_success(self, value, update=False, num_tries=0):
        '''Handle a successful multiquery fetch of the Facebook stream.

        Normalizes the raw query results in *value* into self.last_stream,
        refreshes alerts/status, and when update=True merges the freshly
        fetched posts with the previously cached ones.  On any failure the
        error is delegated to self.get_stream_error.
        '''
        from util import default_timer
        self.end_get_stream = default_timer()
        log.debug('stream get took %f seconds', self.end_get_stream - self.start_get_stream)
        stream = value
#        v = []
#        for val in value:
#            v.append(simplejson.loads(val, object_hook=facebookapi.storageify))
#        alerts, stream = v[:2]
        self.last_alerts = Alerts(self.acct)
        from facebookapi import simplify_multiquery
        try:
#            print stream
            new_stream = simplify_multiquery(stream,keys={'posts':None,
#                                                          'comments':None,
                                                          'latest_posts':None,
                                                          'profiles':'id',
#                                                          'now':None,
                                                          'events':list,
                                                          'status':None,
                                                          'notifications': None,
                                                          'apps' : 'app_id',
                                                          'post_filter_keys':None,
                                                           'filter_keys':'filter_key'})# 'birthdays':'uid',})
            import util.primitives.funcs as funcs
#            new_stream['comments'] = dict(funcs.groupby(new_stream['comments'], lambda x: x['post_id']))
            new_stream['comments'] = {}
            new_stream['post_ids'] = post_ids = {}
            # Fill in display names for known apps that are missing one.
            for k, v in new_stream['filter_keys'].iteritems():
                if not v.get('name'):
                    v['name'] = KNOWN_APPS_LOOKUP.get(k, v.get('name'))
            new_stream['filter_keys'].update([(k, dict(name=d['name'],
                  icon_url=skin.get(d['icon_url']).path.url())) for k,d in FORCED_KEYS.items()])
            # Map each post id to the ordered set of filter keys it appears in.
            new_stream['post_filter_keys'] = dict((post_id, structures.oset(p['filter_key'] for p in vals))
                                             for post_id, vals in
                                             funcs.groupby(new_stream['post_filter_keys'], lambda x: x['post_id']))
            for post in new_stream['posts']:
                post['comments']['count'] = int(post['comments']['count'])
            # Re-key apps by integer app_id.
            new_stream['apps'], apps_str = {}, new_stream['apps']
            for app_id, app_dict in apps_str.items():
                new_stream['apps'][int(app_id)] = app_dict
            try:
                new_stream['now'] = new_stream['now'][0].values()[0]
            except (IndexError, KeyError) as _e:
#                print_exc()
                import time
                new_stream['now'] = time.time()
            self.last_alerts.event_invites &= set(new_stream['events'])
            self.last_status = (new_stream['status'][:1] or [Ostorage([('message', ''), ('status_id', 0), ('time', 0)])])[0]
            self.last_status['uid'] = self.digsby.uid
            if not isinstance(new_stream['posts'], list):
                log.error('stream: %r', stream)
                raise ValueError('Facebook returned type=%r of posts' % type(new_stream['posts']))
            for post in new_stream['posts']:     #get the new ones
                post_ids[post['post_id']] = post
            if 'notifications' in new_stream:
                import lxml
                # Rewrite each notification title: resolve links against
                # facebook.com, then turn <a> tags into styled <span>s and
                # strip hovercard attributes.
                for notification in new_stream['notifications']:
                    title_html = notification.get('title_html', None)
                    if title_html is None:
                        continue
                    s = lxml.html.fromstring(title_html)
                    s.make_links_absolute('http://www.facebook.com', resolve_base_href = False)
                    for a in s.findall('a'):
                        a.tag = 'span'
#                        _c = a.attrib.clear()
                        a.attrib['class'] = 'link notification_link'
                    [x.attrib.pop("data-hovercard", None) for x in s.findall(".//*[@data-hovercard]")]
                    notification['title_html'] = lxml.etree.tostring(s)
                self.last_alerts.update_notifications(new_stream['notifications'])
            if update:
                # Merge the fresh posts with the cached ones, capped at
                # POSTS_LIMIT; oset preserves first-seen order of post ids.
                latest_posts = filter(None, (post_ids.get(post_id, self.last_stream.post_ids.get(post_id)) for post_id in
                                             structures.oset([post['post_id'] for post in new_stream['latest_posts']] +
                                              [post['post_id'] for post in self.last_stream.posts])))[:POSTS_LIMIT]
                new_stream['posts'] = latest_posts
                for post in new_stream['posts']:     #update the dict with the combined list
                    post_ids[post['post_id']] = post
                for key in self.last_stream.comments:
                    if key in post_ids and key not in new_stream.comments:
                        new_stream.comments[key] = self.last_stream.comments[key]
                for key in self.last_stream.profiles:
                    if key not in new_stream.profiles:
                        new_stream.profiles[key] = self.last_stream.profiles[key]
            trim_profiles(new_stream)
            for p in new_stream.posts: p.id = p.post_id # compatability hack for ads
            self.last_stream = new_stream
            self.social_feed.new_ids([p['post_id'] for p in self.last_stream.posts])
        except Exception, e:
            traceback.print_exc()
            return self.get_stream_error(num_tries=num_tries, error=e)
Ejemplo n.º 18
0
 def __init__(self, provider):
     '''Track logins for *provider*; starts inactive with no logins.'''
     self.provider = provider
     self.active = False
     self.logins = oset()
Ejemplo n.º 19
0
 def _get_order(self):
     '''Return account ids in the service-provider container's order,
     followed by any ids known only to this collection.'''
     import services.service_provider as sp
     own_ids = oset(acct.id for acct in self)
     container = sp.ServiceProviderContainer(self.profile)
     provider_ids = oset(container.get_order())
     # oset union keeps left operand's ordering first.
     return list(provider_ids | own_ids)
Ejemplo n.º 20
0
    def collect(self, *roots):
        '''
        For contacts which are in metacontacts, remove them from the original
        protocol groups and add them to a new group.

        Returns that new group full of DGroups holding MetaContacts.
        '''

        # Remove meta contacts
        mc_root = DGroup('Root', protocols=[], ids=[])

        b2m = self.buddies_to_metas  # =  = defaultdict(set) #map buddy description to set of metas

        # names of groups from which at least one contact was collected
        groupnames = oset()

        # For each protocol root group
        cs = defaultdict(list)  # meta id -> contacts collected for that meta
        mc_gnames = self.groupnames
        metacontact_objs = self.metacontact_objs

        def maybe_remove_contact(contact, group):
            # If (name, service) identifies a metacontact member, collect the
            # contact under each of its metas and drop it from its group.
            if (contact.name.lower(), contact.service) in b2m:
                for meta in b2m[(contact.name.lower(), contact.service)]:
                    cs[meta.id].append(contact)

                group.remove(contact)
                return True

            return False

        from contacts.Group import GroupTypes

        for root in roots:
            # Find the corresponding group
            for group in list(root):
                gflag = False

                if group is root:
                    continue

                if isinstance(group, GroupTypes):
                    for elem in list(group):
                        gflag |= maybe_remove_contact(elem, group)

                    if gflag and (group.name not in groupnames):
                        groupnames.add(group.name)
                else:
                    # contact
                    elem = group
                    if maybe_remove_contact(elem, root):
                        groupnames.add(get_fakeroot_name())

        # every collected contact must belong to a known metacontact id
        assert not set(cs.keys()) - set(self.keys())

        # Rebuild each metacontact's buddy list in its stored tag order;
        # absent buddies are represented by (reused or new) OfflineBuddy
        # placeholders, duplicates for the same tag go to `hidden`.
        for id in self.iterkeys():
            elems = cs[id]
            order = [b.tag for b in self[id].buddies]
            elems = list(
                sorted(elems,
                       key=lambda elem: order.index(
                           (elem.name.lower(), elem.service))))

            out = []
            hidden = []
            for tag in order:
                online = False
                while elems and (elems[0].name.lower(),
                                 elems[0].service) == tag:
                    b = elems.pop(0)
                    if not online:
                        out.append(b)
                        online = True
                    else:
                        hidden.append(b)
                if not online:
                    # reuse an existing OfflineBuddy for this tag if present
                    old = [
                        o for o in metacontact_objs[id]
                        if (isinstance(o, OfflineBuddy) and (o.name.lower(),
                                                             o.service) == tag)
                    ]
                    if old:
                        out.append(old[0])
                    else:
                        out.append(OfflineBuddy(*tag))

            metacontact_objs[id].set_new(out, hidden)

        # Place each metacontact with at least one online buddy into DGroups
        # keyed by its first group-name component.
        groups = {}
        for m in metacontact_objs.itervalues():
            if any(not isinstance(b, OfflineBuddy) for b in m):
                for gname in self[m.id].groups:
                    try:
                        g = groups[gname[0]]
                    except KeyError:
                        groups[gname[0]] = g = DGroup(gname[0])
                    g.append(m)

        # Order the result: groups we collected from first, then any
        # remaining metacontact group names.
        glen = len(groups)
        nextroot = DGroup('Root')
        for gname in groupnames:
            if gname in groups:
                nextroot.append(groups.pop(gname))

        for gname in set(g[0] for g in mc_gnames) - set(groupnames):
            if gname in groups:
                nextroot.append(groups.pop(gname))

        mc_root.extend(nextroot)
        #        assert len(nextroot) == glen
        return mc_root
Ejemplo n.º 21
0
 def get_stream_success(self, value, update=False, num_tries=0):
     """Handle a successful Facebook stream fetch: normalize the multiquery
     result, refresh alerts/status/notifications, and store it as
     self.last_stream. When `update` is true, merge with the previous
     stream instead of replacing it. On any error, delegates to
     self.get_stream_error (passing num_tries through for retry logic)."""
     from util import default_timer
     self.end_get_stream = default_timer()
     log.debug('stream get took %f seconds',
               self.end_get_stream - self.start_get_stream)
     stream = value
     #        v = []
     #        for val in value:
     #            v.append(simplejson.loads(val, object_hook=facebookapi.storageify))
     #        alerts, stream = v[:2]
     self.last_alerts = Alerts(self.acct)
     from facebookapi import simplify_multiquery
     try:
         #            print stream
         # Flatten the raw multiquery payload; values give the key field
         # to index each sub-result by (None = keep as list).
         new_stream = simplify_multiquery(
             stream,
             keys={
                 'posts': None,
                 #                                                          'comments':None,
                 'latest_posts': None,
                 'profiles': 'id',
                 #                                                          'now':None,
                 'events': list,
                 'status': None,
                 'notifications': None,
                 'apps': 'app_id',
                 'post_filter_keys': None,
                 'filter_keys': 'filter_key'
             })  # 'birthdays':'uid',})
         import util.primitives.funcs as funcs
         #            new_stream['comments'] = dict(funcs.groupby(new_stream['comments'], lambda x: x['post_id']))
         new_stream['comments'] = {}
         new_stream['post_ids'] = post_ids = {}
         # Fill in missing filter-key names from the known-apps lookup.
         for k, v in new_stream['filter_keys'].iteritems():
             if not v.get('name'):
                 v['name'] = KNOWN_APPS_LOOKUP.get(k, v.get('name'))
         new_stream['filter_keys'].update([
             (k,
              dict(name=d['name'],
                   icon_url=skin.get(d['icon_url']).path.url()))
             for k, d in FORCED_KEYS.items()
         ])
         # post id -> ordered set of filter keys it appears under
         new_stream['post_filter_keys'] = dict(
             (post_id, structures.oset(p['filter_key'] for p in vals))
             for post_id, vals in funcs.groupby(
                 new_stream['post_filter_keys'], lambda x: x['post_id']))
         for post in new_stream['posts']:
             post['comments']['count'] = int(post['comments']['count'])
         # Re-key apps by integer app id.
         new_stream['apps'], apps_str = {}, new_stream['apps']
         for app_id, app_dict in apps_str.items():
             new_stream['apps'][int(app_id)] = app_dict
         try:
             new_stream['now'] = new_stream['now'][0].values()[0]
         except (IndexError, KeyError) as _e:
             #                print_exc()
             # Server time missing from the payload; fall back to local time.
             import time
             new_stream['now'] = time.time()
         self.last_alerts.event_invites &= set(new_stream['events'])
         self.last_status = (new_stream['status'][:1] or [
             Ostorage([('message', ''), ('status_id', 0), ('time', 0)])
         ])[0]
         self.last_status['uid'] = self.digsby.uid
         if not isinstance(new_stream['posts'], list):
             log.error('stream: %r', stream)
             raise ValueError('Facebook returned type=%r of posts' %
                              type(new_stream['posts']))
         for post in new_stream['posts']:  #get the new ones
             post_ids[post['post_id']] = post
         if 'notifications' in new_stream:
             # Sanitize notification HTML: absolute links, <a> -> <span>,
             # and strip hovercard attributes.
             import lxml
             for notification in new_stream['notifications']:
                 title_html = notification.get('title_html', None)
                 if title_html is None:
                     continue
                 s = lxml.html.fromstring(title_html)
                 s.make_links_absolute('http://www.facebook.com',
                                       resolve_base_href=False)
                 for a in s.findall('a'):
                     a.tag = 'span'
                     #                        _c = a.attrib.clear()
                     a.attrib['class'] = 'link notification_link'
                 [
                     x.attrib.pop("data-hovercard", None)
                     for x in s.findall(".//*[@data-hovercard]")
                 ]
                 notification['title_html'] = lxml.etree.tostring(s)
             self.last_alerts.update_notifications(
                 new_stream['notifications'])
         if update:
             # Merge latest posts with the previously cached stream,
             # preferring fresh copies, capped at POSTS_LIMIT.
             latest_posts = filter(None, (post_ids.get(
                 post_id, self.last_stream.post_ids.get(post_id)
             ) for post_id in structures.oset(
                 [post['post_id'] for post in new_stream['latest_posts']] +
                 [post['post_id']
                  for post in self.last_stream.posts])))[:POSTS_LIMIT]
             new_stream['posts'] = latest_posts
             for post in new_stream[
                     'posts']:  #update the dict with the combined list
                 post_ids[post['post_id']] = post
             for key in self.last_stream.comments:
                 if key in post_ids and key not in new_stream.comments:
                     new_stream.comments[key] = self.last_stream.comments[
                         key]
             for key in self.last_stream.profiles:
                 if key not in new_stream.profiles:
                     new_stream.profiles[key] = self.last_stream.profiles[
                         key]
         trim_profiles(new_stream)
         for p in new_stream.posts:
             p.id = p.post_id  # compatability hack for ads
         self.last_stream = new_stream
         self.social_feed.new_ids(
             [p['post_id'] for p in self.last_stream.posts])
     except Exception, e:
         traceback.print_exc()
         return self.get_stream_error(num_tries=num_tries, error=e)
Ejemplo n.º 22
0
 def _get_order(self):
     """Merge the service-provider ordering with the ids held here:
     provider order wins, ids unique to this collection come after."""
     import services.service_provider as sp
     container = sp.ServiceProviderContainer(self.profile)
     provider_order = oset(container.get_order())
     local_order = oset(acct.id for acct in self)
     return list(provider_order | local_order)
Ejemplo n.º 23
0
    def get_ordered(self, new=()):
        """Compute a per-provider ordering of accounts from self.order.

        Buckets accounts by type (im/email/social), sorts each bucket by the
        stored order, topologically sorts the providers implied by those
        chains, then rewrites self.order flattened by provider. Accounts in
        `new` (at most one, whose provider has other accounts) are excluded.
        Returns the ordered provider list, or None if no order is stored.
        """
        if self.order is None:
            return
        order = self.order
        # account id -> its position in the stored ordering
        acct_position = dict((v,k) for (k,v) in enumerate(order))
        from collections import defaultdict
        types = defaultdict(list)
        provider_list = self.existing_sps.values()
        provider_lookup = dict()
        account_lookup = dict()
        for provider in provider_list:
            for type_, acct in provider.accounts.items():
                if acct in new:
                    assert len(new) == 1
                    assert len(provider.accounts) > len(new)
                    continue
                types[type_].append(acct)
                provider_lookup[acct.id] = provider
                account_lookup[acct.id] = acct
        # unknown ids sort last (position 1000)
        for val in types.values():
            val.sort(key = lambda a: acct_position.get(a.id, 1000))
        loc = dict(enumerate(provider_list))
        from util.primitives.mapping import dictreverse
        loc2 = dictreverse(loc)
        chains = [types['im'], types['email'], types['social']]

        #this adds just a little more information about the relationship between
        #im/email/social accounts.
        total_chain = oset(account_lookup[id_] for id_ in order if id_ in account_lookup)
        for chain in chains:
            total_chain.update(chain)
        chains.append(total_chain)

        # map each chain of accounts to a chain of provider indices
        chains = [oset([loc2[provider_lookup[acct.id]] for acct in type_]) for type_ in chains]

        #enforce that if there is a previous ordering between two nodes,
        #then the reverse ordering is discarded
#        partial = set()
#        chains2 = []
#        for chain in chains:
#            chain2 = []
#            blacklist = []
#            for i, a in enumerate(chain):
#                if a in blacklist:
#                    continue
#                for b in chain[i:]:
#                    if a == b:
#                        continue
#                    node = (a,b)
#                    revnode = (b,a)
#                    if revnode in partial:
#                        #the conflict doesn't exist until we get to b
#                        #and it's farther down than this one, so discard b.
#                        _none = blacklist.append(b)
#                    else:
#                        _none = partial.add(node)
#                else:
#                    _none = chain2.append(a)
#            _none = chains2.append(chain2)
#            #_none is for pasting into the console, consumed return values aren't shown.
#        import util.primitives.topological_sort as tsort
#        order = tsort.topological_sort_chains(chains2)
        import util.primitives.topo_sort as tsort
        order = tsort.topological_sort_chains(chains)
        provider_accts = [loc[i] for i in order]

        # Flatten back to account ids, im/email/social within each provider.
        out = []
        for prov in provider_accts:
            [out.append(a.id) for a in
             [prov.accounts.get(t, None) for t in
              ['im', 'email', 'social']]
             if a is not None]
        self.order = out
        return provider_accts
Ejemplo n.º 24
0
#ACTION_LINKS = [[dict(text=text, href=href) for text,href in l] for l in ACTION_LINKS1]

def MERGED_URL():
    """Newsfeed campaign URL tagged with the 'merged' utm_source."""
    campaign = NEWSFEED_CAMPAIGN()
    return UrlQuery(campaign, utm_source='merged')

def ACCT_BASE(protocol):
    """Newsfeed campaign URL whose utm_source names the given protocol."""
    source = str(protocol)
    return UrlQuery(NEWSFEED_CAMPAIGN(), utm_source=source)

def COUNT_BASE(type_):
    """Newsfeed campaign URL whose utm_source is the given count type."""
    campaign = NEWSFEED_CAMPAIGN()
    return UrlQuery(campaign, utm_source=type_)

def clicksrc(base, source):
    """Tag *base* with a utm_term identifying where the click came from."""
    tagged = UrlQuery(base, utm_term=source)
    return tagged

# Preferred display order for protocols; names listed here are surfaced
# first when building the accounts text.
protocol_ordering = oset(['fbchat', 'facebook'])

def get_acct_name(protocol):
    """Human-readable display name for *protocol*."""
    from common import protocolmeta
    nice_name = protocolmeta.nice_name_for_proto(protocol)
    return nice_name

def get_accts_text(type_=''):
    """Comma-separated display names of the profile's accounts of the
    given type, with protocols in protocol_ordering listed first."""
    from common import profile, protocolmeta
    accounts = getattr(profile.account_manager, type_ + 'accounts')
    present = oset(acct.protocol for acct in accounts)
    # preferred protocols first (if present), then the rest in found order
    ordered = (protocol_ordering & present) | present
    names = [protocolmeta.nice_name_for_proto(proto) for proto in ordered]
    return ', '.join(names)

def get_acct_properties(href):
    s_text  = get_accts_text('social')
    im_text = get_accts_text('')
    e_text  = get_accts_text('email')