def doCommandLine(complexType=True, debug=False, **kw):
    """Build a Storage of code-generation options, mimicking the
    (options, args) pair an option parser would return.

    Keyword arguments override the defaults below.
    """
    from util import Storage

    if debug:
        SetDebugCallback(None, None, None, None)

    if complexType:
        SetPyclassMetaclass(
            None, None, None, None,
            module='ZSI.generate.pyclass',
            metaclass='pyclass_type',
        )

    defaults = dict(
        file=None,
        url=None,
        schema=False,
        simple_naming=False,
        clientClassSuffix=None,
        aname=None,
        pyclassMapModule=None,
        address=False,
        extended=False,
        types=None,
        output_directory='.',
    )
    options = Storage(defaults)
    options.update(kw)

    # empty tuple stands in for positional "args"
    return options, ()
def export_csv(sources, min_date, max_date, place_id_filters, csv_path):
    """Export place-id/timestamp records to CSV.

    If ``min_date`` is given, export the single [min_date, max_date]
    window to ``csv_path``.  Otherwise treat ``csv_path`` as a directory
    and back-fill one CSV per day, skipping days that already have a
    "<YYYY-MM-DD>*.csv" file.
    """
    if min_date:
        # explicit window: convert both bounds from UTC and export once
        min_date = from_utc(min_date)
        max_date = from_utc(max_date)
        place_id_to_timestamps = Storage().load_sources(sources, min_timestamp=min_date, max_timestamp=max_date)
        export_place_id_to_timestamps_csv(place_id_to_timestamps, min_date, csv_path, place_id_filters=place_id_filters)
    else:
        # collect dates that already have a csv file so we don't re-export them
        existing_dates = set()
        for root, dirs, files in os.walk(csv_path):
            for fn in files:
                if fn.endswith(".csv"):
                    try:
                        # filenames are expected to start with an ISO date
                        existing_dates.add(
                            datetime.datetime.strptime(fn[:10], "%Y-%m-%d").date()
                        )
                    except ValueError:
                        pass

        # walk day by day from the first recorded date up to yesterday
        yesterday = to_utc(datetime.datetime.now() - datetime.timedelta(days=1)).date()
        current_date = datetime.date(2020, 3, 24)  # my earliest records
        print(f"exporting {current_date} - {yesterday}")
        print(f"already existing: {existing_dates}")
        while current_date <= yesterday:
            if current_date not in existing_dates:
                min_date = datetime.datetime(current_date.year, current_date.month, current_date.day)
                # full day window: [00:00:00, 23:59:59]
                max_date = min_date + datetime.timedelta(days=1) - datetime.timedelta(seconds=1)
                place_id_to_timestamps = Storage().load_sources(sources, min_timestamp=min_date, max_timestamp=max_date)
                export_place_id_to_timestamps_csv(
                    place_id_to_timestamps, min_date, csv_path, place_id_filters=place_id_filters
                )
            current_date += datetime.timedelta(days=1)
def load_pref_style(prefname):
    """Load the font and colors stored under a style preference.

    Returns ``(font, foreground, background)``, falling back to the
    default message font and black-on-white when the preference is
    missing or malformed.
    """
    try:
        stylepref = pref(prefname)
        if isinstance(stylepref, basestring):
            # old-style pref: a single font description string
            style = Storage(Font = makeFont(stylepref))
        else:
            style = Storage(stylepref)
    except Exception:
        print_exc()
        # BUGFIX: was "style = {}" -- a plain dict has no .Font attribute,
        # so the fallback path itself raised AttributeError below.  Supply
        # the default font explicitly instead.
        style = Storage(Font = default_msg_font())

    if type(style.Font) is tuple:
        # fonts may be persisted as tuples; rebuild the real font object
        try:
            font = tuple_to_font(style.Font)
        except Exception:
            print_exc()
            font = default_msg_font()
    else:
        font = style.Font

    # missing or invalid colors fall back to black on white
    fgc = try_this(lambda: wx.Colour(*style.TextColour), None) or wx.BLACK
    bgc = try_this(lambda: wx.Colour(*style.BackgroundColour), None) or wx.WHITE

    return font, fgc, bgc
def _update_ccard_album(self, elt):
    'Update the "album" section of the contact card from XML element elt.'
    album = self.space.setdefault('album', Storage())

    # children typed "Photo" become individual photo records
    photo_entries = []
    for child in elt:
        if child._attrs.get('type', '') == 'Photo':
            photo_entries.append(Storage((e._name, e._cdata) for e in child))

    # untyped children are plain album attributes
    album.update((e._name, e._cdata) for e in elt if 'type' not in e._attrs)
    album.photos = photo_entries
def _update_ccard_musiclist(self, elt):
    'Update the "musiclist" section of the contact card from XML element elt.'
    mlist = self.space.setdefault('musiclist', Storage())

    # children typed "MusicListEntry" become individual song records
    entries = [Storage((e._name, e._cdata) for e in song)
               for song in elt
               if song._attrs.get('type', '') == 'MusicListEntry']

    # untyped children are plain musiclist attributes
    mlist.update((e._name, e._cdata) for e in elt if 'type' not in e._attrs)
    mlist.songs = entries
def __init__(self, mqtt_client: MQTTClient, device_sessions: typing.Dict[str, Session]):
    """Background discovery thread.

    Keeps a pool of known devices backed by a local database table and
    publishes over the given MQTT client.
    """
    super().__init__(name="discovery", daemon=True)
    self.__mqtt_client = mqtt_client
    self.__device_sessions = device_sessions
    self.__device_pool: typing.Dict[str, Device] = {}
    self.__publish_flag = False
    self.__lock = threading.Lock()
    self.__local_storage = Storage(
        conf.Discovery.db_path,
        "devices",
        (Discovery.__devices_table,),
    )
def _update_ccard_profile(self, elt):
    'Update the "profiles" section of the contact card from XML element elt.'
    profiles = self.space.setdefault('profiles', Storage())

    for child in elt:
        kind = child._attrs.get('type', '')
        if kind.endswith('Profile'):
            # e.g. "GeneralProfile" -> profiles.general (strip "Profile")
            section = profiles.setdefault(kind.lower()[:-7], Storage())
            section.update((e._name, e._cdata) for e in child)

    # untyped children are plain top-level profile attributes
    profiles.update((e._name, e._cdata) for e in elt if 'type' not in e._attrs)
def _update_ccard_booklist(self, elt):
    'Update the "booklist" section of the contact card from XML element elt.'
    blist = self.space.setdefault('booklist', Storage())

    # children typed "BookListEntry" become individual book records
    entries = [Storage((e._name, e._cdata) for e in book)
               for book in elt
               if book._attrs.get('type', '') == 'BookListEntry']

    # untyped children are plain booklist attributes
    blist.update((e._name, e._cdata) for e in elt if 'type' not in e._attrs)
    blist.books = entries
def _update_ccard_blog(self, elt):
    'Update the "blog" section of the contact card from XML element elt.'
    blog = self.space.setdefault('blog', Storage())

    # children typed "Post" become individual blog-post records
    entries = [Storage((e._name, e._cdata) for e in post)
               for post in elt
               if post._attrs.get('type', '') == 'Post']

    # untyped children are plain blog attributes
    blog.update((e._name, e._cdata) for e in elt if 'type' not in e._attrs)
    blog.posts = entries
def _update_ccard_genericlist(self, elt):
    'Append a generic list (with its entries) parsed from XML element elt.'
    all_lists = self.space.setdefault('gen_lists', [])

    # children typed "GenericListEntry" become the list's entries
    items = [Storage((e._name, e._cdata) for e in entry)
             for entry in elt
             if entry._attrs.get('type', '') == 'GenericListEntry']

    # untyped children describe the list itself
    parsed = Storage((e._name, e._cdata) for e in elt if 'type' not in e._attrs)
    parsed.entries = items
    all_lists.append(parsed)
def handle_packet(self, pkt, session):
    """Translate an HTTP-relayed Yahoo packet into the binary-protocol
    handler; command 241 / status 0 triggers a server update."""
    from util import Storage

    header = Storage()
    header.command = int(pkt['Command'])
    header.status = int(pkt['Status'])

    # session id field is "<account_id>-<session_id>"
    account_id, session_id = session['SessionId'].split('-', 1)
    self.account_id = account_id
    header.session_id = self.session_id = int(session_id)

    payload = pkt._cdata.strip().encode('utf-8')
    YahooConnectionBase.handle_packet(self, header, payload)

    if header.command == 241 and header.status == 0:
        self.server_update()
def __init__(self, profile):
    """Account manager for a profile's IM/email/social accounts.

    Wires observers for profile state changes, prepares per-type account
    lists ('im', 'em', 'so'), and hooks service-provider ordering.
    """
    Observable.__init__(self)
    self.accounts_loaded = False
    self.profile = profile
    self.connected_accounts = ObservableList()
    self.reconnect_timers = {}

    # holds "cancel" objects from Popups
    self.cancellers = {}

    self.profile.add_observer(self.on_state_change, 'state')
    self.profile.add_observer(self.on_offline_change, 'offline_reason')
    self.profile.add_observer(self.profile_state_changed, 'state')

    import wx
    # shortly after the buddy list first shows, release delayed account changes
    wx.GetApp().OnBuddyListShown.append(lambda *a, **k: Timer(.25, threaded(self.release_accounts), *a, **k).start())

    self._hash = sentinel
    self.got_accounts = False
    self.acct_calls = Delegate()
    self.delay_accounts = True
    self.acct_delay_lock = RLock()
    self._all_acct_hash = {}
    self.last_server_order = None

    # one Storage per account type: 'im', 'em' (email), 'so' (social)
    self._all_accounts = Storage()
    for type_ in ('im', 'em', 'so'):
        s = Storage(accounts = ObservableList(), old = [])
        setattr(self._all_accounts, type_, s)

        # when the order of accounts changes, or accounts are added or deleted,
        # calls profile.accounts_changed('im', list)
        s.accounts.add_observer(getattr(self, type_ + '_accounts_changed'))

    # convenient aliases into the per-type observable lists
    self.accounts = self._all_accounts.im.accounts
    self.emailaccounts = self._all_accounts.em.accounts
    self.socialaccounts = self._all_accounts.so.accounts

    self.buddywatcher = BuddyWatcher()

    import services.service_provider as sp
    container = sp.ServiceProviderContainer(self.profile)
    container.on_order_changed += self._set_order
def info_email(self, info):
    """Collect email-account settings from the dialog into ``info``.

    Only sections whose controls exist on this dialog (mail client,
    incoming server, SMTP) are filled in.
    """
    # polling frequency is entered in minutes; stored in seconds
    info.update(Storage(updatefreq = int(self.updatefreq.Value)*60))

    if hasattr(self, 'mailclient'):
        assert isinstance(self.mailclient, basestring)
        info.update(dict(mailclient = self.mailclient,
                         custom_inbox_url = self.custom_inbox_url,
                         custom_compose_url = self.custom_compose_url))

    if hasattr(self, 'emailserver'):
        # email server information
        # key becomes e.g. "imapserver"/"popserver" plus the matching port;
        # a blank port is stored as '' rather than failing int()
        servertype = self.protocolinfo.needs_server.lower()
        info.update({servertype + 'server': self.emailserver.Value,
                     servertype + 'port' : int(self.emailport.Value) \
                         if self.emailport.Value else '',
                     'require_ssl': self.require_ssl.Value})

    if hasattr(self, 'smtp_server'):
        info.update(email_address = self.email_address.Value,
                    smtp_server = self.smtp_server.Value,
                    smtp_port = int(self.smtp_port.Value) if self.smtp_port.Value else '',
                    smtp_require_ssl = self.smtp_require_ssl.Value)

        # reuse the incoming credentials for SMTP when "same" is checked
        # NOTE(review): nesting reconstructed from mangled source -- confirm
        # this block belongs inside the smtp_server section.
        if self.smtp_same.Value:
            info.update(smtp_username = self.name.Value,
                        smtp_password = self.password.Value)
        else:
            info.update(smtp_username = self.smtp_username.Value,
                        smtp_password = self.smtp_password.Value)
def get_iterator(self):
    'Return the feed-item generator, after letting plugins wrap/transform it.'
    info = Storage(id=self.id,
                   context=self.context,
                   iterator=self.get_feed_items())
    info = hooks.reduce('social.feed.iterator', info)
    return info.iterator
def benchmark(fbdata): from util import Storage account = Storage(protocol='fb20', connection=Storage(last_stream=fbdata['stream'], last_alerts=fbdata['alerts'], last_status=fbdata['status'])) from fb20.fbacct import FBIB def doit(): before = clock() FBIB(account, cache=False).get_html(None) return clock() - before print 'first ', doit() print 'second', doit()
def __init__(self, name, status = None, protocol = 'aim', capabilities=None):
    'A fake buddy for GUI testing; status and flags are randomized when not given.'
    Observable.__init__(self)

    self.remote_alias = self.name = name
    self.mockprofile = getattr(MockProfiles, name, '')

    self.buddy = Storage()
    self.buddy.name = name
    self.buddy.nice_name = name
    self.buddy.profile = self.mockprofile

    # resolve the skin icon to its path and PIL image
    icon = skin.get('BuddiesPanel.BuddyIcons.NoIcon')
    self.icon_path = icon.path
    self.icon = icon.PIL

    self.id = 5
    self.status_message = random.choice(status_messages)

    if status:
        chosen = status
    else:
        chosen = random.choice(statuses)
    self.sightly_status = self.status_orb = self.status = chosen
    self.buddy.away = self.status == 'away'

    self.protocol = MockProtocol(protocol)
    self.protocol.icq = random.choice([True, False])
    self.protocol.username = self.protocol.name

    if capabilities:
        self.mockcaps = capabilities
    else:
        self.mockcaps = [caps.BLOCKABLE, caps.EMAIL, caps.FILES,
                         caps.IM, caps.PICTURES, caps.SMS]

    self.online_time = None
    self.buddy.protocol = self.protocol
    self.caps = self.mockcaps
def get_SI2_anchor(image):
    'Collect whichever of offset/valign/halign the image defines into a Storage.'
    anchor = Storage()
    for attr in ('offset', 'valign', 'halign'):
        if hasattr(image, attr):
            anchor[attr] = getattr(image, attr)
    return anchor
def make_format_storage(font, fgc, bgc):
    'Bundle a wx font plus foreground/background colors into a format Storage.'
    is_bold = font.Weight == wx.BOLD
    is_italic = font.Style == wx.ITALIC
    return Storage(backgroundcolor=bgc,
                   foregroundcolor=fgc,
                   face=font.FaceName,
                   size=font.PointSize,
                   underline=font.Underlined,
                   bold=is_bold,
                   italic=is_italic,
                   family=FamilyNameFromFont(font))
def encoder(fmtstr):
    """Convert a formatted string to plain text carrying a single style.

    Returns None implicitly when the input has no 'singleformat'
    representation.
    """
    singleformat = fmtstr.format_as('singleformat')
    if singleformat is not None:
        from gui.uberwidgets.formattedinput2.fontutil import StorageToStyle
        # normalize the stored format into a Storage before conversion
        formatstorage = singleformat['format']
        if not isinstance(formatstorage, Storage):
            formatstorage = Storage(formatstorage)
        textattr = StorageToStyle(formatstorage)
        # NOTE(review): 'fmt' is not defined in this function -- presumably a
        # closure or module-level variable; confirm it is in scope (or whether
        # this should be fmtstr).
        return RTFToX().Convert(singleformat['text'], fmt, 'plaintext', textattr)
def info(self):
    """Collect the popup-notification fields edited by this dialog into a
    Storage, filling blank title/message from each other or the status.
    """
    # Allows all text fields in this dialog to be optional.
    #
    # If "title" isn't specified but message is, then title becomes
    # the first line of message.
    #
    # If message is blank and title is not, message becomes title.
    #
    # If both are blank, both become the status.
    #
    status = self.status.db_val

    if not self.is_exception:
        title = self.title.Value
        # whitespace-only counts as empty
        if not title or title.isspace():
            title = None

    message = self.message.Value
    if not message or message.isspace():
        message = None

    if not self.is_exception:
        if title is None:
            title = message.split('\n')[0] if message is not None else status
    else:
        title = None

    if message is None:
        message = title if title is not None else _(status)

    s = Storage(message = message,
                status = status,
                format = self.message.Format)

    if not self.is_exception:
        # while editing exceptions, there isn't a title.
        s.title = title
        s.exceptions = self.exceptions.Exceptions if self.diffcheck.IsChecked() else {}

    from pprint import pformat
    log.info(pformat(dict(s)))
    return s
def find_attachments(email):
    'Return {filename: Storage(data, content_type)} for attachment parts of an email.'
    attachments = {}
    for part in email:
        # only parts explicitly marked as attachments
        disposition = part.get('Content-Disposition')
        if disposition is not None and 'attachment' in disposition:
            attachments[part.get_filename()] = Storage(
                data=part.get_payload(decode=True),
                content_type=part.get_content_type())
    return attachments
def __init__(self, client, io_loop=None):
    'Bind the connection to its client; request/parse state starts empty.'
    self.client = client
    self.client_config = client.client_config

    # default to the current IOLoop when none is supplied
    if io_loop:
        self._io_loop = io_loop
    else:
        self._io_loop = IOLoop.current()

    self.stream = None
    self._connection_timeout_handler = None
    self._header_data = None
    self._body_data = None
    self._message = Storage()
def StyleToStorage(textattr):
    'Convert a wx TextAttr into the format Storage used by the formatting code.'
    f = textattr.Font
    return Storage(
        backgroundcolor=tuple(textattr.BackgroundColour),
        foregroundcolor=tuple(textattr.TextColour),
        family=FamilyNameFromFont(f),
        face=f.FaceName,
        size=f.PointSize,
        underline=f.Underlined,
        bold=f.Weight == wx.BOLD,
        italic=f.Style == wx.ITALIC,
    )
def __init__(self, name, url, whenDoneCB, onCancelCB, onErrorCB):
    'An incoming HTTP file transfer attributed to the Digsby servers.'
    self.whenDoneCB = whenDoneCB
    self.onCancelCB = onCancelCB
    self.onErrorCB = onErrorCB

    from gui import skin
    # synthetic "buddy" representing the download source; no real protocol
    fake_buddy = Storage(name="Digsby Servers",
                         alias="Digsby Servers",
                         serviceicon=skin.get('serviceicons.digsby'))
    IncomingHTTPFileTransfer.__init__(self, None, fake_buddy, name, url)
def __init__(self, images):
    'Index images by anchor tag and prepare an empty render cache.'
    self.images = images
    self.tags = {}
    self.drawrects = {}

    # record anchors: remember which image owns each named tag
    for img in self.images:
        if not hasattr(img, 'anchors'):
            continue
        for anchor in img.anchors:
            tag = anchor['tag'] if 'tag' in anchor else None
            if tag is not None:
                self.tags[tag] = img

    self.cached_result = Storage(bitmap=None, size=(0, 0))
def GetStyleAsStorage(self):
    "Snapshot the text control's current style as a format Storage."
    ctrl = self.tc
    f = ctrl.Font
    return Storage(backgroundcolor=tuple(ctrl.BackgroundColour),
                   foregroundcolor=tuple(ctrl.ForegroundColour),
                   family=FamilyNameFromFont(f),
                   face=f.FaceName,
                   size=f.PointSize,
                   underline=f.Underlined,
                   bold=f.Weight == wx.BOLD,
                   italic=f.Style == wx.ITALIC)
def main(): app = testapp() import cPickle fbdata_file = os.path.join(os.path.dirname(__file__), r'fbdata.dat') fbdata = cPickle.loads(open(fbdata_file, 'rb').read()) from util import Storage account = Storage(protocol='fb20', connection=Storage(last_stream=fbdata['stream'], last_alerts=fbdata['alerts'], last_status=fbdata['status'])) from fb20.fbacct import FBIB def doit(): before = clock() FBIB(account).get_html(None) return clock() - before print 'first ', doit() print 'second', doit()
def __init__(self):
    'A mock conversation containing one fake buddy plus ourselves.'
    Observable.__init__(self)

    buddy = MockBuddy('fakebuddy')
    self.name = 'fakebuddy'
    self.me = MockBuddy('digsby007')
    self.room_list = ObservableList([buddy, self.me])
    self.typing_status = ObservableDict()
    self.buddy = buddy
    self.messages = Queue()
    self.protocol = Storage(self_buddy=self.me, buddies={'digsby007': self.me})
    self.ischat = False
def _negotiatesocks5_gen(self): from struct import pack, unpack from util import Storage destaddr, destport = self.end uname, password = self._proxyinfo.get('username', ''), self._proxyinfo.get('password', '') this = Storage() this.errors = False this.authtype = 0 this.incoming_host_type = 0 def pstring(s): return chr(len(s)) + s def single_use_handler(f): @wraps(f) def wrapper(data): self.pop_handler() return f(data) return wrapper def if_errors_close(f): def wrapper(*a, **k): ok = not this.errors if ok: try: return f(*a, **k) except Exception, e: import traceback; traceback.print_exc() log.info('ProxySocket._negotiatesocks5: there was an error calling %r(*%r, **%r). the exception was: %r', f, a, k, e) this.errors = True self.close() return '',None else:
def __init__(self, host, port, af=socket.AF_INET, connect_timeout=0.2, waiting_timeout=0.2, request_timeout=2):
    'Store endpoint, address family, and timeout settings; request state starts empty.'
    self.host, self.port, self.af = host, port, af
    self.connect_timeout = connect_timeout
    self.waiting_timeout = waiting_timeout
    self.request_timeout = request_timeout
    self.request_message = Storage()
    self._request_callback = None
    self._user_request_callback = None
def _f(_message):
    """Unpack a response message: deliver (via gen.Return) the JSON body
    parsed into a Storage, or the raw body on error / empty / unparseable
    content."""
    log.debug("Request Message {}".format(_message.__dict__))
    status = _message.topic
    content = _message.body

    if status == RESPONSE_ERROR_TAG:
        # error responses pass the body through untouched
        raise gen.Return(content)
    if not content:
        raise gen.Return(content)

    v = content
    # BUGFIX: narrowed the bare "except:", which also swallowed
    # KeyboardInterrupt/SystemExit.  Any parse/construction failure still
    # falls back to the raw content.
    try:
        v = Storage(json.loads(content))
    except Exception:
        v = content
    raise gen.Return(v)
def info(self):
    "Returns a Storage containing the attributes edited by this dialog."
    info = Storage(name=self.name.Value, protocol=self.protocol_name)
    # strip any "<n>" suffix used to distinguish duplicate accounts
    info.protocol, info.name = strip_acct_id(info.protocol, info.name)

    if hasattr(self, "password"):
        info.password_len = len(self.password.Value)
        try:
            info.password = profile.crypt_pw(self.password.Value)
        except UnicodeEncodeError:
            # the database has corrupted the password.
            log.warning("corrupted password")
            info.password = ""
            self.password.Value = ""
            import hub
            hub.get_instance().on_error(
                "This account's password has been corrupted somehow. Please report it immediately."
            )

    if hasattr(self, "host"):
        # a blank port is stored as '' rather than failing int()
        info.server = (self.host.Value, int(self.port.Value) if self.port.Value else "")

    if hasattr(self, "remote_alias"):
        info.remote_alias = self.remote_alias.Value

    if hasattr(self, "autologin"):
        info.autologin = bool(self.autologin.Value)

    if hasattr(self, "resource"):
        # Jabber-specific: resource plus numeric priority (with fallback default)
        info.update(
            resource=self.resource.Value,
            priority=try_this(lambda: int(self.priority.Value), DEFAULT_JABBER_PRIORITY),
        )
        # ,
        # confserver = self.confserver.Value

    if hasattr(self, "dataproxy"):
        info.update(dataproxy=self.dataproxy.Value)

    # extra per-protocol fields declared by protocolinfo
    for d in getattr(self.protocolinfo, "more_details", []):
        attr = d["store"]
        ctrl = getattr(self, attr)
        info[attr] = ctrl.Value

    # form-type-specific collection (e.g. info_email), then plugin callbacks
    getattr(self, "info_" + self.formtype, lambda *a: {})(info)
    for info_cb in self.info_callbacks:
        info_cb(info)

    # fill in any protocol defaults not set above
    defaults = self.protocolinfo.get("defaults", {})
    for k in defaults:
        if k not in info:
            info[k] = getattr(self.account, k, defaults.get(k))

    return info
class AccountManager(Observable, HashedAccounts): def __init__(self, profile): Observable.__init__(self) self.accounts_loaded = False self.profile = profile self.connected_accounts = ObservableList() self.reconnect_timers = {} # holds "cancel" objects from Popups self.cancellers = {} self.profile.add_observer(self.on_state_change, 'state') self.profile.add_observer(self.on_offline_change, 'offline_reason') self.profile.add_observer(self.profile_state_changed, 'state') import wx wx.GetApp().OnBuddyListShown.append(lambda *a, **k: Timer(.25, threaded(self.release_accounts), *a, **k).start()) self._hash = sentinel self.got_accounts = False self.acct_calls = Delegate() self.delay_accounts = True self.acct_delay_lock = RLock() self._all_acct_hash = {} self.last_server_order = None self._all_accounts = Storage() for type_ in ('im', 'em', 'so'): s = Storage(accounts = ObservableList(), old = []) setattr(self._all_accounts, type_, s) # when the order of accounts changes, or accounts are added or deleted, # calls profile.accounts_changed('im', list) s.accounts.add_observer(getattr(self, type_ + '_accounts_changed')) self.accounts = self._all_accounts.im.accounts self.emailaccounts = self._all_accounts.em.accounts self.socialaccounts = self._all_accounts.so.accounts self.buddywatcher = BuddyWatcher() import services.service_provider as sp container = sp.ServiceProviderContainer(self.profile) container.on_order_changed += self._set_order def get_account_for_protocol(self, proto): for account in self.connected_im_accounts: if getattr(account, 'connection', None) is proto: return account return None def find_account(self, username, protocol): for acct in self.all_accounts: if acct.username == username and acct.protocol == protocol: return acct return None @property def connected_im_accounts(self): 'A list of all connected IM accounts.' 
accts = list(self.connected_accounts) if self.profile in accts: accts.remove(self.profile) return accts def get_im_account(self, username, service): for acct in self.connected_accounts: conn = acct.connection if conn is not None: if conn.name == service and conn.username == username: return acct @property def all_accounts(self): 'Returns all IM, social, and email accounts in one list.' return self.accounts + self.emailaccounts + self.socialaccounts def profile_state_changed(self, src, attr, old, new): ''' notify target. used to initiate retrieval of accounts from the server when the profile goes online ''' assert src == self.profile assert attr == 'state' from digsby import protocol as digsby_protocol if old == new: return if new == digsby_protocol.Statuses.SYNC_PREFS: with traceguard: self.profile.connection.get_accounts(success=lambda stanza: self.finished_get( digsby.accounts.Accounts(stanza.get_query()))) def _xfrm_sort(self, src, cls, pred): ''' Filters sequence src by predicate pred, and sorts src by its 'order' attribute. Transforms elements by calling cls.from_net on each. ''' from common.protocolmeta import protocols, proto_init accounts = [] for x in src: if pred(x): with traceguard: if x.protocol not in protocols: log.info('don\'t know what kind of account %r is: %r', x.protocol, x) continue if protocols.get(x.protocol, {}).get('smtp_pw_type', False): cls2 = proto_init(x.protocol) acct = cls2.from_net(x) else: acct = cls.from_net(x) accounts.append(acct) return sorted(accounts, key=lambda x: src.order.index(x.id) if x.id in src.order else len(src)) def maybe_delay_accounts(self, cb): ''' if self.delay_accounts is True, calls cb later and returns True. ''' with self.acct_delay_lock: if self.delay_accounts: self.acct_calls.append(cb) return True def load_from_identity(self, identity = None): ''' isolating the behavior required to support identities without completely trashing the rest of the existing code. 
''' self.setnotify('got_accounts', True) accounts = identity.load_data('accounts') self.replace_local(accounts, do_save=False) self.setnotify('accounts_loaded', True) def finished_get(self, accounts): ''' this is the response to a get request to the server. since we currently only request the entire list, the value we get here should be a complete representation of what the server has at this moment ''' self.setnotify('got_accounts', True) if self.maybe_delay_accounts(lambda: self.finished_get(accounts)): return if self._all_acct_hash != accounts.calc_hash(): accounts_debug("!!!!! last known server list is not the same as the server") accounts_debug('all_acct_hash: %r', self._all_acct_hash) accounts_debug('calc_hash(): %r', accounts.calc_hash()) self.replace_local(accounts) #or if the last recorded server order is not the same as the server now. elif self.last_server_order != accounts.order: accounts_debug("!!!!! the last recorded server order is not the same as the server now.") accounts_debug('last_server_order: %r', self.last_server_order) accounts_debug('accounts.order(): %r', accounts.order) self.replace_local(accounts) #if we made it this far, the server hasn't changed since we last heard #from it, therefore, push changes (if any) to the server. else: accounts_debug('!!!!! 
update_server') self.update_server(accounts) self.setnotify('accounts_loaded', True) def load_from_local(self, accounts, last_known_server_hash, last_server_order): self.setnotify('got_accounts', True) if self.maybe_delay_accounts(lambda: self.load_from_local(accounts, last_known_server_hash, last_server_order)): return self.replace_local(accounts, do_save=False) accounts_debug('_all_acct_hash: %r,\nlast_known_server_hash:%r', self._all_acct_hash, last_known_server_hash) self._all_acct_hash = last_known_server_hash accounts_debug('last_server_order: %r,\nlast_server_order:%r', self.last_server_order, last_server_order) self.last_server_order = last_server_order self.setnotify('accounts_loaded', True) def do_load_local_notification(self): if sys.opts.start_offline: return # if --online was passed on the command line, don't show a popup if self.maybe_delay_accounts(self.do_load_local_notification): return log.debug('local mode popup') fire('error', title = _('Digsby is running in "Local Mode"'), major = '', minor = _('Changes to Digsby preferences may not synchronize to your other PCs right away'), onclick = LOCAL_MODE_URL) def replace_local(self, accounts, do_save=True): ''' This function should replace the local list with the server list ''' accounts_debug('replace local') # find common simple hashes server_list = dict((a.min_hash(), a) for a in accounts) accounts_debug('server_list: %r, %r', server_list, accounts) local_list = dict((a.min_hash(), a) for a in self) accounts_debug('local_list: %r, %r', local_list, list(self)) common = set(server_list.keys()).intersection(set(local_list.keys())) accounts_debug('common: %r', common) # update update = [server_list[k] for k in common] accounts_debug('update: %r', update) # delete remainder of local list local_del = set(local_list.keys()) - common accounts_debug('local_del: %r', local_del) delete = [local_list[k] for k in local_del] accounts_debug('delete: %r', delete) # add remainder of new list remote_add = 
set(server_list.keys()) - common accounts_debug('remote_add: %r', remote_add) add = [server_list[k] for k in remote_add] accounts_debug('add: %r', add) # get rid of hashes for things that don't exist anymore. # can happen between logins when the server has changed, # though it should also require something to have happened locally. disappeared = (set(self._all_acct_hash.keys()) - set(a.id for a in accounts)) - set(a.id for a in self) for k in disappeared: self._all_acct_hash.pop(k, None) from digsby.accounts.accounts import Accounts add = Accounts(add, order = accounts.order) import services.service_provider as sp with sp.ServiceProviderContainer(self.profile).rebuilding() as container: self.acct_del(delete) self.acct_add(add) self.acct_update(update) container.rebuild(self) self.order_set(accounts.order) if do_save: self.save_all_info() def update_server(self, accounts): ''' This function should do the minmal amount of work required to synchronize our local list to the server and other remote clients. ''' accounts_debug('!!!!! 
update server list') server_list = dict((a.id, a) for a in accounts) accounts_debug('server_list: %r', server_list) local_list = dict((a.id, a) for a in self) accounts_debug('local_list: %r', local_list) common = set(server_list.keys()).intersection(set(local_list.keys())) accounts_debug('common: %r', common) #update update = [] for k in common: if local_list[k].total_hash() != server_list[k].total_hash(): accounts_debug("update append: %r != %r", local_list[k].total_hash(), server_list[k].total_hash()) update.append(local_list[k]) accounts_debug("update: %r", update) #delete remainder of local list remote_del = set(server_list.keys()) - common delete = [] for k in remote_del: delete.append(server_list[k]) accounts_debug("delete: %r", delete) #add remainder of new list remote_add = set(local_list.keys()) - common add = [] for k in remote_add: add.append(local_list[k]) accounts_debug("add: %r", add) conn = self.profile.connection from digsby.accounts import ADD, DELETE, UPDATE order = self.order[:] def set_order(*args, **kwargs): self.last_server_order = order def new_done(func=None): done = Delegate() if func is not None: done += func done += set_order done += self.save_server_info return done for a in delete: accounts_debug('deleting: %r, a.id: %r', a, a.id) def del_id(_, a=a): self._all_acct_hash.pop(a.id, None) done = new_done(del_id) if SAVE_ACCOUNTS: conn.set_account(a, action=DELETE, order=order, success=done) for _accounts, action in ((update, UPDATE), (add, ADD)): for a in _accounts: accounts_debug('%s: %r, a.id: %r, a.total_hash(): %r', action, a, a.id, a.total_hash()) h = a.total_hash() def on_update(_, a=a, h=h): a.store_hash(h) self._all_acct_hash[a.id] = h done = new_done(on_update) if SAVE_ACCOUNTS: conn.set_account(a, action=action, order=order, success=done) if self.profile.order != accounts.order: done = new_done() if SAVE_ACCOUNTS: conn.set_accounts(order=order, success=done) def update_account(self, account, force=False): ''' Called when an 
account changes. If the account is flagged that we are on the network thread (or at least as a result of network changes), then this does nothing. Otherwise, the account + new account order are pushed to the server in an update. ''' self.save_local_info() return if not SAVE_ACCOUNTS: return if account.isflagged(DELETING): return if force or not account.isflagged(NETWORK_FLAG): h = account.total_hash() order = self.order[:] try: def done(*a, **k): import services.service_provider as sp opts = account.get_options() sp.get_provider_for_account(account).update_info(opts) self.last_server_order = order account.store_hash(h) self._all_acct_hash[account.id] = account._total_hash self.save_server_info() if not force and h == account._total_hash and order == self.last_server_order \ and self._all_acct_hash[account.id] == account._total_hash: return self.profile.connection.set_account(account, action = 'update', order=order, success=done) except Exception: traceback.print_exc() def acct_del(self, accts): ''' Network account delete. 
''' for acct in accts: for account in self: if acct.id == account.id: acct2 = account break else: acct2 = None if acct2 is not None: with self.accounts_flagged(NETWORK_FLAG): if get(acct2, 'enabled', False): acct2.enabled = False self.remove(acct2) self._all_acct_hash.pop(acct2.id, None) from gui import toast for id in getattr(acct2, 'popupids',()): toast.cancel_id(id) def _get_order(self): self_order = oset([a.id for a in self]) import services.service_provider as sp container = sp.ServiceProviderContainer(self.profile) sp_order = oset(container.get_order()) return list(sp_order | self_order) def _set_order(self, new): import services.service_provider as sp lookup = dict((v,k) for (k,v) in enumerate(new)) newlen = len(new) for k in ('im','em','so'): self._all_accounts[k].accounts.sort(key=lambda a: lookup.get(a.id, newlen)) container = sp.ServiceProviderContainer(self.profile) container.set_order(new) order = property(_get_order, _set_order) def order_set(self, order): ''' An order update, coming from the network. ''' with self.accounts_flagged(NETWORK_FLAG): self.order = order[:] self.last_server_order = order[:] def accounts_set(self, stanza=None, accounts=None): ''' Handle incoming network changes to the accounts list. 
''' if stanza is None: assert accounts else: accounts = digsby.accounts.Accounts(stanza.get_query()) if self.maybe_delay_accounts(lambda: self.accounts_set(accounts=accounts)): return from digsby.accounts import ADD, UPDATE, DELETE, Accounts del_accts = [acct for acct in accounts if acct.action == DELETE] add_accts = [acct for acct in accounts if acct.action == ADD or acct.action == None] mod_accts = [acct for acct in accounts if acct.action == UPDATE] del_accts = Accounts(del_accts, accounts.order) add_accts = Accounts(add_accts, accounts.order) mod_accts = Accounts(mod_accts, accounts.order) import services.service_provider as sp with sp.ServiceProviderContainer(self.profile).rebuilding() as container: self.acct_del(del_accts) self.acct_add(add_accts) self.acct_update(mod_accts) self.save_all_info() container.rebuild(self) self.order_set(accounts.order) def acct_add(self, accts): ''' Network account add. ''' with self.accounts_flagged(NETWORK_FLAG): self.add_all(accts) def acct_update(self, accts): ''' Network account update. ''' for acct in accts: real_acct = [a for a in self if a.id == acct.id][0] info = dict(name = acct.username, password = acct.password, protocol = acct.protocol, id=acct.id, **cPickle.loads(acct.data)) with real_acct.flagged(NETWORK_FLAG): real_acct.update_info(**info) real_acct.store_hash() self._all_acct_hash[real_acct.id] = real_acct._total_hash @contextmanager def accounts_flagged(self, flags): ''' convenience function, nests the context managers of all account types and flags each with these flags. ''' with nested(*[accts.accounts.flagged(flags) for accts in self._all_accounts.values()]): yield def release_accounts(self, autologin=False): ''' function to be called to apply all network account changes received from the network. 
''' with self.acct_delay_lock: self.delay_accounts = False self.acct_calls.call_and_clear() import plugin_manager.plugin_hub as plugin_hub plugin_hub.act('digsby.accounts.released.async') if autologin and sys.opts.autologin_accounts: log.debug('doing autologin') self.autologin() def autologin(self): 'Auto login all accounts with autologin enabled.' for account in self: if is_im_account(account) and getattr(account, 'autologin', False): with_traceback(account.connect) def __iter__(self): #needs update to interleave based on overall order return itertools.chain(self.accounts, self.emailaccounts, self.socialaccounts) @property def reconnect(self): return pref('login.reconnect.attempt', False) @property def reconnect_times(self): return pref('login.reconnect.attempt_times', 5) def watch_account(self, acct): acct.add_observer(self.on_enabled_change, 'enabled') acct.add_observer(self.on_state_change, 'state') acct.add_observer(self.on_offline_change, 'offline_reason') def unwatch_account(self, acct): acct.remove_observer(self.on_enabled_change, 'enabled') acct.remove_observer(self.on_state_change, 'state') acct.remove_observer(self.on_offline_change, 'offline_reason') @util.callsback def disconnect_all(self, callback = None): ''' Call (D|d)isconnect on all accounts and set_enabled(False) if appropriate. After they all go to OFFLINE state, call callback.success. 
        '''
        # Stashed here; consumed by _check_all_offline once everything is down.
        self.disconnect_cb = callback

        # IM accounts: plain disconnect (the profile connection is skipped).
        for a in self.connected_accounts[:]:
            if a is not self.profile:
                log.debug(' im: Calling "disconnect" on %r', a)
                with traceguard:
                    a.disconnect()

        # Email accounts: disable and disconnect anything not already offline.
        for a in self.emailaccounts:
            with traceguard:
                if a.state != a.Statuses.OFFLINE:
                    log.debug(' email: Calling "disconnect", "set_enabled(False)" on %r', a)
                    a.set_enabled(False)
                    a.disconnect()

        # Social accounts: same, but the method is capital-D Disconnect.
        for a in self.socialaccounts:
            with traceguard:
                if a.state != a.Statuses.OFFLINE:
                    log.debug(' social: Calling "Disconnect", "set_enabled(False)" on %r', a)
                    a.set_enabled(False)
                    a.Disconnect()

        # May fire the callback immediately if nothing was connected.
        self._check_all_offline()

    def all_active_accounts(self):
        '''
        Like self.connected_accounts but also for email and social accounts
        '''
        accts = self.connected_accounts[:]
        try:
            accts.remove(self.profile)
        except ValueError:
            # Profile was not in the list; nothing to remove.
            pass
        return accts

    def _check_all_offline(self):
        # Only meaningful while a disconnect_all call is pending.
        if getattr(self, 'disconnect_cb', None) is not None:
            active = self.all_active_accounts()
            if not active:
                # This attribute is ONLY set when disconnect_all is called.
                dccb, self.disconnect_cb = self.disconnect_cb, None
                log.debug('All accounts disconnected, calling disconnect callback: %r', dccb.success)
                import wx
                # Deliver on the GUI thread.
                wx.CallAfter(dccb.success)
            else:
                log.debug('All accounts not disconnected yet, remaining = %r', active)

    def on_state_change(self, src, attr, old, new):
        assert attr in ('state', None)
        hooks.notify('account.state', src, new)

        # Update "connected_accounts" list
        conn = [a for a in self.accounts + [self.profile] if a.connected]
        if self.connected_accounts != conn:
            self.connected_accounts[:] = conn

        if new != StateMixin.Statuses.OFFLINE:
            # Leaving OFFLINE: cancel any pending canceller and stop any
            # reconnect timer scheduled for this account.
            if src in self.cancellers:
                self.cancellers.pop(src).cancel()
            x = self.reconnect_timers.pop(src,None)
            if x is not None:
                x.stop()

        if new == StateMixin.Statuses.OFFLINE:
            self._on_account_offline(src)
            self._check_all_offline()

        if new == StateMixin.Statuses.ONLINE:
            # Successful sign-on resets the reconnect backoff counter.
            src.error_count = 0

        # for IM accounts signing on, set their profile.
        if src in self.accounts:
            self.profile.set_formatted_profile(src.connection)

    def _on_account_offline(self, src):
        '''
        Notifies the buddylist sorter that an account is now offline.
        '''
        sorter = getattr(self.profile.blist, 'new_sorter', None)
        if sorter is None:
            # No sorter yet; nothing to notify.
            return

        if is_im_account(src) or src is self.profile:
            log.info('informing the sorter that (%r, %r) went offline', src.username, src.protocol)
            # Sorter work happens on its own dedicated thread.
            on_thread('sorter').call(sorter.removeAccount, src.username, src.protocol)

    def on_offline_change(self, src, attr, old, new):
        accounts_debug('%s\'s %s changed from %s to %s', src, attr, old, new)
        assert attr in ('offline_reason', None)
        attr = 'offline_reason'
        if new is None:
            # Observer may pass None; read the current value off the account.
            new = getattr(src, attr)

        Reasons = StateMixin.Reasons

        conditions = (old == new,  # no change...this function shouldn't have been called in the first place
                      new == StateMixin.Reasons.NONE,  # normal offline state, doesn't matter
                      )

        if any(conditions):
            return

        log.debug('%s offline reason: %r->%r', src, old, new)

        if getattr(Reasons, 'WILL_RECONNECT', None) in (new, old):
            # something we set - ignore for now
            # new means we set it lower down in this function, old means we're moving out of this state, which should
            # not be an error.
            log.debug('Skipping the rest because reason is WILL_RECONNECT')
            return

        if new == getattr(Reasons, 'BAD_PASSWORD', None) and src is self.profile:
            if not self.profile.has_authorized:
                log.debug('Wrong password for digsbyprofile - not going to reconnect')
                return
            else:
                # Profile authorized previously; treat as a generic failure so
                # the reconnect logic below still applies.
                new = None

        if src is self.profile and not self.profile.loaded:
            log.debug('DigsbyProfile has never connected, not reconnecting after %s state.', new)
            return

        # Only IM accounts (and the profile itself) are auto-reconnected, and
        # only for reasons a retry could plausibly fix.
        if (is_im_account(src) or src is self.profile) and new not in (Reasons.BAD_PASSWORD,
                                                                       Reasons.NO_MAILBOX,
                                                                       Reasons.OTHER_USER,
                                                                       Reasons.RATE_LIMIT,
                                                                       Reasons.SERVER_ERROR):
            # Per-protocol tolerance, then per-account, then global pref,
            # then a hard default of 4.
            # NOTE(review): maxerror is currently unused -- the count check
            # below is commented out.
            maxerror = (pref('%s.max_error_tolerance' % src.protocol, False) or
                        getattr(src, 'max_error_tolerance', False) or
                        pref('login.max_error_tolerance', False) or
                        4
                        )
            count = src.error_count
            src.error_count += 1
            log.info('%s\'s error_count is now %d.', src, src.error_count,)
            if (self.reconnect or src is self.profile): #and count < maxerror:
                if src in self.reconnect_timers:
                    # account is already scheduled for a reconnect
                    src.error_count -= 1
                    return
                src.setnotifyif('offline_reason', Reasons.WILL_RECONNECT)

                # schedule/attempt reconnect
                # Backoff schedule: 1s, 10s, 30s, then 300s thereafter.
                reconnect_time = get((1,10,30,300), count, 300)
                if src in self.accounts or src is self.profile:
                    profile_on_return = False
                    if src is self.profile:
                        # NOTE(review): this literal straddled a collapsed line
                        # break in the source view; reconstructed as one line.
                        log.critical('Going to try to reconnect the digsbyprofile. This could get interesting...')
                        reconnect_time, profile_on_return = self.get_profile_reconnect_time()

                    def rct():
                        # The deferred reconnect, run later on a worker thread.
                        log.info('Reconnecting %s...', src)
                        try:
                            log.warning('src=%r...setting on_connect to change_state', src)
                            if src is self.profile:
                                def set_online(*a, **k):
                                    src.connection.setnotify('state', StateMixin.Statuses.ONLINE)
                                src.on_connect = set_online
                            if getattr(src, 'connection', None) is None:
                                src._reconnect()
                            else:
                                log.error('There was already a connection for this account that was supposed to reconnect: %r', src)
                        except Exception, e:
                            log.critical('Error while trying to reconnect %s (error was: %r)', src, e)
                            traceback.print_exc()

                    # Replace any stale timer before scheduling the new one.
                    x = self.reconnect_timers.pop(src,None)
                    if x is not None:
                        x.stop()
                    log.info('Starting reconnect timer for %s. Will reconnect in %d seconds %r', src, reconnect_time, self.state_desc(src))
                    self.reconnect_timers[src] = rct_timer = call_later(reconnect_time, threaded(rct))

                    if profile_on_return:
                        # If the user returns from idle, fire the profile's
                        # reconnect timer immediately instead of waiting.
                        def reconnect_profile_now(*a, **k):
                            rct_timer.done_at = 0
                            wakeup_timeout_thread()
                        self.profile.OnReturnFromIdle += reconnect_profile_now
                    return
                else:
                    assert isinstance(src, UpdateMixin)
                    # this is a social or email account -- it has its own timers and things
                    # and will attempt the next update when appropriate
                    return
            log.info('Error count too high, or reconnect disabled.')
        elif not is_im_account(src):
            log.info('%r is not an IM account. skipped a bunch of error_count/reconnect stuff.', src)