def auth_update_cache(self):
    """Store a bcrypt hash of the just-verified password in the auth cache.

    Returns False without touching the cache when caching is disabled.
    """
    if self.ctx.db.cache_storage == 'none':
        return False
    jid = '%s@%s' % (self.username, self.domain)
    now = self.now
    try:
        # Memory-only caches can afford a different (cheaper) cost factor
        if self.ctx.db.cache_storage == 'memory':
            cost = self.ctx.bcrypt_rounds[1]
        else:
            cost = self.ctx.bcrypt_rounds[0]
        salt = bcrypt.gensalt(rounds=cost)
    except TypeError:
        # Old versions of bcrypt() apparently do not support the rounds option
        salt = bcrypt.gensalt()
    pwhash = unutf8(bcrypt.hashpw(utf8(self.password), salt))
    # Upsert in SQLite is too new to rely on:
    # https://www.sqlite.org/draft/lang_UPSERT.html
    #
    # INSERT OR REPLACE cannot be used, as it will inherit
    # the DEFAULT values instead of the existing values.
    # Emulate with INSERT OR IGNORE + UPDATE inside one transaction.
    self.ctx.db.cache.begin()
    self.ctx.db.cache.execute(
        '''INSERT OR IGNORE INTO authcache (jid, firstauth) VALUES (?, ?)''',
        (jid, now))
    self.ctx.db.cache.execute(
        '''UPDATE authcache SET pwhash = ?, remoteauth = ?, anyauth = ? WHERE jid = ?''',
        (pwhash, now, now, jid))
    self.ctx.db.cache.commit()
def roster_update_users(self, e, sr):
    '''Update users' full names and invert hash

    For all *users* we have information about:
    - collect the shared roster groups they belong to
    - set their full names if not yet defined

    Return inverted hash (group -> [users]) and the generated commands'''
    groups = {}
    commands = []
    for user, desc in sr.items():
        if 'groups' in desc:
            for g in desc['groups']:
                groups.setdefault(g, []).append(user)
        if 'name' in desc:
            lhs, rhs = self.jidsplit(user)
            fnc = utf8('FNC:' + user)  # No unicode keys
            if fnc in self.ctx.shared_roster_db:
                cached_name = unutf8(self.ctx.shared_roster_db[fnc])
            else:
                cached_name = None
            self.ctx.shared_roster_db[fnc] = utf8(desc['name'])
            cmd = e.maybe_set_fn(lhs, rhs, desc['name'],
                                 cached_name=cached_name)
            if cmd is not None:
                commands.append(cmd)
    return groups, commands
def execute(self, args):
    """Run ejabberdctl with the given argument list.

    args: list of additional command-line arguments for ejabberdctl.
    Returns the decoded stdout of the command, or None if it exited
    with a non-zero status (the failure is logged).
    """
    # '%s%s' reproduces the original path + str(args) concatenation lazily
    logging.debug('%s%s', self.ctx.ejabberdctl_path, args)
    try:
        return unutf8(
            subprocess.check_output([self.ctx.ejabberdctl_path] + args))
    except subprocess.CalledProcessError as err:
        # FIX: logging.warn() is deprecated; use logging.warning()
        logging.warning('ejabberdctl %s%s failed with %s',
                        self.ctx.ejabberdctl_path, args, err)
        return None
def test_unutf8_invalid_illegal():
    """unutf8 with 'illegal' marks and strips the bad byte (stderr silenced)."""
    saved_stderr = sys.stderr
    try:
        sys.stderr = io.StringIO()  # swallow the warning unutf8 prints
        assertEqual(unutf8(b'Hall\x80chen', 'illegal'),
                    u'illegal-utf8-sequence-Hallchen')
    finally:
        sys.stderr = saved_stderr
def perform(args):
    """Command-line entry point for inspecting/modifying the domain database.

    Exactly one of args.get/put/delete/unload/load selects the operation.
    The bsddb file is created with mode 0600 if it does not exist yet.
    """
    domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
    try:
        if args.get:
            print(unutf8(domain_db[utf8(args.get)]))
        elif args.put:
            # FIX: values must be bytes like the keys; storing the raw str
            # is rejected by bsddb3 under Python 3
            domain_db[utf8(args.put[0])] = utf8(args.put[1])
        elif args.delete:
            del domain_db[utf8(args.delete)]
        elif args.unload:
            for k in list(domain_db.keys()):
                print('%s\t%s' % (unutf8(k), unutf8(domain_db[k])))
            # Should work according to documentation, but doesn't
            # for k, v in DOMAIN_DB.iteritems():
            #     print k, '\t', v
        elif args.load:
            for line in sys.stdin:
                k, v = line.rstrip('\r\n').split('\t', 1)
                # FIX: encode the value as well (see put above)
                domain_db[utf8(k)] = utf8(v)
    finally:
        # FIX: close even on error so pending writes are flushed
        domain_db.close()
def db_upgrade_domain(self, olddb):
    """Migrate the legacy bsddb domain database into the SQLite `domains` table.

    olddb may be None (nothing to migrate), a path to a bsddb hash file,
    or a dict-like object (as used by tests).  Legacy values are
    tab-separated: authsecret, authurl, authdomain[, extra].
    """
    logging.debug('Upgrading domain from %s' % olddb)
    try:
        self.conn.execute('''CREATE TABLE domains (xmppdomain TEXT PRIMARY KEY, authsecret TEXT, authurl TEXT, authdomain TEXT, regcontact TEXT, regfirst TIMESTAMP DEFAULT CURRENT_TIMESTAMP, reglatest TIMESTAMP DEFAULT CURRENT_TIMESTAMP)''')
    except sqlite3.OperationalError as e:
        logging.warning(
            'Cannot create `domains` table; maybe multiple processes started in parallel? %s'
            % str(e))
        # Try to get out of the way of a parallel updater
        time.sleep(1)
        # Someone else already created the table; they probably also
        # migrated it, so skip the data copy entirely
        return
    try:
        if olddb is None:
            return
        elif isinstance(olddb, str):
            db = bsddb3.hashopen(olddb, 'r')
        else:  # dict
            db = olddb
        for k, v in db.items():
            # Legacy records may contain illegal UTF-8; decode leniently
            k = unutf8(k, 'illegal')
            v = unutf8(v, 'illegal')
            try:
                (authsecret, authurl, authdomain, extra) = v.split("\t", 3)
            except ValueError:
                # Older three-field records have no `extra` part
                (authsecret, authurl, authdomain) = v.split("\t", 2)
                extra = None  # NOTE(review): `extra` is parsed but never stored
            self.conn.execute(
                '''INSERT INTO domains (xmppdomain, authsecret, authurl, authdomain) VALUES (?, ?, ?, ?)''',
                (k, authsecret, authurl, authdomain))
        if isinstance(olddb, str):
            db.close()
    except bsddb3.db.DBError as e:
        logging.error('Trouble converting %s: %s' % (olddb, e))
def read_request(cls):
    """Generator yielding auth requests parsed from stdin.

    Reads groups of four length-prefixed (16-bit big-endian size, then
    that many bytes) fields and yields
    ('auth', fields[0], fields[3], fields[1]) — presumably
    (username, realm, password) per the saslauthd protocol; TODO confirm.
    Returns on EOF or on a truncated field.
    """
    field_no = 0
    fields = [None, None, None, None]
    length_field = sys.stdin.buffer.read(2)
    while len(length_field) == 2:
        (size, ) = unpack('>H', length_field)
        val = sys.stdin.buffer.read(size)
        if len(val) != size:
            # FIX: logging.warn() is deprecated; use logging.warning()
            logging.warning('premature EOF while reading field %d: %d != %d',
                            field_no, len(val), size)
            return
        fields[field_no] = val
        field_no = (field_no + 1) % 4
        if field_no == 0:
            # A complete 4-field record has been assembled
            logging.debug('from_saslauthd got %r, %r, %r, %r' % tuple(fields))
            yield ('auth', unutf8(fields[0], 'illegal'),
                   unutf8(fields[3], 'illegal'), unutf8(fields[1], 'illegal'))
        length_field = sys.stdin.buffer.read(2)
def post_200_ok_verify(url, data='', headers='', allow_redirects=False,
                       timeout=5):
    """Like post_200_ok, but first verifies URL, payload, and HMAC signature.

    Asserts the request targets the expected host, carries the expected
    '&'-separated key/value payload (order-insensitive), and is signed
    with the shared secret b'999' (SHA-1 HMAC in X-JSXC-SIGNATURE).
    """
    assertEqual(url, 'https://nosuchhost')
    assertSortOf(unutf8(data),
                 'username=usr&operation=isuser&domain=no.such.doma.in', '&')
    # FIX: renamed local `hash` -> `digest`; it shadowed the builtin hash()
    digest = hmac.new(b'999', msg=data, digestmod=hashlib.sha1).hexdigest()
    assertEqual(headers['X-JSXC-SIGNATURE'], 'sha1=' + digest)
    return post_200_ok(url, data, headers, allow_redirects, timeout)
def read_request(cls):
    """Generator yielding colon-separated command tuples read from stdin.

    Each packet is a 16-bit big-endian length followed by that many
    bytes; a zero length is treated as logical EOF.  Yields tuples of at
    most four fields (command plus up to three arguments).  Returns on
    EOF or on a truncated packet.
    """
    length_field = sys.stdin.buffer.read(2)
    while len(length_field) == 2:
        (size, ) = unpack('>H', length_field)
        if size == 0:
            logging.info('command length 0, treating as logical EOF')
            return
        cmd = sys.stdin.buffer.read(size)
        if len(cmd) != size:
            # FIX: logging.warn() is deprecated; use logging.warning()
            logging.warning('premature EOF while reading cmd: %d != %d',
                            len(cmd), size)
            return
        x = unutf8(cmd).split(':', 3)
        yield tuple(x)
        length_field = sys.stdin.buffer.read(2)
def db_upgrade_cache(self, olddb):
    """Migrate the legacy bsddb auth cache into the SQLite `authcache` table.

    olddb may be None (nothing to migrate), a path to a bsddb hash file,
    or a dict-like object (as used by tests).  Legacy values are
    tab-separated: pwhash, firstauth, remoteauth, anyauth[, rest].
    """
    logging.debug('Upgrading cache from %s' % olddb)
    try:
        if olddb is None:
            return
        elif isinstance(olddb, str):
            db = bsddb3.hashopen(olddb, 'r')
        else:  # dict
            db = olddb
        for k, v in db.items():
            # Legacy keys used user:domain; the new schema stores user@domain
            k = unutf8(k, 'illegal').replace(':', '@')
            v = unutf8(v, 'illegal')
            (pwhash, ts1, tsv, tsa, rest) = v.split("\t", 4)
            # Legacy timestamps are Unix epoch seconds
            # NOTE(review): utcfromtimestamp() is deprecated since Python
            # 3.12; consider datetime.fromtimestamp(..., tz=timezone.utc),
            # but verify the stored format stays compatible first
            ts1 = datetime.utcfromtimestamp(int(ts1))
            tsv = datetime.utcfromtimestamp(int(tsv))
            tsa = datetime.utcfromtimestamp(int(tsa))
            # First import goes into persistent database
            self.conn.execute(
                '''INSERT INTO authcache (jid, pwhash, firstauth, remoteauth, anyauth) VALUES (?, ?, ?, ?, ?)''',
                (k, pwhash, ts1, tsv, tsa))
        if isinstance(olddb, str):
            db.close()
    except bsddb3.db.DBError as e:
        logging.error('Trouble converting %s: %s' % (olddb, e))
def read_request(cls, infd, outfd):
    """Generator yielding colon-separated command tuples read from infd.

    infd may be a text stream (its underlying binary .buffer is used) or
    a binary stream.  Each packet is a 16-bit big-endian length followed
    by that many bytes; a zero length is treated as logical EOF.  outfd
    is unused here (kept for interface symmetry with the writer).
    """
    try:
        infd = infd.buffer  # prefer the raw binary layer of a text stream
    except AttributeError:
        pass  # already a binary stream
    length_field = infd.read(2)
    while len(length_field) == 2:
        (size,) = unpack('>H', length_field)
        if size == 0:
            logging.info('command length 0, treating as logical EOF')
            return
        cmd = infd.read(size)
        if len(cmd) != size:
            # FIX: logging.warn() is deprecated; use logging.warning()
            logging.warning('premature EOF while reading cmd: %d != %d',
                            len(cmd), size)
            return
        x = unutf8(cmd).split(':', 3)
        yield tuple(x)
        length_field = infd.read(2)
def auth_update_cache(self):
    """Record a fresh bcrypt hash for this user in the bsddb auth cache."""
    if '' in self.ctx.cache_db:  # Cache disabled?
        return
    key = '%s:%s' % (self.username, self.domain)
    now = self.now  # For tests
    snow = str(now)
    try:
        salt = bcrypt.gensalt(rounds=self.ctx.bcrypt_rounds)
    except TypeError:
        # Old versions of bcrypt() apparently do not support the rounds option
        salt = bcrypt.gensalt()
    pwhash = unutf8(bcrypt.hashpw(utf8(self.password), salt))
    if key in self.ctx.cache_db:
        # Keep the original first-auth timestamp, refresh the others
        (_oldhash, ts1, _tsv, _tsa, rest) = \
            self.ctx.cache_db[key].split("\t", 4)
        self.ctx.cache_db[key] = "\t".join((pwhash, ts1, snow, snow, rest))
    else:
        self.ctx.cache_db[key] = "\t".join((pwhash, snow, snow, snow, ''))
    self.try_db_sync()
def test_unutf8_valid():
    """A well-formed UTF-8 byte sequence decodes to the expected text."""
    decoded = unutf8(b'Hall\xc3\xb6chen')
    assertEqual(decoded, u'Hallöchen')
def test_unutf8_invalid_ignore3():
    """With the 'ignore' policy, invalid bytes are silently dropped."""
    decoded = unutf8(b'Hall\x80chen', 'ignore')
    assertEqual(decoded, u'Hallchen')
def test_unutf8_invalid_strict():
    """Under 'strict', an illegal UTF-8 sequence must raise UnicodeError."""
    try:
        assertEqual(unutf8(b'Hall\x80chen', 'strict'), u'Hallchen')
    except UnicodeError:
        # Expected: strict decoding rejected the bad byte
        return
    raise AssertionError('Illegal UTF-8 sequence accepted under "strict"')
def test_unutf8_ascii():
    """Plain ASCII bytes pass through unchanged."""
    decoded = unutf8(b'Hallo')
    assertEqual(decoded, u'Hallo')
def db_upgrade_roster(self, olddb):
    """Migrate the legacy bsddb roster cache into SQLite.

    Creates the `rosterinfo` and `rostergroups` tables and copies over
    the legacy key-prefixed records:
      FNC: full name (cache only)
      LIG: Login In Group state
      RGC: Reverse Group Cache state
      RH:  response body hash (cache only)
    olddb may be None (nothing to migrate), a path to a bsddb hash file,
    or a dict-like object (as used by tests).
    """
    logging.debug('Upgrading roster from %s' % olddb)
    try:
        self.conn.execute('''CREATE TABLE rosterinfo (jid TEXT PRIMARY KEY, fullname TEXT, grouplist TEXT, responsehash TEXT, last_update TIMESTAMP DEFAULT CURRENT_TIMESTAMP)'''
                          )
    except sqlite3.OperationalError as e:
        # NOTE(review): message says `domains` but this creates `rosterinfo`
        logging.warning(
            'Cannot create `domains` table; maybe multiple processes started in parallel? %s'
            % str(e))
        # Try to get out of the way of a parallel updater
        time.sleep(1)
        # Continue, in case a previous table creation was aborted
    try:
        self.conn.execute('''CREATE TABLE rostergroups (groupname TEXT PRIMARY KEY, userlist TEXT)''')
    except sqlite3.OperationalError as e:
        # NOTE(review): message says `domains` but this creates `rostergroups`
        logging.warning(
            'Cannot create `domains` table; maybe multiple processes started in parallel? %s'
            % str(e))
        # Try to get out of the way of a parallel updater
        time.sleep(1)
        # Someone else already created the table; he probably also
        # migrated it
        return
    rosterinfo_fn = {}  # jid -> full name
    rosterinfo_rh = {}  # jid -> response body hash
    rosterinfo_lg = {}  # jid -> login groups
    rosterusers = set([])
    rostergroups = {}  # group -> userlist
    try:
        if olddb is None:
            return
        elif isinstance(olddb, str):
            db = bsddb3.hashopen(olddb, 'r')
        else:  # dict
            db = olddb
        for k, v in db.items():
            # Legacy records may contain illegal UTF-8; decode leniently
            k = unutf8(k, 'illegal')
            v = unutf8(v, 'illegal')
            if k.startswith('FNC:'):  # Full name (cache only)
                jid = k[4:].replace(':', '@')
                rosterusers.add(jid)
                if '@' in jid:  # Do not copy malformed (old buggy) entries
                    rosterinfo_fn[jid] = v
            if k.startswith('LIG:'):  # Login In Group (state information)
                jid = k[4:].replace(':', '@')
                rosterusers.add(jid)
                rosterinfo_lg[jid] = v
            if k.startswith(
                    'RGC:'):  # Reverse Group Cache (state information)
                # NOTE(review): ':'->'@' on a group name mirrors the jid
                # handling above; presumably intentional — confirm
                gid = k[4:].replace(':', '@')
                rostergroups[gid] = v
            elif k.startswith('RH:'):  # Response body hash (cache only)
                jid = k[3:].replace(':', '@')
                rosterusers.add(jid)
                rosterinfo_rh[jid] = v
        if isinstance(olddb, str):
            db.close()
    except bsddb3.db.DBError as e:
        logging.error('Trouble converting %s: %s' % (olddb, e))
    # Bulk-insert the collected group and user records
    rg = []
    for k, v in rostergroups.items():
        rg.append([k, v])
    self.conn.executemany(
        'INSERT INTO rostergroups (groupname, userlist) VALUES (?, ?)', rg)
    ri = []
    for k in rosterusers:
        ri.append([
            k,
            rosterinfo_fn[k] if k in rosterinfo_fn else None,
            rosterinfo_lg[k] if k in rosterinfo_lg else None,
            rosterinfo_rh[k] if k in rosterinfo_rh else None
        ])
    self.conn.executemany(
        'INSERT INTO rosterinfo (jid, fullname, grouplist, responsehash) VALUES (?, ?, ?, ?)',
        ri)