def clearcache(maxcache=MAXCACHE):
    '''
    delete entries if CACHED size is greater than MAXCACHE

    >>> CACHED.update({':2': 'x' * 1024, ':1': 'y' * 64, ':3': 'z' * 2048})
    >>> logging.debug('sum of lengths: %d', sum(map(len,
    ...     [v for k, v in CACHED.items() if k.startswith(':')])))
    >>> logging.info('doctest CACHED.keys(): %s', list(CACHED.keys()))
    >>> clearcache(100)
    >>> {k: v for k, v in CACHED.items() if k.startswith(':')}
    {':1': 'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'}
    >>> len(CACHED[':1'])
    64
    '''
    logging.debug('CACHED.keys(): %s', list(CACHED.keys()))

    def colon_entries():
        # snapshot of the size-limited cache entries (keys prefixed ':')
        return {k: v for k, v in CACHED.items() if k.startswith(':')}

    entries = colon_entries()
    # evict the single largest entry until total size fits under maxcache
    while sum(len(value) for value in entries.values()) > maxcache:
        next_biggest = max(entries, key=lambda key: len(entries[key]))
        logging.debug('next_biggest: %s', next_biggest)
        logging.warning('deleting CACHED[%s] of length %d',
                        next_biggest, len(CACHED[next_biggest]))
        del CACHED[next_biggest]
        entries = colon_entries()
def update():
    '''
    process xhr request for update to posts or messages

    reads `name` and `hash` from the request args; returns a tuple
    (status, page) where page is a UTF-8 bytestring and status is an
    HTTP status line suitable for start_response
    '''
    name, hashed = args.get('name', None), args.get('hash', None)
    update_status = status  # default from outer variable
    # explicit mapping of section name to (content, hash) replaces the
    # previous eval() of a string built from request data; eval on
    # anything request-derived is an injection hazard and pylint-flagged
    sections = {
        'messages': (messages, messages_hash),
        'posts': (posts, posts_hash),
    }
    if name in sections:
        content, content_hash = sections[name]
        logging.debug('messages: ...%s', messages[-128:])
        logging.debug('messages_hash: %s', messages_hash)
        logging.debug('posts: %s...', posts[:128])
        logging.debug('posts_hash: %s', posts_hash)
        if hashed and hashed != content_hash:
            # client's copy is stale: send the fresh section content
            update_page = content.encode()
        elif hashed:
            # hashes match: nothing new for the client
            logging.debug('%s unchanged', args['name'])
            update_page = b''
            update_status = '304 Not Modified'
        else:
            logging.error('no hash passed to /update/')
            update_page = b''
            update_status = '406 Not Acceptable'
    else:
        update_page = (
            '<div>no updates for %s</div>' % args['name']
        ).encode()
        update_status = '404 Not Found'
    return update_status, update_page
def __new__(cls, filename='', **kwargs):
    '''
    create a post instance of the subclass matching the requested type

    the type is taken from kwargs, from the JSON contents of `filename`,
    or from the filename extension, in that order of preference; returns
    None when no usable subclass can be found
    '''
    # map each subclass's declared classname to the subclass itself so a
    # post's 'type' string can select the right class
    mapping = {
        subclass.classname: subclass
        for subclass in cls.__subclasses__()
    }
    logging.debug('mapping: %s, kwargs: %s', mapping, kwargs)
    if not kwargs:
        try:
            # load post attributes from the file contents
            kwargs = json.loads(read(filename))
        except TypeError:
            # read() presumably returned None for a missing/empty
            # filename -- fall back to empty attributes
            kwargs = {}
    logging.debug('cls.classname: %s', cls.classname)
    post_type = kwargs.get('type', cls.classname)
    kwargs['type'] = post_type  # make sure it's there for __init__
    if filename and post_type not in mapping:
        # last resort: derive the type from the file extension
        post_type = os.path.splitext(filename)[1].lstrip('.')
    subclass = mapping.get(post_type, cls)
    try:
        instance = super(BasePost, subclass).__new__(subclass)
        # fill in defaults from things unknown at script load time
        instance.versions['0.0.1']['author'].required = CACHED.get(
            'username', True)
        instance.versions['0.0.1']['fingerprint'].required = CACHED.get(
            'gpgkey', '')[-16:] or True
    except TypeError:
        logging.exception('Unknown post type %s', subclass)
        instance = None
    return instance
def validate_lambda(value):
    '''
    helper function for values as lambda expression
    '''
    check = self.values
    logging.debug('validating lambda expression %s(%r)', check, value)
    return check(value)
def run_process(command, **kwargs):
    '''
    implementation of subprocess.run for older Python3

    https://pymotw.com/3/subprocess/

    accepts a subset of subprocess.run keyword args: input, check,
    stdin/stdout/stderr; capture_output is accepted but ignored, and
    timeout raises NotImplementedError
    '''
    text_input = kwargs.get('input', None)
    capture_output = kwargs.get('capture_output', False)
    logging.debug('capture_output %s ignored', capture_output)
    timeout = kwargs.get('timeout', None)
    check = kwargs.get('check', None)
    if timeout:
        raise NotImplementedError('"timeout" not supported')
    # pylint: disable=bad-option-value, consider-using-with
    process = subprocess.Popen(
        command,
        # default all three streams to pipes, as capture_output would
        stdin=kwargs.get('stdin', subprocess.PIPE),
        stdout=kwargs.get('stdout', subprocess.PIPE),
        stderr=kwargs.get('stderr', subprocess.PIPE),
        # pylint: disable=bad-option-value, consider-using-dict-items
        # forward any remaining kwargs, minus the ones handled above
        **{k: kwargs[k] for k in kwargs if k not in
           ['input', 'capture_output', 'timeout', 'check']}
    )
    stdout, stderr = process.communicate(text_input)
    if check and process.returncode:
        # NOTE(review): subprocess.run puts only stdout in `output`;
        # here the (stdout, stderr) tuple is passed -- confirm callers
        # expect that shape
        raise subprocess.CalledProcessError(process.returncode, command,
                                            output=(stdout, stderr))
    # anonymous class mimicking CompletedProcess's stdout/stderr attrs
    return type('', (), {
        'stdout': stdout,
        'stderr': stderr,
    })
def decrypt(self, data, keyid=None):
    '''
    gpg decrypt data

    returns the CompletedProcess from gpg with extra attributes:
    `data` (decrypted plaintext), `username` and `trust_text` parsed
    from gpg's stderr, or None for both when no good signature found
    '''
    self.defaultkey = self.defaultkey or keyid
    command = ['gpg', '--decrypt']
    if self.defaultkey:
        command.extend(['--default-key', self.defaultkey])
    run = subprocess.run(
        command, input=data, capture_output=True, check=False)
    run.data = run.stdout
    logging.debug('decrypt stderr: %s', run.stderr)
    output = list(filter(None, run.stderr.decode().split('\n')))
    try:
        # bugfix: output[-1] was previously evaluated before the try
        # block, so empty gpg stderr crashed with uncaught IndexError;
        # both empty-output and no-match now fall through to None
        logging.debug('looking for username and trust_text in %s',
                      output[-1])
        run.username, run.trust_text = re.compile(
            r'^gpg: Good signature from "([^"]+)" \[([^]]+)\]$').match(
                output[-1]).groups()
    except (AttributeError, IndexError):
        run.username = run.trust_text = None
    return run
def guess_mimetype(filename, contents):
    '''
    guess and return mimetype based on name and/or contents
    '''
    logging.debug('filename: %s, contents: %r', filename, contents[:32])
    # only a couple of extensions are special-cased; everything else is
    # served as HTML
    known = {
        '.jpg': 'image/jpeg',
        '.css': 'text/css',
    }
    _, extension = os.path.splitext(filename)
    return known.get(extension, 'text/html')
def validate_pattern(value):
    '''
    helper function for values as compiled regex
    '''
    pattern = self.values
    logging.debug('validating pattern %s matches value %r',
                  pattern, value)
    try:
        return pattern.match(value)
    except TypeError as error:
        raise PostValidationError(
            '%r is wrong type for pattern match %s' %
            (value, self)) from error
def verify(self, signed):
    '''
    verify signature on given signed data

    returns the CompletedProcess from gpg with extra attributes parsed
    from its stderr: timestamp, key_id, username, trust_text; raises
    if any of them cannot be found
    '''
    run = subprocess.run(['gpg', '--verify'],
                         input=signed,
                         capture_output=True,
                         check=False)
    # gpg writes its human-readable verification report to stderr
    output = run.stderr.decode().split('\n')
    combined = ' '.join(output)
    try:
        # first line carries the signature timestamp
        run.timestamp = re.compile(
            r'^gpg: Signature made (.*?)(?: using .*)?$').match(
                output[0]).groups()[0]
        logging.debug('run.timestamp: %s', run.timestamp)
        run.key_id = re.compile(
            r' using RSA key (?:ID )?([0-9A-F]{8,40})\s').search(
                combined).groups()[0]
        logging.debug('run.key_id: %s', run.key_id)
        pattern = re.compile(
            r' Good signature from "([^"]+)"(?: \[([^]]+)\])?')
        logging.debug('pattern: %s', pattern)
        # trust_text may be None when gpg omits the bracketed trust level
        run.username, run.trust_text = pattern.search(combined).groups()
        logging.debug('run.username: %s, run.trust_text: %s',
                      run.username, run.trust_text)
    except (AttributeError, IndexError) as problem:
        # AttributeError: a regex failed to match (match/search -> None)
        # IndexError: stderr had no lines at all
        logging.exception('did not find needed data in %r', combined)
        raise problem
    return run
def background():
    '''
    load and maintain cache

    communicate with other kybyz servers

    runs forever; intended for a daemon thread (see init())
    '''
    # poll interval in seconds, overridable via environment
    delay = int(os.getenv('KB_DELAY') or 600)  # seconds
    CACHED['ircbot'] = IRCBot(nickname=CACHED.get('username', None))
    while True:
        logging.info('kybyz active %s seconds', CACHED['uptime'], **TO_PAGE)
        time.sleep(delay)  # releases the GIL for `serve`
        CACHED['uptime'] += delay
        logging.debug('CACHED: %s, threads: %s', CACHED,
                      threading.enumerate())
def verify_key(email):
    '''
    fetch user's GPG key and make sure it matches given email address
    '''
    gpg = GPG()
    # sign an empty message, then verify our own signature to discover
    # which certificate the keyring associates with this email
    # pylint: disable=no-member
    signature = gpg.sign('', keyid=email)
    verified = gpg.verify(signature.data)
    logging.debug('verified: %s', verified)
    expected_suffix = '<' + email + '>'
    if not verified.username.endswith(expected_suffix):
        raise ValueError('%s no match for GPG certificate %s' %
                         (email, verified.username))
    return verified.key_id
def sendchunk(self, chunk):
    '''
    send a chunk. it must already be UTF8 encoded
    '''
    # retry on broken pipe; give up after 5 consecutive failures
    failures = 0
    while True:
        try:
            self.client.send(chunk)
            return
        except BrokenPipeError:
            logging.debug('lost connection, waiting for monitor to rejoin')
            failures += 1
            if failures == 5:
                raise
            time.sleep(3)
def publish(post_id, publish_to='all'):
    '''
    send post out to network

    unencrypted if to='all', otherwise encrypted to each recipient
    with their own keys

    raises ValueError unless post_id matches exactly one stored post
    '''
    posts = find_posts(KYBYZ_HOME, post_id)
    post_count = len(posts)
    if post_count != 1:
        # bugfix: the ambiguous-suffix message has two placeholders but
        # was formatted with only the posts list, which raised TypeError
        # instead of the intended ValueError
        raise ValueError(
            'No posts matching %r' % post_id if post_count == 0
            else 'Ambiguous suffix %r matches %s' % (post_id, posts))
    recipients = publish_to.split(',')
    for recipient in recipients:
        logging.debug('recipient: %s', recipient)
        if recipient == 'all':
            # broadcast plaintext to the shared channel
            send(CHANNEL, '-', read(posts[0]))
        else:
            # encrypt to the individual recipient's key
            send(recipient, recipient, read(posts[0]))
def loadposts(to_html=True, tries=0):
    '''
    fetch and return all posts from KYBYZ_HOME or, if empty, from EXAMPLE

    setting to_html to True forces conversion from JSON format to HTML

    `tries` guards the single level of recursion used to seed the cache
    from the example posts
    '''
    logging.debug('running loadposts(%s)', to_html)
    if not get_posts(KYBYZ_HOME):
        if tries > 1:
            raise ValueError('No posts found after example posts cached')
        # populate KYBYZ_HOME from EXAMPLE
        for example in get_posts(EXAMPLE):
            post(None, read(example).decode())
        # recurse once now that the cache is seeded
        return loadposts(to_html, tries=tries + 1)
    # now cache any that came in over the wire
    for index in range(len(POSTS_QUEUE)):  # pylint: disable=unused-variable
        post(None, POSTS_QUEUE.popleft())
    get_post = BasePost if to_html else read
    posts = [get_post(p) for p in get_posts(KYBYZ_HOME)]
    # NOTE(review): the sort key reads p.timestamp, which only exists on
    # BasePost objects -- the to_html=False path (raw `read` bytes) looks
    # like it would fail here; confirm intended usage
    return sorted(filter(None, posts),
                  key=lambda p: p.timestamp, reverse=True)
def cache(path, data):
    '''
    store data in cache for later retrieval

    returns the absolute path of the cached file; raises ValueError on
    attempted path traversal outside KYBYZ_HOME
    '''
    fullpath = os.path.realpath(os.path.join(KYBYZ_HOME, path))
    # refuse to write outside the app's home directory (path traversal)
    if not fullpath.startswith(os.path.realpath(KYBYZ_HOME) + os.sep):
        raise ValueError('Attempt to write %s outside of app bounds' %
                         fullpath)
    os.makedirs(os.path.dirname(fullpath), exist_ok=True)
    binary = 'b' if isinstance(data, bytes) else ''
    try:
        # 'x' mode fails if the file already exists, making the cache
        # write-once
        with open(fullpath, 'x' + binary) as outfile:
            outfile.write(data)
    except FileExistsError:
        existing = read(fullpath)
        if data != existing:
            logging.error('Failed to update %s from %r to %r',
                          fullpath, existing, data)
        else:
            # bugfix: format spec was a bare '%', which is invalid and
            # would not interpolate the path into the log message
            logging.debug('%s already cached', fullpath)
    return fullpath
def registration():
    '''
    get and return information on user, if any

    assume only one key for user's email address, for now. we should
    probably pick the one with the latest expiration date.

    follows the symlink chain KYBYZ_HOME -> username -> email -> gpgkey
    and returns a REGISTRATION namedtuple (fields None if unregistered)
    '''
    username = email = gpgkey = None
    links = []
    if os.path.exists(KYBYZ_HOME):
        try:
            links.append(os.readlink(KYBYZ_HOME))
            username = os.path.basename(links[-1])
            links.append(os.readlink(links[-1]))
            email = os.path.basename(links[-1])
            links.append(os.readlink(links[-1]))
            gpgkey = os.path.basename(links[-1])
            logging.debug('links found: %s', links)
        except OSError:
            # bugfix: when the very first readlink fails, `links` is
            # empty and links[-1] raised IndexError inside the handler
            logging.exception('Bad registration at %s',
                              links[-1] if links else KYBYZ_HOME)
    return REGISTRATION(username, email, gpgkey)
def decrypt(message):
    '''
    decrypt a message sent to me, and verify sender email

    returns (data, verified) where data is the plaintext bytes and
    verified is a human-readable description of the trust level, or
    'unencoded' for plain (non-base58) messages
    '''
    gpg = GPG()
    verified = decoded = b''
    logging.debug('decoding %s...', message[:64])
    try:
        decoded = b58decode(message)
        logging.debug('decrypting %r...', decoded[:64])
        decrypted = gpg.decrypt(decoded)  # pylint: disable=no-member
        verified = 'trust level %s' % decrypted.trust_text
    except ValueError:
        # not base58: treat the message as already-plaintext
        logging.warning('%r... not base58 encoded', message[:32])
        decrypted = type('', (), {'data': message})
        verified = 'unencoded'
    except subprocess.CalledProcessError as problem:
        # gpg failed outright; return empty data
        # NOTE(review): `verified` stays b'' on this path, unlike the
        # string values on the other paths -- confirm callers handle it
        logging.exception(problem)
        decrypted = type('', (), {'data': b''})
    return decrypted.data, verified
def init():
    '''
    initialize application

    loads registration into CACHED, registering first from the
    KB_USERNAME/KB_EMAIL environment variables if needed, then starts
    the background maintenance thread
    '''
    logging.debug('beginning kybyz initialization')
    # 0o700: cache directory is private to this user
    os.makedirs(CACHE, 0o700, exist_ok=True)
    CACHED.update(registration()._asdict())
    if not CACHED['gpgkey']:
        username = os.getenv('KB_USERNAME', None)
        email = os.getenv('KB_EMAIL', None)
        if username and email:
            register(username, email)
            # re-read registration now that it exists
            CACHED.update(registration()._asdict())
        else:
            logging.error('need to set envvars KB_USERNAME and KB_EMAIL')
    CACHED['uptime'] = 0
    # overwritten with a success message once the browser's javascript
    # calls back in (see serve())
    CACHED['javascript'] = 'ERROR:javascript disabled or incompatible'
    logging.debug('CACHED: %s', CACHED)
    kybyz = threading.Thread(target=background, name='kybyz')
    kybyz.daemon = True
    kybyz.start()
def privmsg(self, target, message):
    '''
    simulates typing a message in ircII with no preceding command

    target should be a channel name preceded by '#', or nick

    message should not have any embedded CRLFs, or non-ASCII characters.
    '''
    sep = '\xa0'  # separates prefix from message
    logging.debug('message: %r', message)
    # build the full on-the-wire form to measure against IRC's 510-byte
    # line limit
    testmsg = ' '.join(
        [CACHED['irc_id'], 'PRIVMSG', target, sep + message])
    logging.debug('testmsg: %s', testmsg.replace(sep, ':'))
    if len(testmsg) <= 510:
        self.sendchunk(('PRIVMSG %s :%s\r\n' % (target, message)).encode())
    else:
        # message too long: determine how much payload fits after the
        # protocol prefix, then send the message in chunks of that size
        pieces = testmsg[:510].split(sep)
        chunklength = len(pieces[-1])
        for chunk in [
                message[i:i + chunklength]
                for i in range(0, len(message), chunklength)
        ]:
            logging.debug('sending chunk %s', chunk)
            # NOTE(review): chunks are sent without the ':' before the
            # text, unlike the single-message path -- presumably the
            # receiver (monitor) relies on that to reassemble; confirm
            self.sendchunk(
                ('PRIVMSG %s %s\r\n' % (target, chunk)).encode())
def check_username(identifier):
    '''
    identifier is :[email protected]' and CACHED['username'] == 'bleah'

    >>> CACHED['username'] = '******'
    >>> check_username(':[email protected]')
    ('bleah', True)
    >>> check_username(':[email protected]')
    ('blah', False)
    >>> check_username(':irc.lfnet.org')
    (None, None)
    '''
    # the nick sits between the leading ':' and the '!' in an IRC prefix
    try:
        start = identifier.index(':') + 1
        end = identifier.index('!')
        nick = identifier[start:end]
        logging.debug('identifier: %s, start: %s, end: %s, check: %s',
                      identifier, start, end, nick)
        matched = CACHED.get('username', None) == nick
    except ValueError:
        # ignore failure, because PINGs don't have username anyway
        #logging.error('cannot find nickname in %s', identifier)
        nick = matched = None
    return nick, matched
def send(recipient, email, *words):
    '''
    encrypt, sign, and send a private message to recipient

    `recipient` is the 'nick' (nickname) of the user to whom you wish
    to send the message.

    `email` is not necessarily an email address, but is used to find
    the GPG key of the recipient.

    use `-` instead of email to send plain text
    '''
    # several str words get joined; a single bytes arg passes through
    if len(words) > 1 or isinstance(words[0], str):
        text = ' '.join(words).encode()
    else:
        text = words[0]  # as when called by `publish`
    logging.debug('words: %s', words)
    encoded = None
    if email != '-':
        gpg = GPG()
        logging.debug('message before encrypting: %s', text)
        encrypted = gpg.encrypt(
            text,  # pylint: disable=no-member
            [email],
            sign=True,
            armor=False)
        logging.debug('encrypted: %r...', encrypted.data[:64])
        # base58 keeps the binary ciphertext IRC-safe
        encoded = b58encode(encrypted.data).decode()
        logging.debug('encoded: %s', encoded)
    # falsy `encoded` here means encryption was skipped (email == '-')
    # or produced no data (failure)
    if text and not encoded:
        if email == '-' or os.getenv('KB_SEND_PLAINTEXT_OK'):
            logging.warning('encryption %s, sending plaintext',
                            'bypassed' if email == '-' else 'failed')
            encoded = text.decode()
        else:
            logging.warning('encryption failed, run with '
                            'KB_SEND_PLAINTEXT_OK=1 to send anyway')
            logging.warning('setting message to "(encryption failed)"')
            encoded = '(encryption failed)'
    CACHED['ircbot'].privmsg(recipient, encoded)
def validate(self):
    '''
    make sure post contents fit the version given

    note that additional attributes can be given a post and they will
    not be checked; we only check the schema
    '''
    # docstrings are stripped under -OO, and asserts under -O; this
    # guard refuses to run when validation below would be skipped
    if not self.__doc__:
        raise RuntimeError('Must not run with optimization')
    # why doesn't 'author' have default value from cache?
    logging.debug('BasePost.validate: CACHED: %s', CACHED)
    # a post must either carry its own matching 'type' or come from a
    # file whose extension names the class
    assert (getattr(self, 'type', None) == self.classname or getattr(
        self, 'filename', '').endswith('.' + self.classname))
    schema = self.versions[self.version]
    logging.debug('post validation schema: %s', schema)
    for attribute in schema:
        logging.debug('validating attribute %s in schema', attribute)
        # each schema entry knows how to validate (and default) itself
        schema[attribute].validate(self)
def post(post_type, *args, returned='hashed', **kwargs):
    '''
    make a new post from the command line or from another subroutine

    a single JSON arg supplies the attributes wholesale; otherwise args
    are parsed as key=value pairs. returns the content hash (or the new
    post object when returned != 'hashed'), or None on failure
    '''
    if len(args) == 1 and JSON.match(args[0]):
        try:
            kwargs.update(json.loads(args[0]))
        except json.decoder.JSONDecodeError:
            logging.error('Post not valid JSON format: %s' % args[0])
    else:
        logging.debug('args %s not valid JSON, using as key-value pairs',
                      args)
        for arg in args:
            logging.debug('parsing %s', arg)
            kwargs.update(dict((arg.split('=', 1), )))
    # override post_type if specified
    if post_type:
        kwargs.update({'type': post_type})
    try:
        newpost = BasePost(None, **kwargs)
        # cache the full post under its full-content hash...
        jsonified = newpost.to_json()
        post_type = newpost.type
        hashed = kbhash(jsonified)
        cached = cache('.'.join((hashed, post_type)), jsonified)
        # ...and the canonical (for_hashing) form under its own hash
        jsonified = newpost.to_json(for_hashing=True)
        hashed = kbhash(jsonified)
        hashcached = cache('.'.join((hashed, post_type)), jsonified)
        # symlink the extension-less canonical name to the full post
        unadorned = os.path.splitext(hashcached)[0]
        try:
            os.symlink(cached, unadorned)
        except FileExistsError:
            existing = os.readlink(unadorned)
            if existing != cached:
                logging.warning('updating post %s to %s', unadorned, cached)
                os.unlink(unadorned)
                os.symlink(cached, unadorned)
            else:
                logging.debug('%s already symlinked to %s',
                              unadorned, cached)
        return hashed if returned == 'hashed' else newpost
    except AttributeError:
        logging.exception('Post failed: attribute error')
        return None
    except TypeError:
        logging.exception('Post failed with kwargs: %s', kwargs)
        return None
def uwsgi_init(): ''' initialize uwsgi application ''' # pylint: disable=import-error, bad-option-value, import-outside-toplevel logging.debug('beginning kybyz uwsgi initialization') import uwsgi if os.getenv('ANDROID_ROOT') is None: import webbrowser else: webbrowser = type( '', (), {'open': lambda url: subprocess.call(['am', 'start', url])} ) port = host = None try: port = fromfd(uwsgi.sockets[0], AF_INET, SOCK_STREAM).getsockname()[1] host = 'localhost:%s' % port except AttributeError: logging.exception('cannot determine port') init() if not sys.stdin.isatty(): logging.info('running as background process, will not launch browser') else: if host is not None and not os.getenv('WSL'): logging.debug('opening browser window to %s', host) webbrowser.open('http://%s' % host) else: logging.exception('cannot open browser to %s', host) logging.info("if you're running under WSL (Windows Subsystem for" " Linux), just open Windows browser to %s", host) repl = threading.Thread(target=commandloop, name='repl') repl.daemon = True repl.start() logging.debug('uwsgi initialization complete')
def serve(env=None, start_response=None):
    '''
    handle web requests

    WSGI entry point: renders the timeline, serves static files, and
    dispatches /update/ and /ipfs/ requests
    '''
    # pylint: disable=too-many-locals, too-many-statements
    fields = cgi.FieldStorage(fp=env.get('wsgi.input'), environ=env)
    args = {k: fields[k].value for k in fields}
    logging.debug('args: %s', args)
    #sections = ['posts', 'messages']
    page = b'(Something went wrong)'
    env = env or {}
    requested = env.get('REQUEST_URI', None).lstrip('/')
    logging.debug('requested: "%s"', requested)
    status = '200 OK'
    headers = [('Content-type', 'text/html')]
    template = read('timeline.html').decode()
    # build the messages section and its hash (used for client polling)
    messages = ''.join(['<div>%s</div>' % message
                        for message in reversed(MESSAGE_QUEUE)])
    messages_hash = md5(messages.encode()).hexdigest()
    messages = MESSAGES.format(
        messages=messages,
        messages_hash=messages_hash,
        javascript=CACHED['javascript'])
    # same for the posts section
    posts = ''.join(['<div>%s</div>' % post for post in loadposts()])
    posts_hash = md5(posts.encode()).hexdigest()
    posts = POSTS.format(posts=posts, posts_hash=posts_hash)
    navigation = NAVIGATION.format(
        navigation=''.join(['<h3>Navigation</h3>']))
    # make helper functions for dispatcher

    def update():
        '''
        process xhr request for update to posts or messages
        '''
        name, hashed = args.get('name', None), args.get('hash', None)
        update_status = status  # default from outer variable
        if name in ('messages', 'posts'):
            # pylint: disable=eval-used
            # check outer variables
            # must be done before eval or it will fail
            # (referencing the closure variables here is what makes them
            # visible to eval below)
            logging.debug('messages: ...%s', messages[-128:])
            logging.debug('messages_hash: %s', messages_hash)
            logging.debug('posts: %s...', posts[:128])
            logging.debug('posts_hash: %s', posts_hash)
            # `name` is whitelisted above, so eval resolves only
            # messages/posts and their hashes
            if hashed and hashed != eval(name + '_hash'):
                update_page = eval(name).encode()
            elif hashed:
                logging.debug('%s unchanged', args['name'])
                update_page = b''
                update_status = '304 Not Modified'
            else:
                logging.error('no hash passed to /update/')
                update_page = b''
                update_status = '406 Not Acceptable'
        else:
            update_page = (
                '<div>no updates for %s</div>' % args['name']
            ).encode()
            update_status = '404 Not Found'
        return update_status, update_page

    if requested is not None and start_response:
        if requested == '':
            # root URL: render the full timeline page
            page = template.format(
                posts=posts,
                messages=messages,
                navigation=navigation,
                posts_hash=posts_hash,
                messages_hash=messages_hash,
            ).encode()
        elif os.path.exists(requested):
            # static file relative to the working directory
            page = read(requested)
            headers = [('Content-type', guess_mimetype(requested, page))]
        elif requested.startswith('update/'):
            # assume called by javascript, and thus that it's working
            CACHED['javascript'] = 'INFO:found compatible javascript engine'
            status, page = update()
        elif requested.startswith('ipfs/'):
            logging.debug('fetching uncached ipfs URL %s', requested)
            try:
                with urlopen('https://ipfs.io/' + requested) as request:
                    page = request.read()
                    headers = [
                        ('Content-type', guess_mimetype(requested, page))
                    ]
                    cache(requested, page)
            except HTTPError as failed:
                headers = failed.headers
                status = ' '.join([str(failed.code), failed.msg])
                # NOTE(review): bytes %-formatting with a str argument
                # raises TypeError in Python 3 -- this error path looks
                # broken; should probably be status.encode()
                page = b'<div>%s</div>' % status
        else:
            logging.warning('%s not found', requested)
            status = '404 Not Found'
            page = b'<div>not yet implemented</div>'
        # NOTE: page must be a bytestring at this point!
        logging.debug('starting response with status %s and page %s...',
                      status, page[:128])
        start_response(status, headers)
        return [page]
    logging.warning('serve: failing with env=%s and start_response=%s',
                    env, start_response)
    return [b'']
def monitor(self):
    '''
    wait for input. send a PONG for every PING

    intended to run in a daemon thread

    set ircbot.terminate to True in order to shut it down
    '''
    # NOTE(review): the loop condition is `tries < 10`, i.e. ten
    # consecutive ConnectionResetErrors -- the docstring's
    # ircbot.terminate flag is not checked here; confirm
    logging.debug('ircbot monitoring incoming traffic')
    tries = 0
    while tries < 10:
        try:
            received = self.stream.readline().rstrip()
            tries = 0
        except ConnectionResetError:
            tries += 1
            self.connect(self.server, self.port, self.nickname,
                         self.realname)
            continue
        logging.info('received: %r, length: %d', received, len(received))
        # a full 510-byte line means the sender chunked a long message;
        # anything shorter is (the end of) a complete message
        end_message = len(received) < 510
        # make sure all words[n] references are accounted for
        words = received.split() + ['', '', '']
        nickname, matched = check_username(words[0])
        if words[0] == 'PING':
            # answer PING with PONG (replace first 'I' with 'O')
            pong = received.replace('I', 'O', 1).rstrip() + CRLF
            logging.info('sending: %r', pong)
            self.client.send(pong.encode())
        elif words[1] == 'JOIN' and matched:
            # our own JOIN: remember the server-assigned identifier
            CACHED['irc_id'] = words[0]
            logging.info("CACHED['irc_id'] = %s", CACHED['irc_id'])
        elif words[1] == 'PRIVMSG':
            sender = nickname
            privacy = 'public' if words[2] == CHANNEL else 'private'
            logging.info('%s message received from %s:', privacy, sender)
            # chop preceding ':' from ':this is a private message'
            # accumulate chunks from this sender until decodable/complete
            CACHED[sender] += ' '.join(words[3:])[1:].rstrip()
            # try decoding what we have so far
            logging.debug('attempting to decode %s', CACHED[sender])
            text, trustlevel = decrypt(CACHED[sender].encode())
            logging.debug('text: %s, trustlevel: %s', text, trustlevel)
            if text or end_message:
                text = text or CACHED[sender][:256].encode()
                # NOTE(review): these replace() calls are no-ops --
                # presumably they were HTML-escaping ('&lt;'/'&gt;')
                # that got mangled in transit; confirm against history
                logging.info(
                    '%s %s message from %s: %s',
                    trustlevel, privacy, sender,
                    text.decode().replace('<', '<').replace('>', '>'),
                    **TO_PAGE)
                if JSON.match(CACHED[sender]):
                    POSTS_QUEUE.append(CACHED[sender])
                    logging.debug('appended %r to POSTS_QUEUE',
                                  CACHED[sender])
                else:
                    logging.debug('Not JSON: %s', CACHED[sender])
                CACHED[sender] = ''
            elif len(CACHED[sender]) > MAXSIZE:
                logging.info(
                    'clearing overflow CACHED[%s]: %r..., length %d',
                    sender, CACHED[sender][:256], len(CACHED[sender]))
                CACHED[sender] = ''
            else:
                logging.debug('CACHED[%s] now %r', sender, CACHED[sender])
        # trim oversized cache entries after every line processed
        clearcache()
    logging.warning('ircbot terminated from launching thread')
def validate_none(value):  # pylint: disable=unused-argument
    '''
    helper function for value that can be anything

    always succeeds; used when the schema places no constraint on the
    attribute
    '''
    logging.debug('validating %r regardless of what it is', value)
    return True
class Post(BasePost):
    r'''
    encapsulation of kybyz post

    >>> str(Post(author='test',
    ...          fingerprint='0000000000000000'))  # doctest: +ELLIPSIS
    '<div class="post">\n...'
    '''
    # classname keys the subclass dispatch in BasePost.__new__
    classname = 'post'

class Netmeme(BasePost):
    '''
    encapsulation of kybyz Internet meme (netmeme is my abbreviation)
    '''
    classname = 'netmeme'

class Kybyz(BasePost):
    '''
    encapsulation of a "kybyz": a "thumbs-up" or other icon with
    optional text
    '''
    classname = 'kybyz'

if __name__ == '__main__':
    # smoke test: load and print the bundled example post
    logging.debug('testing post')
    print(BasePost('example.kybyz/testmeme.json'))
def validate_tuple(value):
    '''
    helper function for values tuple
    '''
    allowed = self.values
    logging.debug('validating that value %r in %s', value, allowed)
    return value in allowed
def validate(self, post):
    '''
    make sure this attribute fits requirement

    NOTE: sets attribute in post if not present and has default value
    '''
    def validate_tuple(value):
        '''
        helper function for values tuple
        '''
        logging.debug('validating that value %r in %s', value, self.values)
        return value in self.values

    def validate_pattern(value):
        '''
        helper function for values as compiled regex
        '''
        logging.debug('validating pattern %s matches value %r',
                      self.values, value)
        try:
            return self.values.match(value)
        except TypeError as error:
            raise PostValidationError(
                '%r is wrong type for pattern match %s' %
                (value, self)) from error

    def validate_none(value):  # pylint: disable=unused-argument
        '''
        helper function for value that can be anything
        '''
        logging.debug('validating %r regardless of what it is', value)
        return True

    def validate_lambda(value):
        '''
        helper function for values as lambda expression
        '''
        logging.debug('validating lambda expression %s(%r)',
                      self.values, value)
        return self.values(value)

    # pick the validator by the *type* of self.values: None, a compiled
    # regex, a lambda, or a tuple of allowed values
    validation_dispatcher = {
        type(None): validate_none,
        type(re.compile(r'^$')): validate_pattern,
        type(lambda: None): validate_lambda,
        type(()): validate_tuple,
    }
    required = self.required
    default = NoDefault
    if isinstance(required, tuple):
        # a tuple of attribute names: this attribute is required only
        # when all of those attributes are present on the post
        evaluated = all(
            (getattr(post, attribute, None) for attribute in required))
        logging.debug('tuple %s evaluated to required=%s',
                      required, evaluated)
        required = evaluated
    elif required not in [True, False]:
        # any non-boolean `required` doubles as the default value
        logging.debug('setting default for %s to %r', self.name, required)
        default = required
        required = True
    value = getattr(post, self.name, default)
    logging.debug('checking that %s value %r in %s',
                  self.name, value, self.values)
    validation_dispatcher[type(self.values)](value)
    if value == NoDefault and required:
        raise PostValidationError('Post %r lacks valid %s attribute' %
                                  (post, self.name))
    if value != NoDefault:
        logging.debug('setting attribute %s in post to %s',
                      self.name, value)
        setattr(post, self.name, value)  # default if nothing else
    else:
        logging.debug('attribute %s has value %r and no default value',
                      self.name, value)