class InteropArray(ServiceBase):
    @srpc(Array(Integer), _returns=Array(Integer))
    def echo_integer_array(ia):
        return ia

    @srpc(Array(String), _returns=Array(String))
    def echo_string_array(sa):
        return sa

    @srpc(Array(DateTime), _returns=Array(DateTime))
    def echo_date_time_array(dta):
        return dta

    @srpc(Array(Float), _returns=Array(Float))
    def echo_float_array(fa):
        return fa

    @srpc(Array(Double), _returns=Array(Double))
    def echo_double_array(da):
        return da

    @srpc(Array(Boolean), _returns=Array(Boolean))
    def echo_boolean_array(ba):
        return ba

    @srpc(Boolean(max_occurs="unbounded"),
                                      _returns=Boolean(max_occurs="unbounded"))
    def echo_simple_boolean_array(ba):
        return ba

    @srpc(Array(Boolean), _returns=Array(Array(Boolean)))
    def echo_array_in_array(baa):
        return baa
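# Usage sketch (not from the original source): one way a service like
# InteropArray could be mounted with Spyne's stock SOAP 1.1 protocol and WSGI
# server. The target namespace and port below are placeholder assumptions.
from wsgiref.simple_server import make_server

from spyne import Application
from spyne.protocol.soap import Soap11
from spyne.server.wsgi import WsgiApplication

interop_app = Application(
    [InteropArray],
    tns='spyne.examples.interop',  # hypothetical target namespace
    in_protocol=Soap11(validator='lxml'),
    out_protocol=Soap11(),
)

if __name__ == '__main__':
    # WSDL becomes available at http://127.0.0.1:8000/?wsdl
    make_server('127.0.0.1', 8000, WsgiApplication(interop_app)).serve_forever()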
class EmailBodyValue(ComplexModel):
    _type_info = [
        ('value', Unicode(
            doc="String The value of the body part after decoding "
                "Content-Transfer-Encoding and the Content-Type charset, "
                "if both known to the server, and with any CRLF replaced with "
                "a single LF. The server MAY use heuristics to determine the "
                "charset to use for decoding if the charset is unknown, "
                "no charset is given, or it believes the charset given is "
                "incorrect. Decoding is best effort; the server SHOULD insert "
                "the unicode replacement character (U+FFFD) and continue when "
                "a malformed section is encountered.\n\n"
                "Note that due to the charset decoding and line ending "
                "normalisation, the length of this string will probably not "
                "be exactly the same as the size property on the "
                "corresponding EmailBodyPart."
        )),

        ('is_encoding_problem', M(Boolean(
            sub_name='isEncodingProblem', default=False,
            doc="(default: false) This is true if malformed sections "
                "were found while decoding the charset, or the charset was "
                "unknown, or the content-transfer-encoding was unknown.",
        ))),

        ('is_truncated', M(Boolean(
            sub_name='isTruncated', default=False,
            doc="(default: false) This is true if the value has been "
                "truncated.",
        ))),
    ]
class BSL_parametersVraagSynchroon(ComplexModel):
    __namespace__ = STUF_XML_NS
    __type_name__ = 'BSL_parametersVraagSynchroon'

    _type_info = (
        ('sortering', simple_types.BSL_sortering),
        ('indicatorVervolgvraag', Boolean.customize(default=False)),
        ('maximumAantal', simple_types.MaximumAantal.customize(min_occurs=0)),
        ('indicatorAfnemerIndicatie', Boolean.customize(default=False, min_occurs=0)),
        ('indicatorAantal', Boolean.customize(default=False, min_occurs=0)),
    )

    ordering = BSLSortering
class ApplicationConfig(ComplexModel):
    create_schema = Boolean(no_file=True)
    """Create database schema."""

    bootstrap = Boolean(no_file=True)
    """Insert initial data to the database."""

    generate_data = Boolean(no_file=True)
    """Fill the database with test data."""

    write_interface_documents = Boolean(no_file=True)
    """Write the interface documents to the given output directory."""
class TimeSegment(ComplexModel, SegmentBase):
    _SEGMENT_RE = re.compile(
        u"([\\[\\]])"
        u"([0-9:\\.]+)"
        u","
        u"([0-9:\\.]+)"
        u"([\\[\\]])", re.DEBUG | re.UNICODE)

    _type_info = [
        ('start_inclusive', M(Boolean(default=True))),
        ('start', M(Time)),
        ('end', M(Time)),
        ('end_inclusive', M(Boolean(default=True))),
    ]
class ParametersAntwoordSynchroon(ComplexModel):
    """Type used in La01, La07, La09, La11 and La13 messages."""

    __namespace__ = STUF_XML_NS
    __type_name__ = 'ParametersAntwoordSynchroon'

    # As with VraagParameters, indicatorVervolgvraag and indicatorAfnemerIndicatie
    # are XML booleans
    _type_info = (
        ('indicatorVervolgvraag', Boolean.customize(default=False)),

        # "a subscriber indication has been placed, J/N (default N)"
        ('indicatorAfnemerIndicatie', Boolean.customize(default=False)),

        # "the number of objects that match the selection" - non-negative integer
        ('aantalVoorkomens', simple_types.AantalVoorkomens.customize(min_occurs=0)),
    )
class Authentiek(ComplexModel):
    __namespace__ = BG_XML_NS
    __type_name__ = 'authentiek'

    _type_info = [
        ('data', XmlData(Unicode)),
        ('metagegeven', XmlAttribute(
            Boolean.customize(__namespace__=STUF_XML_NS, min_occurs=1),
            ns=STUF_XML_NS)),
    ]
class ReceiveItemMasterOutput(ComplexModel):
    __namespace__ = NAMESPACE

    _type_info = [
        ('receiveItemMasterResult', Boolean(min_occurs=1, nillable=False)),
        ('responseHeader', ResponseHeader),
    ]
class ZAK_parametersVraagSynchroon(ComplexModel):
    __namespace__ = STUF_XML_NS
    __type_name__ = 'ZAK_parametersVraagSynchroon'

    _type_info = (
        # http://docplayer.nl/3947220-Cursus-stuf-maarten-van-den-broek-messagedesign.html
        # See ordering.ZAKSortering
        ('sortering', simple_types.ZAK_sortering),

        # "is this a follow-up question, J/N (default N)"
        ('indicatorVervolgvraag', Boolean.customize(default=False)),

        ('maximumAantal', simple_types.MaximumAantal.customize(min_occurs=0)),

        # TODO [TECH]: Taiga #151 This indicates that we want to be kept
        # informed of updates. Not implemented.
        # "place a subscriber indication for the selected objects, J/N (default N)"
        ('indicatorAfnemerIndicatie', Boolean.customize(default=False, min_occurs=0)),

        # "return the number of occurrences that match the selection"
        ('indicatorAantal', Boolean.customize(default=False, min_occurs=0)),
    )

    ordering = ZAKSortering
class DatabaseConfig(ComplexModel):
    name = Unicode
    """Database name. Must be a valid python variable name."""

    type = Unicode(values=['sqlalchemy'])
    """Connection type. Only 'sqlalchemy' is supported."""

    conn_str = Unicode
    """Connection string. See SQLAlchemy docs for more info."""

    pool_size = UnsignedInteger(default=10)
    """Max. number of connections in the db conn pool."""

    show_queries = Boolean(default=False)
    """Logs sql queries."""

    show_results = Boolean(default=False)
    """Logs sql queries as well as their results."""
class DaemonConfig(ComplexModel):
    SECTION_NAME = 'basic'

    daemonize = Boolean(default=False)
    """Fork the process to the background."""

    log_file = AbsolutePath
    """Log file."""

    pid_file = AbsolutePath
    """File that will contain the pid of the daemon process."""

    config_file = AbsolutePath
    """Alternative configuration file."""

    uid = SystemUser
    """Daemon will drop privileges and switch to this uid when specified."""

    gid = SystemGroup
    """Daemon will drop privileges and switch to this gid when specified."""

    log_level = Unicode(values=['DEBUG', 'INFO'], default='DEBUG')
    """Logging level."""

    show_rpc = Boolean(default=False)
    """Log raw request and response data."""

    secret = ByteArray(default_factory=lambda: [os.urandom(64)], no_cmdline=True)
    """Cookie encryption key. Keep secret."""

    thread_min = UnsignedInteger(default=3)
    """Min number of threads in the thread pool."""

    thread_max = UnsignedInteger(default=10)
    """Max number of threads in the thread pool."""

    listeners = Array(ListenerConfig)
class StaticFileServer(HttpApplication):
    path = String
    list_contents = Boolean(default=False)

    def __init__(self, *args, **kwargs):
        # We need the default ComplexModelBase ctor and not HttpApplication's
        # custom ctor here
        ComplexModelBase.__init__(self, *args, **kwargs)

    def gen_resource(self):
        from twisted.web.static import File
        from twisted.web.resource import ForbiddenResource

        if self.list_contents:
            return File(abspath(self.path))

        class StaticFile(File):
            def directoryListing(self):
                return ForbiddenResource()

        return StaticFile(abspath(self.path))
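# Usage sketch (not from the original source): serving the resource returned
# by gen_resource() with Twisted. The path and port are placeholders, and
# constructing StaticFileServer directly with keyword arguments is assumed to
# be enough here, since __init__ falls back to the plain ComplexModelBase ctor.
from twisted.internet import reactor
from twisted.web.server import Site

static = StaticFileServer(path='/var/www/assets', list_contents=False)

# Directory listings are forbidden unless list_contents is True.
reactor.listenTCP(8080, Site(static.gen_resource()))
reactor.run()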
class MailCapabilities(ComplexModel):
    _type_info = [
        ('max_mailboxes_per_email', UnsignedInteger(
            sub_name='maxMailboxesPerEmail',
            doc="The maximum number of Mailboxes (see "
                "Section 2) that can be assigned to a single Email "
                "object (see Section 4). This MUST be an integer >= 1, "
                "or null for no limit (or rather, the limit is always the "
                "number of Mailboxes in the account).")),

        ('max_mailbox_depth', UnsignedInteger(
            sub_name='maxMailboxDepth',
            doc="The maximum depth of the Mailbox hierarchy "
                "(i.e., one more than the maximum number of ancestors a "
                "Mailbox may have), or null for no limit.")),

        ('max_size_mailbox_name', M(UnsignedInteger(
            sub_name='maxSizeMailboxName',
            doc="The maximum length, in (UTF-8) octets, allowed "
                "for the name of a Mailbox. This MUST be at least 100, "
                "although it is recommended servers allow more."))),

        ('max_size_attachments_per_email', M(UnsignedInteger(
            sub_name='maxSizeAttachmentsPerEmail',
            doc="The maximum total size of attachments, "
                "in octets, allowed for a single Email object. A server MAY "
                "still reject the import or creation of an Email with a "
                "lower attachment size total (for example, if the body "
                "includes several megabytes of text, causing the size of the "
                "encoded MIME structure to be over some server-defined "
                "limit).\n\n"
                "Note that this limit is for the sum of unencoded attachment "
                "sizes. Users are generally not knowledgeable about encoding "
                "overhead, etc., nor should they need to be, so marketing "
                "and help materials normally tell them the “max size "
                "attachments”. This is the unencoded size they see on their "
                "hard drive, so this capability matches that and allows the "
                "client to consistently enforce what the user understands as "
                "the limit.\n\n"
                "The server may separately have a limit for the total size "
                "of the message [@!RFC5322], created by combining the "
                "attachments (often base64 encoded) with the message headers "
                "and bodies. For example, suppose the server advertises "
                "maxSizeAttachmentsPerEmail: 50000000 (50 MB). The enforced "
                "server limit may be for a message size of 70000000 octets. "
                "Even with base64 encoding and a 2 MB HTML body, "
                "50 MB attachments would fit under this limit."))),

        ('email_query_sort_options', Array(Unicode,
            sub_name='emailQuerySortOptions',
            doc="A list of all the values the server supports for "
                "the “property” field of the Comparator object in an "
                "Email/query sort (see Section 4.4.2). This MAY include "
                "properties the client does not recognise (for example, "
                "custom properties specified in a vendor extension). Clients "
                "MUST ignore any unknown properties in the list.")),

        ('may_create_top_level_mailbox', Boolean(
            sub_name='mayCreateTopLevelMailbox',
            doc="If true, the user may create a Mailbox (see Section "
                "2) in this account with a null parentId. (Permission for "
                "creating a child of an existing Mailbox is given by the "
                "myRights property on that Mailbox.)")),
    ]
class ParametersVraag_gzdb(ComplexModel):
    __namespace__ = STUF_XML_NS
    __type_name__ = 'ParametersVraag_gzdb'

    _type_info = (
        ('indicatorAfnemerIndicatie', Boolean.customize(default=False, min_occurs=0)),
    )
class Daemon(ComplexModel):
    """A couple of neurons."""

    LOGGING_DEVEL_FORMAT = "%(module)-15s | %(message)s"
    LOGGING_PROD_FORMAT = "%(asctime)s | %(module)-8s | %(message)s"

    _type_info = [
        ('uuid', Uuid(no_cli=True,
            help="Daemon uuid. Regenerated every time a new "
                 "config file is written. It could come in handy.")),

        ('secret', ByteArray(no_cli=True,
            help="Secret key for signing cookies and other stuff.")),

        ('daemonize', Boolean(default=False,
            help="Daemonizes before everything else.")),

        ('uid', Unicode(
            help="The daemon user. You need to start the server as a "
                 "privileged user for this to work.")),

        ('gid', Unicode(
            help="The daemon group. You need to start the server as a "
                 "privileged user for this to work.")),

        ('pid_file', String(
            help="The path to a text file that contains the pid of the "
                 "daemonized process.")),

        ('logger_dest', String(
            help="The path to the log file. The server won't daemonize "
                 "without this. Converted to an absolute path if it isn't "
                 "one already.")),

        ('log_rpc', Boolean(help="Log raw rpc data.")),
        ('log_queries', Boolean(help="Log sql queries.")),
        ('log_results', Boolean(
            help="Log query results in addition to the queries themselves.")),

        ('main_store', Unicode(
            help="The name of the store for binding "
                 "neurons.TableModel's metadata to.")),

        ('bootstrap', Boolean(
            help="Bootstrap the application. Create schema, "
                 "insert initial data, etc.",
            no_config=True)),

        ('_services', Array(Service, sub_name='services')),
        ('_stores', Array(StorageInfo, sub_name='stores')),
        ('_loggers', Array(Logger, sub_name='loggers')),
    ]

    # FIXME: we need this atrocity with custom constructor and properties
    # because spyne doesn't support custom containers
    def __init__(self, *args, **kwargs):
        super(Daemon, self).__init__(*args, **kwargs)

        services = kwargs.get('services', None)
        if services is not None:
            self.services = services
        if not hasattr(self, 'services') or self.services is None:
            self.services = _Twrdict('name')()

        stores = kwargs.get('stores', None)
        if stores is not None:
            self.stores = stores
        if not hasattr(self, 'stores') or self.stores is None:
            self.stores = _wdict()

        loggers = kwargs.get('loggers', None)
        if loggers is not None:
            self.loggers = loggers
        if not hasattr(self, 'loggers') or self.loggers is None:
            self.loggers = _wdict()

    @property
    def _services(self):
        if self.services is not None:
            for k, v in self.services.items():
                v.name = k
            return self.services.values()

        self.services = _Twrdict('name')()
        return []

    @_services.setter
    def _services(self, what):
        self.services = what
        if what is not None:
            self.services = _Twrdict('name')([(s.name, s) for s in what])

    @property
    def _stores(self):
        if self.stores is not None:
            for k, v in self.stores.items():
                v.name = k
            return self.stores.values()

        self.stores = _wdict()
        return []

    @_stores.setter
    def _stores(self, what):
        self.stores = what
        if what is not None:
            self.stores = _wdict([(s.name, s) for s in what])

    @property
    def _loggers(self):
        if self.loggers is not None:
            for k, v in self.loggers.items():
                v.name = k
            return self.loggers.values()

        self.loggers = _wdict()
        return []

    @_loggers.setter
    def _loggers(self, what):
        self.loggers = what
        if what is not None:
            self.loggers = _wdict([(s.path, s) for s in what])

    @classmethod
    def get_default(cls, daemon_name):
        return cls(
            uuid=uuid1(),
            secret=os.urandom(64),
            _stores=[
                Relational(
                    name="sql_main",
                    backend="sqlalchemy",
                    pool_size=10,
                    pool_recycle=3600,
                    pool_timeout=30,
                    max_overflow=3,
                    conn_str='postgres://postgres:@localhost:5432/%s_%s' %
                                         (daemon_name, getpass.getuser()),
                    sync_pool=True,
                    async_pool=True,
                ),
            ],
            main_store='sql_main',
            _loggers=[
                Logger(path='.', level='DEBUG',
                                            format=cls.LOGGING_DEVEL_FORMAT),
            ],
        )

    def apply_logging(self):
        # We're using twisted logging only for IO.
        from twisted.python.logger import FileLogObserver
        from twisted.python.logger import Logger, LogLevel, globalLogPublisher

        LOGLEVEL_TWISTED_MAP = {
            logging.DEBUG: LogLevel.debug,
            logging.INFO: LogLevel.info,
            logging.WARN: LogLevel.warn,
            logging.ERROR: LogLevel.error,
            logging.CRITICAL: LogLevel.critical,
        }

        class TwistedHandler(logging.Handler):
            def emit(self, record):
                assert isinstance(record, logging.LogRecord)
                Logger(record.name).emit(LOGLEVEL_TWISTED_MAP[record.levelno],
                                                  log_text=self.format(record))

        if self.logger_dest is not None:
            from twisted.python.logfile import DailyLogFile

            self.logger_dest = abspath(self.logger_dest)
            if access(dirname(self.logger_dest), os.R_OK | os.W_OK):
                log_dest = DailyLogFile.fromFullPath(self.logger_dest)
            else:
                Logger().warn("%r is not accessible. We need rwx on it to "
                              "rotate logs." % dirname(self.logger_dest))
                log_dest = open(self.logger_dest, 'wb+')

            formatter = logging.Formatter(self.LOGGING_PROD_FORMAT)

        else:
            formatter = logging.Formatter(self.LOGGING_DEVEL_FORMAT)
            log_dest = open('/dev/stdout', 'wb+')

            try:
                import colorama
                colorama.init()
                logger.debug("colorama loaded.")

            except Exception as e:
                logger.debug("colorama not loaded: %r" % e)

        def record_as_string(record):
            if 'log_text' in record:
                return record['log_text'] + "\n"
            if 'message' in record:
                return record['message'] + "\n"
            if 'log_failure' in record:
                failure = record['log_failure']
                return "%s: %s" % (failure.type, pformat(vars(failure.value)))
            return pformat(record)

        observer = FileLogObserver(log_dest, record_as_string)
        globalLogPublisher.addObserver(observer)

        handler = TwistedHandler()
        handler.setFormatter(formatter)
        logging.getLogger().addHandler(handler)

        for l in self._loggers or []:
            l.apply()

        if self.log_rpc or self.log_queries or self.log_results:
            logging.getLogger().setLevel(logging.DEBUG)

        if self.log_rpc:
            logging.getLogger('spyne.protocol').setLevel(logging.DEBUG)
            logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG)
            logging.getLogger('spyne.protocol.dictdoc').setLevel(logging.DEBUG)

        if self.log_queries:
            logging.getLogger('sqlalchemy').setLevel(logging.INFO)

        if self.log_results:
            logging.getLogger('sqlalchemy').setLevel(logging.DEBUG)

    def sanitize(self):
        if self.logger_dest is not None:
            self.logger_dest = abspath(self.logger_dest)
        if self.pid_file is not None:
            self.pid_file = abspath(self.pid_file)

    def apply(self, for_testing=False):
        """Daemonizes the process if requested, then sets up logging and data
        stores.
        """

        # FIXME: apply_storage could return a deferred due to txpool init.

        # Daemonization won't work if twisted is imported before fork().
        # It's best to know this in advance or you'll have to deal with daemons
        # that work perfectly well in development environments but won't boot
        # in production ones, solely because of fork()ing.
        assert for_testing or not ('twisted' in sys.modules), \
            "Twisted is already imported!"

        self.sanitize()

        if self.daemonize:
            assert self.logger_dest, \
                "Refusing to start without any log output."
            daemonize()

        self.apply_logging()

        if self.pid_file is not None:
            pid = os.getpid()
            with open(self.pid_file, 'w') as f:
                f.write(str(pid))
            logger.debug("Pid file is at: %r", self.pid_file)

        self.apply_storage()

    def apply_storage(self):
        for store in self._stores or []:
            try:
                store.apply()
            except Exception as e:
                logger.exception(e)
                raise

            if self.main_store == store.name:
                engine = store.itself.engine

                import neurons
                neurons.TableModel.Attributes.sqla_metadata.bind = engine

    @classmethod
    def parse_config(cls, daemon_name, argv=None):
        _apply_custom_attributes(cls)
        retval = cls.get_default(daemon_name)
        file_name = abspath('%s.yaml' % daemon_name)

        argv_parser = spyne_to_argparse(cls)

        cli = {}
        if argv is not None and len(argv) > 1:
            cli = dict(argv_parser.parse_args(argv[1:]).__dict__.items())

        # .get() so that an empty cli dict (no command line args) doesn't
        # raise KeyError here
        if cli.get('config_file') is not None:
            file_name = abspath(cli['config_file'])
            del cli['config_file']

        exists = isfile(file_name) and os.access(file_name, os.R_OK)
        if exists:
            retval = yaml_loads(open(file_name).read(), cls,
                                          validator='soft', polymorphic=True)
        else:
            if not access(dirname(file_name), os.R_OK | os.W_OK):
                raise Exception("File %r can't be created in %r" %
                                             (file_name, dirname(file_name)))

        for k, v in cli.items():
            if v not in (None, False):
                setattr(retval, k, v)

        retval.config_file = file_name

        return retval

    def write_config(self):
        open(self.config_file, 'wb').write(
            get_object_as_yaml(self, self.__class__, polymorphic=True))
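# Usage sketch (not from the original source): driving the Daemon config
# object from an entry point, using only the methods defined above. The
# daemon name 'my_daemon' is a placeholder; service wiring and error handling
# are omitted.
import sys


def main(argv=sys.argv):
    # Merge defaults, the YAML config file and the command line.
    config = Daemon.parse_config('my_daemon', argv)

    # Persist the effective configuration back to the config file.
    config.write_config()

    # Daemonize if requested, then set up logging and data stores.
    config.apply()

    return 0


if __name__ == '__main__':
    sys.exit(main())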
class Email(ComplexModel):
    _type_info = [
        #
        # Metadata
        #
        ('id', JmapId(
            sub_name='id',
            doc="(immutable; server-set) The id of the Email object. Note "
                "that this is the JMAP object id, NOT the Message-ID header "
                "field value of the message [@!RFC5322]."
        )),

        ('blob_id', JmapId(
            sub_name='blobId',
            doc="(immutable; server-set) The id representing the raw "
                "octets of the message [@!RFC5322] for this Email. This may "
                "be used to download the raw original message or to attach it "
                "directly to another Email, etc."
        )),

        ('thread_id', JmapId(
            sub_name='threadId',
            doc="(immutable; server-set) The id of the Thread to which "
                "this Email belongs."
        )),

        ('mailbox_ids', AnyDict(  # this is supposed to be a JmapId: bool dict
            sub_name='mailboxIds',
            doc="The set of Mailbox ids this Email belongs to. An "
                "Email in the mail store MUST belong to one or more Mailboxes "
                "at all times (until it is destroyed). The set is represented "
                "as an object, with each key being a Mailbox id. The value "
                "for each key in the object MUST be true."
        )),

        ('keywords', AnyDict(  # this is supposed to be a str: bool dict
            sub_name='keywords',
            doc="(default: {}) A set of keywords that apply "
                "to the Email. The set is represented as an object, with the "
                "keys being the keywords. The value for each key in the "
                "object MUST be true."
        )),

        ('size', UnsignedInteger(
            sub_name='size',
            doc="(immutable; server-set) The size, in octets, "
                "of the raw data for the message [@!RFC5322] (as referenced "
                "by the blobId, i.e., the number of octets in the file the "
                "user would download)."
        )),

        ('received_at', UtcDate(
            default_factory=lambda:
                datetime.utcnow().replace(tzinfo=pytz.utc).isoformat(),
            sub_name='receivedAt',
            doc="(immutable; default: time of creation on server) The "
                "date the Email was received by the message store. This is "
                "the internal date in IMAP [@?RFC3501]."
        )),

        #
        # Header fields
        #
        ('message_id', Array(Unicode,
            sub_name='messageId',
            doc="(immutable) The value is identical to the "
                "value of header:Message-ID:asMessageIds. For messages "
                "conforming to RFC 5322 this will be an array with a single "
                "entry."
        )),

        ('in_reply_to', Array(Unicode,
            sub_name='inReplyTo',
            doc="(immutable) The value is identical to the "
                "value of header:In-Reply-To:asMessageIds."
        )),

        ('references', Array(Unicode,
            sub_name='references',
            doc="(immutable) The value is identical to the "
                "value of header:References:asMessageIds."
        )),

        ('sender', Array(EmailAddress,
            sub_name='sender',
            doc="(immutable) The value is identical to "
                "the value of header:Sender:asAddresses."
        )),

        ('from_', Array(Unicode,
            sub_name='from',
            doc="(immutable) The value is identical to "
                "the value of header:From:asAddresses."
        )),

        ('to', Array(Unicode,
            sub_name='to',
            doc="(immutable) The value is identical to "
                "the value of header:To:asAddresses."
        )),

        ('cc', Array(Unicode,
            sub_name='cc',
            doc="(immutable) The value is identical to "
                "the value of header:Cc:asAddresses."
        )),

        ('bcc', Array(Unicode,
            sub_name='bcc',
            doc="(immutable) The value is identical to "
                "the value of header:Bcc:asAddresses."
        )),

        ('reply_to', Unicode(
            sub_name='replyTo',
            doc="(immutable) The value is identical to "
                "the value of header:Reply-To:asAddresses."
        )),

        ('subject', Unicode(
            sub_name='subject',
            doc="(immutable) The value is identical to the value "
                "of header:Subject:asText."
        )),

        ('sent_at', DateTime(
            sub_name='sentAt',
            doc="(immutable; default on creation: current server "
                "time) The value is identical to the value of "
                "header:Date:asDate."
        )),

        #
        # Body Parts
        #
        ('body_structure', Array(EmailBodyPart,
            sub_name='bodyStructure',
            doc="(immutable) This is the full MIME structure of the message "
                "body, without recursing into message/rfc822 or message/global "
                "parts. Note that EmailBodyParts may have subParts if they "
                "are of type multipart."
        )),

        ('body_values', AnyDict(
            sub_name='bodyValues',
            doc="(immutable) This is a map of partId to an EmailBodyValue "
                "object for none, some, or all text parts. Which parts are "
                "included and whether the value is truncated is determined "
                "by various arguments to Email/get and Email/parse."
        )),

        ('text_body', Array(EmailBodyPart,
            sub_name='textBody',
            doc="(immutable) A list of text/plain, text/html, image, audio, "
                "and/or video parts to display (sequentially) as the message "
                "body, with a preference for text/plain when alternative "
                "versions are available."
        )),

        ('html_body', Array(EmailBodyPart,
            sub_name='htmlBody',
            doc="(immutable) A list of text/plain, text/html, image, audio, "
                "and/or video parts to display (sequentially) as the message "
                "body, with a preference for text/html when alternative "
                "versions are available."
        )),

        ('attachments', Array(EmailBodyPart,
            sub_name='attachments',
            doc="(immutable) A list, traversing depth-first, "
                "of all parts in bodyStructure that satisfy either of the "
                "following conditions:"
        )),

        ('has_attachment', M(Boolean(
            sub_name='hasAttachment', default=False,
            doc="(immutable; server-set) This is true if there are "
                "one or more parts in the message that a client UI should "
                "offer as downloadable. A server SHOULD set hasAttachment to "
                "true if the attachments list contains at least one item that "
                "does not have Content-Disposition: inline. The server MAY "
                "ignore parts in this list that are processed automatically "
                "in some way or are referenced as embedded images in one of "
                "the text/html parts of the message."
        ))),

        ('preview', Unicode(256,
            sub_name='preview', default=u'',
            doc="(immutable; server-set) A plaintext fragment of the "
                "message body. This is intended to be shown as a preview line "
                "when listing messages in the mail store and may be truncated "
                "when shown. The server may choose which part of the message "
                "to include in the preview; skipping quoted sections and "
                "salutations and collapsing white space can result in a more "
                "useful preview."
        )),
    ]