def test_adapt_by(self):
    self._testValidation(V.AdaptBy(hex, traps=TypeError),
                         invalid=[1.2, "1"],
                         adapted=[(255, "0xff"), (0, "0x0")])
    self._testValidation(V.AdaptBy(int, traps=(ValueError, TypeError)),
                         invalid=["12b", "1.2", {}, (), []],
                         adapted=[(12, 12), ("12", 12), (1.2, 1)])
    self.assertRaises(TypeError, V.AdaptBy(hex, traps=()).validate, 1.2)
def test_allof(self):
    self._testValidation(
        V.AllOf({"id": "integer"}, V.Mapping("string", "number")),
        valid=[{"id": 3}, {"id": 3, "bar": 4.5}],
        invalid=[
            {"id": 1.1, "bar": 4.5},
            {"id": 3, "bar": True},
            {"id": 3, 12: 4.5},
        ])
    self._testValidation(
        V.AllOf("number",
                lambda x: x > 0,
                V.AdaptBy(datetime.utcfromtimestamp)),
        adapted=[(1373475820, datetime(2013, 7, 10, 17, 3, 40))],
        invalid=["1373475820", -1373475820])
def __init__(self, allow_extra):
    schema = {
        '+id': int,
        '+client_name': V.String(max_length=255),
        '+sort_index': float,
        'client_phone': V.Nullable(V.String(max_length=255)),
        'location': {'latitude': float, 'longitude': float},
        'contractor': V.Range(V.AdaptTo(int), min_value=1),
        'upstream_http_referrer': V.Nullable(V.String(max_length=1023)),
        '+grecaptcha_response': V.String(min_length=20, max_length=1000),
        'last_updated': V.AdaptBy(dateutil.parser.parse),
        'skills': V.Nullable(
            [
                {
                    '+subject': str,
                    '+subject_id': int,
                    '+category': str,
                    '+qual_level': str,
                    '+qual_level_id': int,
                    'qual_level_ranking': V.Nullable(float, default=0),
                }
            ],
            default=[],
        ),
    }
    self.validator = V.parse(schema, additional_properties=allow_extra)
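# --- Hedged usage sketch (not part of the original benchmark) ----------------
# The schema above marks required keys with "+" and fills Nullable defaults
# during validation.  The wrapping class and its `allow_extra` flag belong to
# the surrounding harness, so this standalone snippet re-parses a cut-down
# version of the schema instead of importing it.
import valideer as V

_demo = V.parse({
    '+id': int,
    '+client_name': V.String(max_length=255),
    'skills': V.Nullable([{'+subject': str}], default=[]),
})
# validate() returns the adapted document; the None `skills` becomes its default.
print(_demo.validate({'id': 1, 'client_name': 'Jane Doe', 'skills': None}))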
def test_chainof(self):
    self._testValidation(
        V.ChainOf(V.AdaptTo(int),
                  V.Condition(lambda x: x > 0),
                  V.AdaptBy(datetime.utcfromtimestamp)),
        adapted=[(1373475820, datetime(2013, 7, 10, 17, 3, 40)),
                 ("1373475820", datetime(2013, 7, 10, 17, 3, 40))],
        invalid=["nan", -1373475820])
def mark(env):
    def name(value):
        if isinstance(value, str):
            value = [value]
        return [v for v in value if v]

    schema = v.parse({
        '+action': v.Enum(('+', '-', '=')),
        '+name': v.AdaptBy(name),
        '+ids': [int],
        'old_name': v.AdaptBy(name),
        'thread': v.Nullable(bool, False),
        'last': v.Nullable(str)
    })
    data = schema.validate(env.request.json)
    if not data['ids']:
        return 'OK'

    ids = tuple(data['ids'])
    if data['thread']:
        i = env.sql('''
            SELECT id FROM emails WHERE thrid IN %s AND created <= %s
        ''', [ids, data['last']])
        ids = tuple(r[0] for r in i)

    mark = ft.partial(syncer.mark, env, ids=ids, new=True)
    if data['action'] == '=':
        if data.get('old_name') is None:
            raise ValueError('Missing parameter "old_name" for %r' % data)
        if data['old_name'] == data['name']:
            return []
        mark('-', set(data['old_name']) - set(data['name']))
        mark('+', set(data['name']) - set(data['old_name']))
        return 'OK'

    mark(data['action'], data['name'])
    return 'OK'
def get_conf(conf=None):
    if not conf:
        with open('conf.json', 'br') as f:
            conf = json.loads(f.read().decode())

    exists = v.Condition(lambda v: Path(v).exists())
    strip_slash = v.AdaptBy(lambda v: str(v).rstrip('/'))
    app_dir = Path(__file__).parent.resolve()
    base_dir = app_dir.parent
    log_handlers = ['console_simple', 'console_detail', 'file']
    with v.parsing(additional_properties=False):
        schema = v.parse({
            'debug': v.Nullable(bool, False),
            '+pg_username': str,
            '+pg_password': str,
            '+cookie_secret': str,
            'google_id': str,
            'google_secret': str,
            'readonly': v.Nullable(bool, True),
            'enabled': v.Nullable(bool, True),
            'log_handlers': (
                v.Nullable([v.Enum(log_handlers)], log_handlers[:1])
            ),
            'log_level': v.Nullable(str, 'DEBUG'),
            'log_file': v.Nullable(str, ''),
            'path_attachments': v.Nullable(str, str(base_dir / 'attachments')),
            'path_theme': v.Nullable(exists, str(base_dir / 'front')),
            'imap_body_maxsize': v.Nullable(int, 50 * 1024 * 1024),
            'imap_batch_size': v.Nullable(int, 2000),
            'imap_debug': v.Nullable(int, 0),
            'smtp_debug': v.Nullable(bool, False),
            'async_pool': v.Nullable(int, 0),
            'ui_ga_id': v.Nullable(str, ''),
            'ui_is_public': v.Nullable(bool, False),
            'ui_use_names': v.Nullable(bool, True),
            'ui_per_page': v.Nullable(int, 100),
            'ui_greeting': v.Nullable(str, ''),
            'ui_ws_proxy': v.Nullable(bool, False),
            'ui_ws_enabled': v.Nullable(bool, True),
            'ui_ws_timeout': v.Nullable(int, 1000),
            'ui_firebug': v.Nullable(bool, False),
            'ui_tiny_thread': v.Nullable(int, 5),
            'ui_by_thread': v.Nullable(bool, False),
            'from_emails': v.Nullable([str], []),
            'host_ws': v.Nullable(str, 'ws://localhost/async/'),
            'host_web': v.Nullable(strip_slash, 'http://localhost:8000'),
            'search_lang': v.Nullable([str], ['simple', 'english']),
        })

    conf = schema.validate(conf)
    path = Path(conf['path_attachments'])
    if not path.exists():
        path.mkdir()
    return conf
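# --- Hedged sketch (illustration only) ----------------------------------------
# Two behaviours get_conf() relies on: the `strip_slash` adaptor trims a
# trailing "/" from host_web, and Nullable substitutes its default when the
# value is None.  Standalone, assuming only valideer is installed.
import valideer as v

_strip_slash = v.AdaptBy(lambda s: str(s).rstrip('/'))
_mini = v.parse({'host_web': v.Nullable(_strip_slash, 'http://localhost:8000')})
print(_mini.validate({'host_web': 'http://example.com/'}))  # trailing slash removed
print(_mini.validate({'host_web': None}))                   # default applied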
    return [x]


with V.parsing(required_properties=True, additional_properties=V.Object.REMOVE):
    CONFIG_VALIDATOR = V.parse({
        "coordinator_endpoint": xutils.Endpoint(xutils.Endpoint.Side.CONNECT),
        "?coordinator_timeout": "integer",
        "?start_delay": "integer",
        "?github_secret": "string",
        "?github": V.Mapping("string", V.AdaptBy(_list_wrap))
    })

XBBS_CFG_DIR = os.getenv("XBBS_CFG_DIR", "/etc/xbbs")
with open(path.join(XBBS_CFG_DIR, "webhooks.toml"), "r") as fcfg:
    cfg = CONFIG_VALIDATOR.validate(toml.load(fcfg))

coordinator = cfg["coordinator_endpoint"]
cmd_timeout = cfg.get("coordinator_timeout", 1500)
start_delay = cfg.get("start_delay", 600)
hmac_key = cfg.get("github_secret", None)
github_mapping = cfg.get("github", {})


def verify_sig(data, secret, signature):
    s = hmac.new(secret.encode("utf-8"), data, digestmod="sha256")
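# --- Hedged sketch (illustration only) ----------------------------------------
# Inside V.parsing(required_properties=True), bare keys are required and keys
# prefixed with "?" are optional -- the inverse of the "+" convention used in
# the other snippets here.  The xutils.Endpoint handling is omitted.
import valideer as V

with V.parsing(required_properties=True):
    _demo = V.parse({"coordinator_endpoint": "string", "?start_delay": "integer"})
print(_demo.validate({"coordinator_endpoint": "tcp://127.0.0.1:16999"}))
try:
    _demo.validate({})  # missing the required key
except V.ValidationError as exc:
    print(exc)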
def draft(env, thrid, action):
    saved = env.storage('compose', thrid=thrid)
    saved_path = env.files.subpath('compose', thrid=thrid)
    if action == 'preview':
        schema = v.parse({
            '+fr': str,
            '+to': str,
            '+subj': str,
            '+body': str,
            '+quoted': bool,
            '+forward': bool,
            '+id': v.Nullable(str),
            'quote': v.Nullable(str)
        })
        data = schema.validate(env.request.json)
        if env.request.args.get('save', False):
            saved.set(data)
        return get_html(data['body'], data.get('quote', ''))
    elif action == 'upload':
        count = env.request.form.get('count', type=int)
        files = []
        for n, i in enumerate(env.request.files.getlist('files'), count):
            path = '/'.join([saved_path, str(n), f.slugify(i.filename)])
            env.files.write(path, i.stream.read())
            files.append(env.files.to_dict(path, i.mimetype, i.filename))
        return files
    elif action == 'send':
        import dns.resolver
        import dns.exception

        class Email(v.Validator):
            def validate(self, value, adapt=True):
                if not value:
                    raise v.ValidationError('No email')
                addr = parseaddr(value)[1]
                hostname = addr[addr.find('@') + 1:]
                try:
                    dns.resolver.query(hostname, 'MX')
                except dns.exception.DNSException:
                    raise v.ValidationError('No MX record for %s' % hostname)
                return value

        schema = v.parse({
            '+to': v.ChainOf(
                v.AdaptBy(lambda v: [i.strip() for i in v.split(',')]),
                [Email]
            ),
            '+fr': Email,
            '+subj': str,
            '+body': str,
            'id': v.Nullable(str),
            'quote': v.Nullable(str, ''),
        })
        msg = schema.validate(env.request.json)
        if msg.get('id'):
            parent = env.sql('''
                SELECT thrid, msgid, refs FROM emails WHERE id=%s LIMIT 1
            ''', [msg['id']]).fetchone()
            msg['in_reply_to'] = parent.get('msgid')
            msg['refs'] = parent.get('refs', [])[-10:]
        else:
            parent = {}

        sendmail(env, msg)
        if saved.get():
            draft(env, thrid, 'rm')
        syncer.sync_gmail(env, env.email, only=['\\All'], fast=1, force=1)
        url = url_query(env, 'in', '\\Sent')
        if parent.get('thrid'):
            url = env.url_for('thread', {'id': parent['thrid']})
        return {'url': url}
    elif action == 'rm':
        if saved.get({}).get('files'):
            env.files.rm(saved_path)
        saved.rm()
        return 'OK'

    env.abort(400)
class TestSuiteCitySDKKlarschiff(citysdk311.TestSuiteCitySDK):
    SCHEMA_REQUESTSEXT = {
        "+service_requests": {
            "+request": [{
                "extended_attributes": {
                    "title": "string",  # TODO: are EXT attributes optional even if requested?
                    "service_object_id": "string",
                    "service_object_type": "string",
                    "detailed_status": valideer.Enum([
                        "RECEIVED", "IN_PROCESS", "PROCESSED", "ARCHIVED", "REJECTED"
                    ]),
                    "detailed_status_datetime": valideer.AdaptBy(parser.parse),
                    "media_urls": {
                        "media_url": ["string"]
                    }
                },
                "photo_required": "boolean",
                "trust": "integer",
                "votes": "integer",
                "+service_request_id": "string",
                "+status": valideer.Enum(["OPEN", "CLOSED"]),
                "status_notes": "string",
                "+service_name": "string",
                "+service_code": "string",
                "description": "string",
                "agency_responsible": "string",
                "service_notice": "string",
                "requested_datetime": valideer.AdaptBy(parser.parse),
                "updated_datetime": valideer.AdaptBy(parser.parse),
                "expected_datetime": valideer.AdaptBy(parser.parse),
                "address": "string",  # TODO: Make Position XOR address
                "address_id": "string",
                "zipcode": "string",
                "+lat": valideer.AdaptTo(float, "number"),
                "+long": valideer.AdaptTo(float, "number"),
                "media_url": "string",
            }]  # TODO: make URL regex
        }
    }

    SCHEMA_COMMENTS = {
        "+comments": {  # TODO: Conditional Array?
            "+comment": {
                "+id": "integer",
                "+jurisdiction_id": "string",
                "+comment": "string",
                "+datetime": valideer.AdaptBy(parser.parse),
                "+service_request_id": "string",
                "+author": "string",  # TODO: Validate as email
            }
        }
    }

    SCHEMA_NOTES = {
        "+notes": {  # TODO: Conditional Array?
            "+note": {
                "+jurisdiction_id": "string",
                "+comment": "string",
                "+datetime": valideer.AdaptBy(parser.parse),
                "+service_request_id": "string",
                "+author": "string"
            }
        }
    }

    def __init__(self, host):
        self.api = CitysdkKlarschiff(host)
        citysdk311.TestSuiteCitySDK.__init__(self, host, self.api)

    def testRequestsExtKS(self):
        print("Testing GET extended requests"),
        repl = self.api.getRequests()
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        replFields = self.xmlToDict(repl.content)
        print("- XML structure"),
        with valideer.parsing(additional_properties=True):
            validator = valideer.parse(self.SCHEMA_REQUESTSEXT)
        try:
            validator.validate(replFields)
            print("OK")
        except ValidationError as e:
            print("differs at %s " % (str(e)))

    def testGetRequestsExFilteredKS(self, agency):
        print("Testing GET requests with extended filters")
        print("-agency"),
        repl = self.api.getRequests(lat=12.13955, long=54.09138, radius=2000)  # TODO: calculate that point
        self.testEmptyRequestSet(repl)
        print("-detailed_status"),
        repl = self.api.getRequests(statusEx="RECEIVED")
        self.testEmptyRequestSet(repl)

    def testGetComments(self, apikey):
        repl = self.api.getComments(66, apikey)
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        replFields = self.xmlToDict(repl.content)
        print("- XML structure"),
        with valideer.parsing(additional_properties=True):
            validator = valideer.parse(self.SCHEMA_COMMENTS)
        try:
            validator.validate(replFields)
            print("OK")
        except ValidationError as e:
            print("differs at %s " % (str(e)))

    def testGetNotes(self, apikey):
        repl = self.api.getNotes(66, apikey)
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        replFields = self.xmlToDict(repl.content)
        print("- XML structure"),
        with valideer.parsing(additional_properties=True):
            validator = valideer.parse(self.SCHEMA_NOTES)
        try:
            validator.validate(replFields)
            print("OK")
        except ValidationError as e:
            print("differs at %s " % (str(e)))
        xutils.Endpoint(), {
            "bind": xutils.Endpoint(xutils.Endpoint.Side.BIND),
            "connect": xutils.Endpoint(xutils.Endpoint.Side.CONNECT)
        }))


def _receive_adaptor(x):
    if isinstance(x, str):
        return {"bind": x, "connect": x}
    return x


@V.accepts(x="string")
def _path_exists(x):
    return os.access(x, os.R_OK)


CONFIG_VALIDATOR = V.parse({
    "command_endpoint": V.AdaptBy(_receive_adaptor),
    "project_base": "string",
    "build_root": V.AllOf("string", path.isabs),
    "intake": V.AdaptBy(_receive_adaptor),
    "worker_endpoint": xutils.Endpoint(xutils.Endpoint.Side.BIND),
    # use something like a C identifier, except disallow underscore as a
    # first character too. this is so that we have a namespace for xbbs
    # internal directories, such as collection directories
    "projects": V.Mapping(
        xutils.PROJECT_REGEX, {
            "git": "string",
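# --- Hedged sketch (illustration only) ----------------------------------------
# _receive_adaptor above lets an endpoint be written either as a bare string or
# as an explicit {"bind": ..., "connect": ...} mapping; a bare string is
# expanded to both roles.  The endpoint value here is made up.
import valideer as V


def _demo_receive_adaptor(x):
    if isinstance(x, str):
        return {"bind": x, "connect": x}
    return x


_intake = V.AdaptBy(_demo_receive_adaptor)
print(_intake.validate("tcp://127.0.0.1:16999"))
# -> {'bind': 'tcp://127.0.0.1:16999', 'connect': 'tcp://127.0.0.1:16999'}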
class TestSuite(object):
    '''Open311 basic tests. Inherited classes just run their own tests.'''

    SCHEMA_SERVICE = {
        "+services": {
            "+service": [{
                "+service_code": "string",
                "+service_name": "string",
                "description": "string",
                "+metadata": valideer.Enum(["true", "false"]),
                "+type": valideer.Enum(["realtime", "batch", "blackbox"]),
                "+keywords": "string",
                "group": "string",
            }]
        }
    }

    SCHEMA_DEF = {
        "+service_code": "string",
        "+attributes": [{
            "+variable": valideer.Enum(["true", "false"]),
            "+code": "string",
            "+datatype": valideer.Enum([
                "string", "number", "datetime", "text",
                "singlevaluelist", "multivaluelist"
            ]),
            "+required": valideer.Enum(["true", "false"]),
            "+datatype_description": "string",
            "+order": valideer.Range(valideer.Number, 0),
            "description": "string"
        }]
    }

    SCHEMA_REQUESTS = {
        "+service_requests": {
            "+request": [{
                "+service_request_id": "string",
                "+status": valideer.Enum(["open", "closed"]),
                "status_notes": "string",
                "+service_name": "string",
                "+service_code": "string",
                "description": "string",
                "agency_responsible": "string",
                "service_notice": "string",
                "requested_datetime": valideer.AdaptBy(parser.parse),
                "updated_datetime": valideer.AdaptBy(parser.parse),
                "expected_datetime": valideer.AdaptBy(parser.parse),
                "address": "string",  # TODO: Make Position XOR address
                "address_id": "string",
                "zipcode": "string",
                "+lat": valideer.AdaptTo(float, "number"),
                "+long": valideer.AdaptTo(float, "number"),
                "media_url": "string",
            }]  # TODO: make URL regex
        }
    }

    SCHEMA_REQUEST = {
        "+service_requests": {  # TODO: Try to remove slightly redundant schema
            "+request": {
                "+service_request_id": "string",
                "+status": valideer.Enum(["open", "closed"]),
                "status_notes": "string",
                "+service_name": "string",
                "+service_code": "string",
                "description": "string",
                "agency_responsible": "string",
                "service_notice": "string",
                "requested_datetime": valideer.AdaptBy(parser.parse),
                "updated_datetime": valideer.AdaptBy(parser.parse),
                "expected_datetime": valideer.AdaptBy(parser.parse),
                "address": "string",  # TODO: Make Position XOR address
                "address_id": "string",
                "+zipcode": "string",
                "+lat": valideer.AdaptTo(float, "number"),
                "+long": valideer.AdaptTo(float, "number"),
                "media_url": "string",
            }  # TODO: make URL regex
        }
    }

    @staticmethod
    def __noNone(path, key, value):
        '''Replace None values (e.g. from empty <key/> elements) with empty strings.'''
        if value is None:
            return key, ""
        # unwrap the extra typing fields xmltodict adds ("@nil", "@type", "#text")
        if isinstance(value, dict):
            if "@nil" in value:
                return key, ""
            if "@type" in value:
                if "#text" in value:
                    return key, value["#text"]
            # if value.hasKey("@type"): or all keys starting with "@"
        return key, value

    def __init__(self, host, api=None, apikey=None):
        self.host = host
        if api is None:
            api = georeporter(host, apikey)
        self.api = api
        self.__cacheTransfers()

    def showDifferences(self, expect, result, msgTest):
        '''Helper for a visual compare of two lists.'''
        # TODO: Add normalizing option - lowercase, trim, ...
        s = set(expect)
        diff = [x for x in result if x not in s]
        print("- " + msgTest),
        if len(diff) == 0:
            print("OK")
        else:
            print("differs at: %s (expected: %s)" % (str(diff), str(expect)))

    def __cacheTransfers(self):
        '''Call the API just once and cache the default responses.'''
        print "caching..."
        print "-",
        self.cache_services = self.api.getServices()
        print "-",
        self.cache_requests = self.api.getRequests()
        # TODO: Write to disk?
    def __getFirstServiceCode(self):
        serviceCode = None
        root = etree.fromstring(self.cache_services.content)
        for service in root.getchildren():
            fields = self.xmlToDict(etree.tostring(service))
            try:
                if fields["metadata"] == "true":
                    serviceCode = fields["service_code"]
                    return serviceCode
            except:
                pass  # TODO: Log that metadata is mandatory?
        return serviceCode

    def __getFirstRequestId(self):
        srid = None
        root = etree.fromstring(self.cache_requests.content)
        for servicereq in root.getchildren():
            fields = self.xmlToDict(etree.tostring(servicereq))["request"]
            if "service_request_id" in fields:
                srid = fields["service_request_id"]
                return srid
        return srid

    def xmlToDict(self, repl):
        '''Transform an XML tree into a dict tree.'''
        tmp = xmltodict.parse(repl, postprocessor=self.__noNone)
        replFields = json.loads(json.dumps(tmp))
        return replFields

    # TODO: Test discovery

    def testGetServices(self):
        print("Testing GET services")
        repl = self.cache_services
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        print("- XML structure"),
        with valideer.parsing(additional_properties=True):
            validator = valideer.parse(self.SCHEMA_SERVICE)
        replFields = self.xmlToDict(repl.content)
        try:
            validator.validate(replFields)
            print("OK")
        except ValidationError as e:
            print("differs %s " % (str(e)))

    def testGetServicsDef(self):  # TODO: walk through all definitions
        print("Testing GET service definition"),
        firstCode = self.__getFirstServiceCode()
        if firstCode is not None:
            repl = self.api.getServiceDef(firstCode)
            expect = [200, "UTF-8", "text/xml; charset=utf-8"]
            resp = [
                repl.status_code, repl.encoding, repl.headers["content-type"]
            ]
            self.showDifferences(expect, resp, "http skeleton")
            print("- XML structure"),
            with valideer.parsing(additional_properties=True):
                validator = valideer.parse(self.SCHEMA_DEF)
            replFields = self.xmlToDict(repl)
            try:
                validator.validate(replFields)
                print("OK")
            except ValidationError as e:
                print("differs at %s " % (str(e)))
        else:
            print "(No service definitions available for testing)"

    def testCreateRequest(self, email="*****@*****.**", lat=54.0867,
                          lon=12.1359, descr="test", title="test", code="18"):
        # TODO: Extract attributes
        print("Testing POST request"),
        repl = self.api.createRequest(lat, lon, code, email, descr,
                                      attr={"title": title},
                                      jurisdinction="rostock.de",
                                      tolerantSSL=True)
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        # TODO: Attributes of metadata / service definitions
        # TODO: what more can we check here?
        # TODO: Some might return a token instead

    def testGetRequests(self):
        print("Testing GET requests"),
        repl = self.cache_requests
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        print("- XML structure"),
        with valideer.parsing(additional_properties=True):
            validator = valideer.parse(self.SCHEMA_REQUESTS)
        replFields = self.xmlToDict(repl.content)
        try:
            validator.validate(replFields)
            print("OK")
        except ValidationError as e:
            print("differs at %s " % (str(e)))

    def testGetRequestsFiltered(self):
        print("Testing GET requests with filters")
        # TODO: How to generate independent queries?
        # TODO: How to check logic on expected results?
print("-service_request_id"), repl = self.api.getRequests(service_request_id="3,5") self.testEmptyRequestSet(repl) print("-service_code") repl = self.api.getRequests(service_code=18) self.testEmptyRequestSet(repl) print("-start_date") past = datetime.datetime.utcnow() - datetime.timedelta(2) repl = self.api.getRequests(start_date=past.isoformat()) self.testEmptyRequestSet(repl) print("-end_date") repl = self.api.getRequests(end_date=past.isoformat()) self.testEmptyRequestSet(repl) print("-status") repl = self.api.getRequests(status="closed") self.testEmptyRequestSet(repl) #TODO: None older than 90days? def testGetRequest(self): print("Testing GET request"), srid = self.__getFirstRequestId() repl = self.api.getRequest(srid) expect = [200, "UTF-8", "text/xml; charset=utf-8"] resp = [repl.status_code, repl.encoding, repl.headers["content-type"]] self.showDifferences(expect, resp, "http skelleton") print("- XML structure") with valideer.parsing(additional_properties=True): validator = valideer.parse(self.SCHEMA_REQUEST) replFields = self.xmlToDict(repl.content) try: validator.validate(replFields) print("OK") except ValidationError as e: print("differs at %s " % (str(e))) def testGetRequestFromToken(self): print("Testing GET ID by token"), mytoken = "123" #TODO: Implement lookup for an valid token? repl = self.api.getIdByToken(mytoken) expect = [200, "UTF-8", "text/xml; charset=utf-8"] resp = [repl.status_code, repl.encoding, repl.headers["content-type"]] self.showDifferences(expect, resp, "http skelleton") def testEmptyRequestSet(self, repl): expect = [200, "UTF-8", "text/xml; charset=utf-8"] resp = [repl.status_code, repl.encoding, repl.headers["content-type"]] self.showDifferences(expect, resp, "http skelleton") print "- queryset", replFields = self.xmlToDict(repl.content) amount = 1 #if no subdicts, just a single request try: for item in replFields["service_requests"]["request"]: if isinstance(item, dict): amount += 1 if (amount > 0): print "ok (%d)" % amount else: print("failed (empty)") except KeyError: print("failed (empty)")
import requests
import toml
import valideer as V
import yaml
import zmq.green as zmq
from logbook import Logger, StderrHandler, StreamHandler

import xbbs.messages as msgs
import xbbs.util as xutils

with V.parsing(required_properties=True, additional_properties=None):
    CONFIG_VALIDATOR = V.parse({
        "job_endpoint": xutils.Endpoint(xutils.Endpoint.Side.BIND),
        "capabilities": V.Nullable(V.AdaptBy(xutils.list_to_set), set()),
    })


@attr.s
class XbbsWorker:
    current_project = attr.ib(default=None)
    current_job = attr.ib(default=None)
    zmq = attr.ib(default=zmq.Context.instance())


def download(url, to):
    src = urlparse(url, scheme='file')
    if src.scheme == 'file':
        shutil.copy(src.path, to)
    else:
class TestSuiteCitySDK(basic311.TestSuite):
    '''Cover CitySDK specific tests.'''

    SCHEMA_REQUESTSEXT = {
        "+service_requests": {
            "+request": [{
                "extended_attributes": {
                    "title": "string",  # TODO: are EXT attributes optional even if requested?
                    "service_object_id": "string",
                    "service_object_type": "string",
                    "detailed_status": valideer.Enum([
                        "RECEIVED", "IN_PROCESS", "PROCESSED", "ARCHIVED", "REJECTED"
                    ]),  # TODO: Add Public works status
                    "media_urls": {
                        "media_url": ["string"]
                    }
                },  # TODO: some implement
                "+service_request_id": "string",
                "+status": valideer.Enum(["open", "closed"]),
                "status_notes": "string",
                "+service_name": "string",
                "+service_code": "string",
                "description": "string",
                "agency_responsible": "string",
                "service_notice": "string",
                "requested_datetime": valideer.AdaptBy(parser.parse),
                "updated_datetime": valideer.AdaptBy(parser.parse),
                "expected_datetime": valideer.AdaptBy(parser.parse),
                "address": "string",  # TODO: Make Position XOR address
                "address_id": "string",
                "zipcode": "string",
                "+lat": valideer.AdaptTo(float, "number"),
                "+long": valideer.AdaptTo(float, "number"),
                "media_url": "string",
            }]  # TODO: make URL regex
        }
    }

    def __init__(self, host, apikey, api=None):
        if api is None:
            api = CitySDKParticipation(host, apikey)
        basic311.TestSuite.__init__(self, host, api)

    def testLocale(self, locale):
        print("Testing GET services (%s)" % locale),
        repl = self.api.getServices(locale)  # TODO: Exact specs for locale codes?
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        # Default and custom locale should differ
        replFields = self.xmlToDict(repl.content)
        if len(replFields["services"]) > 0:
            serviceLocale = replFields["services"]["service"][0]
            replFieldsEN = self.xmlToDict(self.cache_services.content)
            serviceEN = replFieldsEN["services"]["service"][0]
            if not (serviceLocale["service_name"] == serviceEN["service_name"]):
                print("ok")
            else:
                print("is ignored (same as defaults)")
        else:
            print "empty"

    def testRequestsExt(self):
        print("Testing GET extended requests"),
        # repl = self.cache_requests
        repl = self.api.getRequests()
        expect = [200, "UTF-8", "text/xml; charset=utf-8"]
        resp = [repl.status_code, repl.encoding, repl.headers["content-type"]]
        self.showDifferences(expect, resp, "http skeleton")
        replFields = self.xmlToDict(repl.content)
        print("- limits"),
        l = len(replFields["service_requests"])
        if l > 200:  # 90 days or first 200
            print("too much (%d)" % l)
        else:
            print("ok")
        print("- XML structure"),
        with valideer.parsing(additional_properties=True):
            validator = valideer.parse(self.SCHEMA_REQUESTSEXT)
        try:
            validator.validate(replFields)
            print("OK")
        except ValidationError as e:
            print("differs at %s " % (str(e)))

    def testGetRequestsExtFiltered(self):
        print("Testing GET requests with extended filters")
        print("-geospatial"),
        repl = self.api.getRequests(lat=12.13955, long=54.09138, radius=2000)  # TODO: calculate that point
        self.testEmptyRequestSet(repl)
        print("-updated_after")
        past = datetime.datetime.utcnow() - datetime.timedelta(7)
        repl = self.api.getRequests(updated_after=past.isoformat())
        self.testEmptyRequestSet(repl)
        print("-updated_before")
        repl = self.api.getRequests(updated_before=past.isoformat())
        self.testEmptyRequestSet(repl)
class JobRequest(BaseMessage):
    capabilities = attr.ib()

    _validator = V.parse({"capabilities": V.AdaptBy(xutils.list_to_set)})
class WorkMessage(BaseMessage):
    project = attr.ib()
    git = attr.ib()
    revision = attr.ib()

    _validator = V.parse({
        "project": "string",
        "git": "string",
        "revision": "string"
    })


PKG_TOOL_VALIDATOR = V.parse(
    V.Mapping(
        "string", {
            "version": "string",
            "architecture": V.AnyOf("string", V.AdaptBy(xutils.list_to_set)),
        }))


@attr.s
class JobMessage(BaseMessage):
    @staticmethod
    def _filter(x, v):
        return v is not None

    project = attr.ib()
    job = attr.ib()
    repository = attr.ib()
    revision = attr.ib()
    output = attr.ib()
    build_root = attr.ib()
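# --- Hedged sketch (illustration only) ----------------------------------------
# The "architecture" field above accepts either a plain string or a list that
# AdaptBy(xutils.list_to_set) turns into a set.  A local stand-in (`set`) is
# used for xutils.list_to_set so the snippet runs on its own.
import valideer as V

_arch = V.AnyOf("string", V.AdaptBy(set))
print(_arch.validate("x86_64"))               # left as a string
print(_arch.validate(["x86_64", "aarch64"]))  # adapted to a set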