def test_answers_report(self):
    """On success a JSON summary of the deletion is returned."""
    result = self.app.delete('/admin/alpha/')
    # assertEquals is a deprecated alias of assertEqual
    self.assertEqual(json.loads(result.body), {
        'success': True,
        'deleted': 1,
    })
def fetch_json2xx(url, content='', method='GET', credentials=None, headers=None,
                  multipart=False, ua='', timeout=50, caching=None):
    """Like `fetch2xx()` but JSON-decodes the returned content and returns only that.

    Raises TypeError if the server did not answer with a JSON content type.
    """
    status, rheaders, rcontent = fetch2xx(url, content, method, credentials,
                                          headers, multipart, ua, timeout, caching)
    content_type = rheaders.get('content-type', '')
    if not content_type.startswith('application/json'):
        raise TypeError(u"Ungueltiger Content-Type %r: %r" % (content_type, rcontent))
    return hujson.loads(rcontent)
def parse_json(content):
    """Parse JSON response

    >>> parse_json('{"messages": [{"url": "http://example.com/q/45054/"}]}')
    [u'http://example.com/q/45054/']
    """
    payload = json.loads(content)
    urls = []
    for message in payload.get('messages', []):
        urls.append(message.get('url'))
    return urls
def decode(data):
    """Decode a message from a valid wire representation.

    Besides the types familiar from Python's json module it can
    unserialize Decimal() objects.

    >>> decode('{"created_by":"test","guid":123,"num":5.00}')
    {u'guid': 123, u'num': Decimal('5.00'), u'created_by': u'test'}
    """
    # this module is superseded; point callers at the replacement
    warnings.warn("hutools.humessaging is deprecated use cs.messaging",
                  DeprecationWarning, stacklevel=2)
    return json.loads(data)
def fetch_json2xx(url, content='', method='GET', credentials=None, headers=None,
                  multipart=False, ua='', timeout=25):
    """Like `fetch2xx()` but JSON-decodes the returned content and returns only that.

    Raises TypeError if the server did not answer with a JSON content type.
    """
    status, rheaders, rcontent = fetch2xx(url, content, method, credentials,
                                          headers, multipart, ua, timeout)
    content_type = rheaders.get('content-type', '')
    if not content_type.startswith('application/json'):
        # use %r like the sibling fetch_json2xx() variant so arbitrary reply
        # bytes are shown escaped instead of being interpolated verbatim
        raise TypeError(u"Ungueltiger Content-Type %r: %r" % (content_type, rcontent))
    return hujson.loads(rcontent)
def decodingreturnhandler(status, rheaders, rcontent):
    """Closure: JSON-decode the reply and pass the result to `returnhandler`."""
    if status < 200 or status >= 300:
        raise exceptions.WrongStatusCode(u"%s: Fehler: %r" % (status, rcontent))
    # Warning! httplib2 is case sensitive for header field names.
    content_type = rheaders.get('Content-Type', '')
    if not content_type.startswith('application/json'):
        logging.debug("no valid content type: %r", rheaders)
        # There seems to be an interesting issue with the AppEngine frontend
        # cache servers: when serving from the cache to an AppEngine client
        # (data being requested from another AppEngine application) the
        # Content-Type header seems to get dropped completely. So we only
        # raise on a mismatched header but ignore a missing one. So far this
        # has only been observed with async requests.
        if rheaders.get('Content-Type', None) is not None:
            raise TypeError(u"%s: Ungueltiger Content-Type %r: %r"
                            % (url, rheaders.get('Content-Type', ''), rcontent))
    return returnhandler(hujson.loads(rcontent))
def raw_SQL(command, ua=''):
    """Execute raw SQL - may only be used by four-eyes-reviewed code.

    Signs the request with an HMAC over the URL, sends it to the SoftM
    Express host and returns the (usually JSON-decoded) reply.
    Raises SQLError on any non-200 answer.
    """
    args_encoded = urllib.urlencode({'query': command})
    url = "/raw?" + args_encoded
    url += '&ts=%s' % time.time()  # timestamp acts as a cache buster
    # sign request
    digest = hmac.new(_find_credentials(), url, hashlib.sha1).hexdigest()
    # BUGFIX: the Cache-Control header used to be clobbered by a second
    # `headers = {...}` assignment; send both headers as apparently intended.
    headers = {'Cache-Control': "no-cache", 'X-sig': digest}
    softmexpresshost = os.environ.get('SOFTMEXPRESSHOST', SOFTMEXPRESSHOST)
    url = 'http://' + softmexpresshost + url
    # We use GET with a query string in the URL for everything except
    # UPDATE/INSERT. That is dirty, see
    # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9.4
    # for the reasoning. (The previous SELECT special-case was dead code:
    # an unconditional `method = 'GET'` overrode it.)
    if command.startswith(('UPDATE', 'INSERT')):
        method = 'POST'
    else:
        method = 'GET'
    status, rheaders, content = huTools.http.fetch(
        url, method=method, headers=headers,
        content={'query': command, 'ua': ua},
        ua='%s/husoftm2.backend' % ua, timeout=3000)
    if status != 200:
        logging.error("%s %s", method, url)
        raise SQLError(status, content)
    # Not all replies are JSON encoded
    try:
        return hujson.loads(content)
    except Exception:
        return content
def _fetch_message_urls(self):
    """Ask the server for the URLs of the messages on the queue, returning them as a list.

    Possible exceptions: FmtpHttpError, FmtpFormatError
    """
    status, headers, body = http.fetch(self.queue_url, method='GET',
                                       credentials=self.credentials,
                                       headers={'Accept': 'application/json'})
    # check for HTTP errors
    if status != 200:
        raise FmtpHttpError('requested %s as Messagelist, got %s' % (self.queue_url, status))
    # parse the answer; if unparsable, raise an exception
    try:
        data = hujson.loads(body)
    except Exception:  # hujson exposes no specific parse error here
        raise FmtpFormatError('Expected to get a json messagelist at %s.' % self.queue_url)
    # collect the URLs; if the format is wrong, raise an exception
    try:
        return [msg['url'] for msg in data['messages']]
    except KeyError:
        # BUGFIX: queue_url was previously passed as a stray second argument
        # instead of being %-formatted into the message
        raise FmtpFormatError('Expected the message list at %s to have /messages[*]/url'
                              % self.queue_url)
def test_returns_correct_list(self):
    """The admin view lists deleted and live messages with their metadata."""
    result = self.app.get('/admin/alpha/')
    body = json.loads(result.body)
    expected = [
        {
            u'queue': u'alpha',
            u'guid': u'deleted',
            u'is_deleted': True,
            u'created_at': u'2011-03-23 00:00:00',
            u'deleted_at': u'2011-03-23 00:00:00',
            u'content_type': u'text/plain',
        },
        {
            u'queue': u'alpha',
            u'guid': u'alice',
            u'is_deleted': False,
            u'created_at': u'2011-03-23 00:00:00',
            u'deleted_at': None,
            u'content_type': u'text/plain',
        },
    ]
    self.assertEqual(body['messages'], expected)
def internelreturnhandler(status, rheaders, rcontent):
    """Closure: decode the raw backend reply and hand the mapped rows to `returnhandler`."""
    raw = execute_process_results(status, rcontent, rheaders)
    decoded_rows = hujson.loads(raw)
    mapped = query_process_results(querymappings, fields, decoded_rows, args, cachingtime)
    return returnhandler(mapped)
def query(tables=None, condition=None, fields=None, querymappings=None,
          joins=None, grouping=None, ordering=None, limit=None, ua='',
          cachingtime=300):
    r"""Execute a SELECT on the AS/400 turning the results in a list of dicts.

    In fields you can give a list of fields you are interested in. If fields is
    left empty the engine generates a list of field on it own by consulting the
    field mapping database in from fields.MAPPINGDIR.

    >>> query('ALK00', condition="LKLFSN=4034544") #doctest: +ELLIPSIS
    [{'lager': 100, ...}]

    To suppress mapping provide querymappings={} and fields=[].

    >>> query(tables=['XPN00'], condition="PNSANR=2255")
    [{'satznummer': 2255, 'preis': Decimal('16.10')}]

    >>> query(tables=['XPN00'], condition="PNSANR=2255",
    ...       fields=['PNSANR', 'PNPRB'], querymappings={})
    [(2255, Decimal('16.10'))]

    To get only certain fields give a list of fieldnames in fields=[...].

    >>> query(tables=['XPN00'], condition="PNSANR=2255", fields=['PNPRB'])
    [(Decimal('16.10'),)]

    Joins are straightforward if used with condition="<expression>":

    >>> query(['XPN00', 'XPR00'], condition="PNSANR=PRSANR and PNSANR=2255",
    ...       fields=['PRDTVO', 'PNPRB'])
    [{'preis': Decimal('16.10'), 'gueltig_ab_date': datetime.date(2004, 12, 16)}]

    Aggregate functions can be created by using the "grouping" keyword:

    >>> sorted(query('XLF00', fields=['LFARTN', 'SUM(LFMGLP)'], grouping=['LFARTN'],
    ...              condition="LFLGNR=3"))
    [(u'65166/01', u'0'), (u'65198', u'0'), (u'76095', u'0'), (u'76102', u'0'), (u'ED76095', u'0')]

    If desired "querymappings" can be used to return a list of dicts:

    >>> sorted(query('XLF00', fields=['LFARTN', 'SUM(LFMGLP)'], grouping=['LFARTN'],
    ...              condition="LFLGNR=3", querymappings={'LFARTN': 'artnr',
    ...              'SUM(LFMGLP)': 'menge'})) #doctest: +ELLIPSIS
    [{'menge': u'0', 'artnr': u'65166/01'}, {'menge': u'0', 'artnr': u'65198'}, ...]

    You can use 'joins' to define LEFT OUTER JOINs. E.g.:

    >>> rows = query(['XKD00'],
    ...              condition="KDKDNR='%8d'" % int(66669),
    ...              joins=[('XXC00', 'KDKDNR', 'XCADNR'),
    ...                     ('XKS00', 'KDKDNR', 'KSKDNR'),
    ...                     ('AKZ00', 'KDKDNR', 'KZKDNR')])

    Will result in "SELECT * FROM XKD00 LEFT OUTER JOIN XXC00 ON KDKDNR=XCADNR
    LEFT OUTER JOIN XKS00 ON KDKDNR=KSKDNR LEFT OUTER JOIN AKZ00 ON
    KDKDNR=KZKDNR WHERE KDKDNR=' 10001'".

    We also should be - to a certain degree - be Unicode aware:

    >>> query(u'XKD00', u"KDKDNR LIKE '%18287'")[0]['ort'].encode('utf8')
    'G\xc3\xbcnzburg'

    Results are cached for 300 seconds unless you set something else via the
    cachingtime parameter.
    """
    # normalize the sloppy parameters and build the backend request arguments
    args, bust_cache, querymappings, fields = query_prepare_parameters(
        tables, grouping, ordering, fields, joins, querymappings, ua,
        condition, limit, cachingtime)
    start = time.time()
    result = execute('sql', args, ua=ua, bust_cache=bust_cache)
    rows = hujson.loads(result)
    delta = time.time() - start
    if delta > 5:
        # make slow queries visible in the logs
        logging.warning("Slow (%.3fs) SQL query in %s", delta, args)
    # apply field mappings / tuple conversion and feed the result cache
    return query_process_results(querymappings, fields, rows, args, cachingtime)
def query(tables=None, condition=None, fields=None, querymappings=None,
          joins=None, grouping=None, ordering=None, limit=None, ua='',
          cachingtime=300):
    r"""Execute a SELECT on the AS/400 turning the results in a list of dicts.

    In fields you can give a list of fields you are interested in. If fields is
    left empty the engine generates a list of field on it own by consulting the
    field mapping database in from fields.MAPPINGDIR.

    >>> query('ALK00', condition="LKLFSN=4034544") #doctest: +ELLIPSIS
    [{'lager': 100, ...}]

    To suppress mapping provide querymappings={} and fields=[].

    >>> query(tables=['XPN00'], condition="PNSANR=2255")
    [{'satznummer': 2255, 'preis': Decimal('16.10')}]

    >>> query(tables=['XPN00'], condition="PNSANR=2255",
    ...       fields=['PNSANR', 'PNPRB'], querymappings={})
    [(2255, Decimal('16.10'))]

    To get only certain fields give a list of fieldnames in fields=[...].

    >>> query(tables=['XPN00'], condition="PNSANR=2255", fields=['PNPRB'])
    [(Decimal('16.10'),)]

    Joins are straightforward if used with condition="<expression>":

    >>> query(['XPN00', 'XPR00'], condition="PNSANR=PRSANR and PNSANR=2255",
    ...       fields=['PRDTVO', 'PNPRB'])
    [{'preis': Decimal('16.10'), 'gueltig_ab_date': datetime.date(2004, 12, 16)}]

    Aggregate functions can be created by using the "grouping" keyword:

    >>> sorted(query('XLF00', fields=['LFARTN', 'SUM(LFMGLP)'], grouping=['LFARTN'],
    ...              condition="LFLGNR=3"))
    [(u'65166/01', u'0'), (u'65198', u'0'), (u'76095', u'0'), (u'76102', u'0'), (u'ED76095', u'0')]

    If desired "querymappings" can be used to return a list of dicts:

    >>> sorted(query('XLF00', fields=['LFARTN', 'SUM(LFMGLP)'], grouping=['LFARTN'],
    ...              condition="LFLGNR=3", querymappings={'LFARTN': 'artnr',
    ...              'SUM(LFMGLP)': 'menge'})) #doctest: +ELLIPSIS
    [{'menge': u'0', 'artnr': u'65166/01'}, {'menge': u'0', 'artnr': u'65198'}, ...]

    You can use 'joins' to define LEFT OUTER JOINs. E.g.:

    >>> rows = query(['XKD00'],
    ...              condition="KDKDNR='%8d'" % int(66669),
    ...              joins=[('XXC00', 'KDKDNR', 'XCADNR'),
    ...                     ('XKS00', 'KDKDNR', 'KSKDNR'),
    ...                     ('AKZ00', 'KDKDNR', 'KZKDNR')])

    Will result in "SELECT * FROM XKD00 LEFT OUTER JOIN XXC00 ON KDKDNR=XCADNR
    LEFT OUTER JOIN XKS00 ON KDKDNR=KSKDNR LEFT OUTER JOIN AKZ00 ON
    KDKDNR=KZKDNR WHERE KDKDNR=' 10001'".

    We also should be - to a certain degree - be Unicode aware:

    >>> query(u'XKD00', u"KDKDNR LIKE '%18287'")[0]['ort'].encode('utf8')
    'G\xc3\xbcnzburg'

    Results are cached for 300 seconds unless you set something else via the
    cachingtime parameter.
    """
    # fixup sloppy parameter passing: promote bare strings to one-element
    # lists and missing sequence parameters to empty lists
    if isinstance(tables, basestring):
        tables = [tables]
    if isinstance(grouping, basestring):
        grouping = [grouping]
    if isinstance(ordering, basestring):
        ordering = [ordering]
    if isinstance(fields, basestring):
        fields = [fields]
    if not joins:
        joins = []
    if not grouping:
        grouping = []
    if not ordering:
        ordering = []
    if not fields:
        fields = []
    tablenames = [_get_tablename(x) for x in tables]
    if querymappings == {} and not fields:
        raise RuntimeError("Please give fieldnames.")
    if querymappings is None and len(fields) != 1:
        querymappings = {}
    # NOTE(review): if querymappings is None and exactly one field is given,
    # querymappings stays None and the .update() below would raise
    # AttributeError — confirm whether callers can actually reach that
    # combination (the fields=['PNPRB'] doctest suggests it should work).
    jointables = [table for table, foo, bar in joins]
    for table in tables + jointables:
        # dubletten = set(querymappings.values()) & set(MAPPINGDIR.get(table, {}).values())
        # if dubletten:
        #     logging.warning('field name clash: %s' % list(dubletten))
        querymappings.update(MAPPINGDIR.get(table, {}))
    if not fields:  # deduce fieldnames from querymappings
        fields = querymappings.keys()
    if not fields:  # still nothing found
        raise RuntimeError("can't deduce field names, check fields.py")
    # assemble the request dictionary for the backend
    args = dict(fields=fields, tablenames=tablenames, tag=ua)
    if condition:
        args['condition'] = condition
    if grouping:
        args['grouping'] = grouping
    if ordering:
        args['ordering'] = ordering
    if limit:
        args['limit'] = limit
    if joins:
        # ensure a list of 3-tuples
        joins = [(_get_tablename(a), b, c) for (a, b, c) in joins]
        args['joins'] = joins
    # a positive cachingtime allows serving from (and filling) the cache
    bust_cache = True
    if cachingtime > 0:
        bust_cache = False
    rows = memcache.get('husoftm_query_%r_%r' % (querymappings, args))
    if rows:
        return rows
    start = time.time()
    result = execute('sql', args, ua=ua, bust_cache=bust_cache)
    rows = hujson.loads(result)
    if querymappings:
        # map raw rows to dicts with friendly field names
        rows = _rows2dict(fields, querymappings, rows)
    else:
        # no mapping requested: return tuples of fixed-up field values
        rows = [tuple([_fix_field(data, name) for data, name in zip(row, fields)])
                for row in rows]
    delta = time.time() - start
    if delta > 5:
        # make slow queries visible in the logs
        logging.warning("Slow (%.3fs) SQL query in %s", delta, args)
    try:
        memcache.add(key='husoftm_query_%r_%r' % (querymappings, args),
                     value=rows, time=cachingtime)
    except:
        pass  # value 'rows' was probably too big for memcache or memcache was offline
    return rows
def _get_json(self, url):
    """Helper: GET *url* and return the JSON-decoded response body."""
    return loads(self.app.get(url).body)
def get_json(self, path):
    """Issue a GET request against *path* and decode the result as JSON."""
    headers = {'Accept': 'application/json'}
    response = self.app.get(path, headers=headers)
    return json.loads(response.body)