def save_many(self, query, comment=None, data=None, action=None):
    _query = simplejson.dumps(query)
    #for q in query:
    #    self._run_hooks('before_new_version', q)
    data = data or {}
    result = self._request('/save_many', 'POST',
                           dict(query=_query, comment=comment, action=action,
                                data=simplejson.dumps(data)))
    self._invalidate_cache([r['key'] for r in result])
    for q in query:
        self._run_hooks('on_new_version', q)
    return result
def write(self, query, comment=None, action=None):
    self._run_hooks('before_new_version', query)
    _query = simplejson.dumps(query)
    result = self._request('/write', 'POST',
                           dict(query=_query, comment=comment, action=action))
    self._run_hooks('on_new_version', query)
    self._invalidate_cache(result.created + result.updated)
    return result
def GET(self, sitename, offset): i = web.input(timestamp=None, limit=1000) if not config.writelog: raise web.notfound("") else: log = self.get_log(offset, i) limit = min(1000, common.safeint(i.limit, 1000)) try: web.header('Content-Type', 'application/json') yield '{"data": [\n' sep = "" for i in range(limit): line = log.readline().strip() if line: if self.valid_json(line): yield sep + line.strip() sep = ",\n" else: print >> sys.stderr, "ERROR: found invalid json before %s" % log.tell( ) else: break yield '], \n' yield '"offset": ' + simplejson.dumps(log.tell()) + "\n}\n" except Exception, e: print 'ERROR:', str(e)
def get_many(self, keys, raw=False): """When raw=True, the raw data is returned instead of objects. """ if not keys: return [] # simple hack to avoid crossing URL length limit. if len(keys) > 100: things = [] while keys: things += self.get_many(keys[:100], raw=raw) keys = keys[100:] return things data = dict(keys=simplejson.dumps(keys)) result = self._request('/get_many', data=data) things = [] for key in keys: #@@ what if key is not there? if key in result: data = result[key] if raw: things.append(data) else: data = web.storage(common.parse_query(data)) self._cache[key, None] = data things.append( create_thing(self, key, self._process_dict(data))) return things
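# A minimal standalone sketch of the batching idea used in get_many above:
# requesting at most 100 keys per call keeps the /get_many URL under typical
# length limits. fetch_in_batches and its fetch argument are hypothetical
# names used only for illustration, not part of the client.
def fetch_in_batches(keys, fetch, batch_size=100):
    results = []
    for start in range(0, len(keys), batch_size):
        results += fetch(keys[start:start + batch_size])
    return results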
def GET(self, sitename, offset): i = web.input(timestamp=None, limit=1000) if not config.writelog: raise web.notfound("") else: log = self.get_log(offset, i) limit = min(1000, common.safeint(i.limit, 1000)) try: # read the first line line = log.readline(do_update=False) # first line can be incomplete if the offset is wrong. Assert valid. self.assert_valid_json(line) web.header('Content-Type', 'application/json') yield '{"data": [\n' yield line.strip() for i in range(1, limit): line = log.readline(do_update=False) if line: yield ",\n" + line.strip() else: break yield '], \n' yield '"offset": ' + simplejson.dumps(log.tell()) + "\n}\n" except Exception, e: print 'ERROR:', str(e)
def get_many(self, keys, raw=False):
    """When raw=True, the raw data is returned instead of objects."""
    if not keys:
        return []

    # simple hack to avoid crossing URL length limit.
    if len(keys) > 100:
        things = []
        while keys:
            things += self.get_many(keys[:100], raw=raw)
            keys = keys[100:]
        return things

    data = dict(keys=simplejson.dumps(keys))
    result = self._request('/get_many', data=data)
    things = []

    for key in keys:
        #@@ what if key is not there?
        if key in result:
            data = result[key]
            if raw:
                things.append(data)
            else:
                data = web.storage(common.parse_query(data))
                self._cache[key, None] = data
                things.append(create_thing(self, key, self._process_dict(data)))
    return things
def get_many(self, keys):
    if not keys:
        return []

    # simple hack to avoid crossing URL length limit.
    if len(keys) > 100:
        things = []
        while keys:
            things += self.get_many(keys[:100])
            keys = keys[100:]
        return things

    data = dict(keys=simplejson.dumps(keys))
    result = self._request('/get_many', data=data)
    things = []

    import copy
    for key in keys:
        #@@ what if key is not there?
        if key in result:
            data = result[key]
            data = web.storage(common.parse_query(data))
            self._cache[key, None] = data
            things.append(create_thing(self, key, self._process_dict(copy.deepcopy(data))))
    return things
def GET(self, sitename, offset): i = web.input(timestamp=None, limit=1000) if not config.writelog: raise web.notfound("") else: log = self.get_log(offset, i) limit = min(1000, common.safeint(i.limit, 1000)) try: web.header('Content-Type', 'application/json') yield '{"data": [\n' sep = "" for i in range(limit): line = log.readline().strip() if line: if self.valid_json(line): yield sep + line.strip() sep = ",\n" else: print >> sys.stderr, "ERROR: found invalid json before %s" % log.tell() else: break yield '], \n' yield '"offset": ' + simplejson.dumps(log.tell()) + "\n}\n" except Exception, e: print 'ERROR:', str(e)
def versions(self, query):
    def process(v):
        v = web.storage(v)
        v.created = parse_datetime(v.created)
        v.author = v.author and self.get(v.author, lazy=True)
        return v

    query = simplejson.dumps(query)
    versions = self._request('/versions', 'GET', {'query': query})
    return [process(v) for v in versions]
def process(query): yield "{\n" for i, r in enumerate(self.db.query(query)): if i: yield ",\n" yield simplejson.dumps(r.key) yield ": " yield process_json(r.key, r.data) yield "}"
def browse(request): if request.GET.get("f", None) == "mochikit.js": return HttpResponse(mochikit.mochikit, content_type="application/x-javascript") if request.GET.get("f", None) == "interpreter.js": return HttpResponse(mochikit.interpreter, content_type="application/x-javascript") desc = jsonrpc_site.service_desc() return render_to_response( "browse.html", {"methods": desc["procs"], "method_names_str": dumps([m["name"] for m in desc["procs"]])} )
def __hash__(self):
    if self.key:
        return hash(self.key)
    else:
        d = self.dict()
        # dict is not hashable and converting it to a tuple of items isn't
        # enough as values might again be dictionaries. The simplest
        # solution seems to be converting it to JSON.
        return hash(simplejson.dumps(d, sort_keys=True))
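# Why sort_keys=True makes __hash__ above stable: dicts with the same contents
# serialize to the same canonical JSON regardless of key order, so equal
# objects hash equally. A quick standalone check, using simplejson as the
# surrounding code does:
import simplejson
d1 = {'a': 1, 'b': {'c': 2}}
d2 = {'b': {'c': 2}, 'a': 1}
assert simplejson.dumps(d1, sort_keys=True) == simplejson.dumps(d2, sort_keys=True)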
def process(query):
    yield '{\n'
    for i, r in enumerate(self.db.query(query)):
        if i:
            yield ',\n'
        yield simplejson.dumps(r.key)
        yield ": "
        yield r.data
    yield '}'
def save_many(self, query, comment=None, action=None):
    _query = simplejson.dumps(query)
    #for q in query:
    #    self._run_hooks('before_new_version', q)
    result = self._request('/save_many', 'POST',
                           dict(query=_query, comment=comment, action=action))
    self._invalidate_cache([r['key'] for r in result])
    for q in query:
        self._run_hooks('on_new_version', q)
    return result
def browse(request):
    if request.GET.get('f', None) == 'mochikit.js':
        return HttpResponse(mochikit.mochikit, content_type='application/x-javascript')
    if request.GET.get('f', None) == 'interpreter.js':
        return HttpResponse(mochikit.interpreter, content_type='application/x-javascript')
    desc = jsonrpc_site.service_desc()
    return render_to_response('browse.html', {
        'methods': desc['procs'],
        'method_names_str': dumps([m['name'] for m in desc['procs']])
    })
def browse(request, site=jsonrpc_site):
    if request.GET.get('f', None) == 'mochikit.js':
        return HttpResponse(mochikit.mochikit, content_type='application/x-javascript')
    if request.GET.get('f', None) == 'interpreter.js':
        return HttpResponse(mochikit.interpreter, content_type='application/x-javascript')
    desc = site.service_desc()
    return render_to_response('browse.html', {
        'methods': desc['procs'],
        'method_names_str': dumps([m['name'] for m in desc['procs']])
    })
def _response_root(self):
    accepted = self.headers.getheader('Accept')
    cluster_state = json.dumps(sync.cluster_state.get_state(),
                               sort_keys=True, indent=2)
    if accepted.find('text/html') != -1:
        # Output in plain text
        return self._response_plain("%s\n%s\n" % (RESPONSE_ABOUT, cluster_state))
    else:
        # Output in HTML
        # TODO: HTML, escaping
        # TODO: data
        return self._response_plain("HTML %s\n<pre>%s</pre>\n"
                                    % (RESPONSE_ABOUT, cluster_state))
def dump_json(data):
    """Converts the configuration into a human-readable string.

    This conversion must be predictable: the same configuration is always
    converted into the same string.

    Args:
        data: Data for conversion
    """
    if data is None:
        return None
    return json.dumps(data, sort_keys=True, indent=2, check_circular=False)
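# A quick check of the predictability guarantee documented in dump_json above:
# with sort_keys=True, logically equal configurations always produce the same
# string, and None passes through untouched.
assert dump_json({'x': 1, 'y': 2}) == dump_json({'y': 2, 'x': 1})
assert dump_json(None) is None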
def dump_json(data):
    """Converts the configuration into a human-readable string.

    This conversion must be predictable: the same configuration is always
    converted into the same string.

    Args:
        data: Data for conversion
    """
    if data is None:
        return None
    return json.dumps(data, sort_keys=True, indent=2, check_circular=False)
def write(self, action, sitename, timestamp, data):
    path = self.get_path(timestamp)
    dir = os.path.dirname(path)
    if not os.path.exists(dir):
        os.makedirs(dir)

    f = self._open(path, 'a')
    f.write(simplejson.dumps(dict(action=action, site=sitename,
                                  timestamp=timestamp.isoformat(), data=data)))
    f.write('\n')
    f.flush()
    #@@ optimize: call fsync after all modifications are written instead of calling for every modification
    os.fsync(f.fileno())
    f.close()
def dumps(obj, *, cls=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped
    instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, then the return value can contain non-ASCII
    characters if they appear in strings contained in ``obj``. Otherwise, all
    such characters are escaped in JSON strings.

    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance with the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, then JSON array elements and
    object members will be pretty-printed with that indent level. An indent
    level of 0 will only insert newlines. ``None`` is the most compact
    representation.

    If specified, ``separators`` should be an ``(item_separator, key_separator)``
    tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
    ``(',', ': ')`` otherwise. To get the most compact JSON representation,
    you should specify ``(',', ':')`` to eliminate whitespace.

    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.

    If *sort_keys* is true (default: ``False``), then the output of
    dictionaries will be sorted by key.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg; otherwise ``JSONEncoder`` is used.
    """
    # The keyword options documented above arrive in ``kw``; forward them
    # either to the underlying module-level dumps or to the encoder class.
    if cls is None:
        return _json.dumps(obj, **kw)
    return cls(**kw).encode(obj)
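# Illustration of the indent and separators options described in the docstring
# above, using the standard json module directly.
import json
compact = json.dumps({'a': 1, 'b': 2}, separators=(',', ':'), sort_keys=True)
assert compact == '{"a":1,"b":2}'
pretty = json.dumps({'a': 1, 'b': 2}, indent=2, sort_keys=True)  # one member per line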
def smd(request):
    def get_args(method):
        from inspect import getargspec
        return [a for a in getargspec(method).args if a != "self"]

    smd = {
        "serviceType": "JSON-RPC",
        "serviceURL": reverse("jsonrpc_mountpoint"),
        "methods": []
    }
    # TODO: write smd handler
    return HttpResponse(dumps(smd), mimetype="application/json")
def save(self, query, comment=None): query = dict(query) self._run_hooks("before_new_version", query) query["_comment"] = comment key = query["key"] # @@ save sends payload of application/json instead of form data data = simplejson.dumps(query) result = self._request("/save" + key, "POST", data) if result: self._invalidate_cache([result["key"]]) self._run_hooks("on_new_version", query) return result
def save(self, query, comment=None):
    query = dict(query)
    self._run_hooks('before_new_version', query)

    query['_comment'] = comment
    key = query['key']

    #@@ save sends payload of application/json instead of form data
    data = simplejson.dumps(query)
    result = self._request('/save' + key, 'POST', data)
    if result:
        self._invalidate_cache([result['key']])
        self._run_hooks('on_new_version', query)
    return result
def save(self, key, data, timestamp=None, comment=None, machine_comment=None,
         ip=None, author=None, transaction_id=None):
    timestamp = timestamp or datetime.datetime.utcnow()
    t = self.db.transaction()
    metadata = self.get_metadata(key)
    try:
        typekey = data['type']
        type_id = self._key2id(typekey)

        if metadata:
            # already existing object
            revision = None
            thing_id = metadata.id
            olddata = simplejson.loads(self.get(key))
            created = metadata.created
            action = "update"
        else:
            revision = 1
            thing_id = self.new_thing(key=key, type=type_id, latest_revision=1,
                                      last_modified=timestamp, created=timestamp)
            olddata = {}
            created = timestamp
            action = "create"

        if transaction_id is None:
            transaction_id = self._add_transaction(action=action, author=author,
                                                   ip=ip, comment=comment,
                                                   created=timestamp)
        revision = self._add_version(thing_id=thing_id, revision=revision,
                                     transaction_id=transaction_id,
                                     created=timestamp)
        self._update_tables(thing_id, key, olddata, dict(data))  #@@ why making copy of data?

        data['created'] = created
        data['revision'] = revision
        data['last_modified'] = timestamp
        data['key'] = key
        data['id'] = thing_id
        data['latest_revision'] = revision
        data = common.format_data(data)

        self.db.update('thing', where='id=$thing_id',
                       last_modified=timestamp, latest_revision=revision,
                       type=type_id, vars=locals())
        self.db.insert('data', seqname=False, thing_id=thing_id,
                       revision=revision, data=simplejson.dumps(data))
    except:
        t.rollback()
        self.cache.clear(local=True)
        raise
    else:
        t.commit()

    web.ctx.new_objects[key] = simplejson.dumps(data)
    return {'key': key, 'revision': revision}
def __call__(self, *args, **kwargs):
    params = kwargs if len(kwargs) else args
    if Any.kind(params) == Object and self.__version != '2.0':
        raise Exception('Unsupported arg type for JSON-RPC 1.0 '
                        '(the default version for this client; '
                        'pass version="2.0" to use keyword arguments)')
    r = urllib.urlopen(self.__service_url, dumps({
        "jsonrpc": self.__version,
        "method": self.__service_name,
        "params": params,
        "id": str(uuid.uuid1())
    })).read()
    return loads(r)
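# A sketch of the JSON-RPC 2.0 request body that __call__ above posts; the
# method name and params here are illustrative, not part of the client.
import uuid
from json import dumps
payload = dumps({
    "jsonrpc": "2.0",
    "method": "echo",
    "params": {"message": "hello"},
    "id": str(uuid.uuid1()),
})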
def _response_root(self):
    accepted = self.headers.getheader('Accept')
    cluster_state = json.dumps(sync.cluster_state.get_state(),
                               sort_keys=True, indent=2)
    if accepted.find('text/html') != -1:
        # Output in plain text
        return self._response_plain("%s\n%s\n" % (RESPONSE_ABOUT, cluster_state))
    else:
        # Output in HTML
        # TODO: HTML, escaping
        # TODO: data
        return self._response_plain("HTML %s\n<pre>%s</pre>\n"
                                    % (RESPONSE_ABOUT, cluster_state))
def encode_notifications(tokens, notifications): """ Returns the encoded bytes of tokens and notifications tokens a list of tokens or a string of only one token notifications a list of notifications or a dictionary of only one """ fmt = "!BH32sH%ds" structify = lambda t, p: struct.pack(fmt % len(p), 0, 32, t, len(p), p) binaryify = lambda t: t.decode('hex') if type(notifications) is dict and type(tokens) in (str, unicode): tokens, notifications = ([tokens], [notifications]) if type(notifications) is list and type(tokens) is list: return ''.join(map(lambda y: structify(*y), ((binaryify(t), json.dumps(p, separators=(',',':'), ensure_ascii=False).encode('utf-8')) for t, p in zip(tokens, notifications))))
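# Hypothetical usage of encode_notifications above (simple APNs format,
# command byte 0), with a dummy 64-hex-character device token. Python 2 only,
# since the helper relies on str.decode('hex'). The resulting frame layout is:
# command (1 byte), token length (2 bytes), token (32 bytes),
# payload length (2 bytes), then the JSON payload.
frame = encode_notifications('00' * 32, {'aps': {'alert': 'hello'}})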
def save_many(self, docs, timestamp, comment, data, ip, author, action=None):
    action = action or "bulk_update"
    s = SaveImpl(self.db, self.schema, self.indexer, self.property_manager)

    # Hack to allow processing of json before using. Required for OL legacy.
    s.process_json = process_json

    docs = common.format_data(docs)
    changeset = s.save(docs, timestamp=timestamp, comment=comment, ip=ip,
                       author=author, action=action, data=data)

    # Update the cache using the docs from the result, as they contain the
    # updated revision and last_modified fields.
    for doc in changeset.get('docs', []):
        web.ctx.new_objects[doc['key']] = simplejson.dumps(doc)
    return changeset
def save(self, query, comment=None, action=None, data=None):
    query = dict(query)
    self._run_hooks('before_new_version', query)

    query['_comment'] = comment
    query['_action'] = action
    query['_data'] = data
    key = query['key']

    #@@ save sends payload of application/json instead of form data
    data = simplejson.dumps(query)
    result = self._request('/save' + key, 'POST', data)
    if result:
        self._invalidate_cache([result['key']])
        self._run_hooks('on_new_version', query)
    return result
def write(self, action, sitename, timestamp, data):
    path = self.get_path(timestamp)
    dir = os.path.dirname(path)
    if not os.path.exists(dir):
        os.makedirs(dir)

    f = self._open(path, 'a')
    f.write(simplejson.dumps(dict(action=action, site=sitename,
                                  timestamp=timestamp.isoformat(), data=data)))
    f.write('\n')
    f.flush()
    #@@ optimize: call fsync after all modifications are written instead of calling for every modification
    os.fsync(f.fileno())
    f.close()
def encode_notifications(tokens, notifications): """ Returns the encoded bytes of tokens and notifications tokens a list of tokens or a string of only one token notifications a list of notifications or a dictionary of only one """ fmt = "!BH32sH%ds" structify = lambda t, p: struct.pack(fmt % len(p), 0, 32, t, len(p), p) binaryify = lambda t: t.decode('hex') if type(notifications) is dict and type(tokens) in (str, unicode): tokens, notifications = ([tokens], [notifications]) if type(notifications) is list and type(tokens) is list: return ''.join( map(lambda y: structify(*y), ((binaryify(t), json.dumps(p, separators=(',', ':'))) for t, p in zip(tokens, notifications))))
def initialize(self): if not self.initialized(): t = self.db.transaction() id = self.new_thing(key="/type/type") last_modified = datetime.datetime.utcnow() data = dict( key="/type/type", type={"key": "/type/type"}, last_modified={"type": "/type/datetime", "value": last_modified}, created={"type": "/type/datetime", "value": last_modified}, revision=1, latest_revision=1, id=id, ) self.db.update("thing", type=id, where="id=$id", vars=locals()) self.db.insert("version", False, thing_id=id, revision=1) self.db.insert("data", False, thing_id=id, revision=1, data=simplejson.dumps(data)) t.commit()
def initialize(self):
    if not self.initialized():
        t = self.db.transaction()

        id = self.new_thing(key='/type/type')
        last_modified = datetime.datetime.utcnow()

        data = dict(
            key='/type/type',
            type={'key': '/type/type'},
            last_modified={'type': '/type/datetime', 'value': last_modified},
            created={'type': '/type/datetime', 'value': last_modified},
            revision=1,
            latest_revision=1,
            id=id
        )
        self.db.update('thing', type=id, where='id=$id', vars=locals())
        self.db.insert('version', False, thing_id=id, revision=1)
        self.db.insert('data', False, thing_id=id, revision=1,
                       data=simplejson.dumps(data))
        t.commit()
def encode_notifications_2(ids, tokens, notifications):
    """ Returns the encoded bytes of ids, tokens and notifications to write
        into the buffer; use the returned ids to delete invalid tokens

            ids             a list of ids or an int of only one id
            tokens          a list of tokens or a string of only one token
            notifications   a list of notifications or a dictionary of only one
    """
    # format: cmd(uint8_t,B), id(uint32_t,I), expire(uint32_t,I),
    #         TokenLen(uint16_t,H), Token(32 bytes,32s),
    #         PayLen(uint16_t,H), PayLoad(payload,%ds)
    fmt = "!BIIH32sH%ds"
    structify = lambda d, t, p: struct.pack(fmt % len(p), 1, d,
                                            int(time.time() + 86400),
                                            32, t, len(p), p)
    tempStr = 'encode_notifications_2 identifier=%d token=%s' % (ids, tokens)
    log.msg(tempStr)
    # fmt = "!BIIH32sH%ds"
    # structify = lambda d, t, p: struct.pack(fmt % len(p), 1, d, int(time.time()+86400), 32, t, len(p), p)
    binaryify = lambda t: t.decode('hex')
    if type(notifications) is dict and type(tokens) in (str, unicode) and type(ids) is int:
        ids, tokens, notifications = ([ids], [tokens], [notifications])
    if type(notifications) is list and type(tokens) is list and type(ids) is list:
        return ''.join(map(lambda y: structify(*y),
                           ((d, binaryify(t), json.dumps(p, separators=(',', ':')))
                            for d, t, p in zip(ids, tokens, notifications))))
def encode_notifications(tokens, notifications): """ Returns the encoded bytes of tokens and notifications tokens a list of tokens or a string of only one token notifications a list of notifications or a dictionary of only one """ global IDENTIFIER IDENTIFIER=IDENTIFIER+1 #format cmd(uint8_t,B), id(uint32_t,I), expire(uint32_t,I),TokenLen(uint16_t,H),Token(32byte,32s),PayLen(uint16_t,H),PayLoad(payload,%ds) fmt = "!BIIH32sH%ds" structify = lambda t, p: struct.pack(fmt % len(p), 1, IDENTIFIER, int(time.time()+86400), 32, t, len(p), p) tempStr = 'encode_notifications identifier=%d token=%s' %(IDENTIFIER, tokens) log.msg(tempStr) #fmt = "!BH32sH%ds" #structify = lambda t, p: struct.pack(fmt % len(p), 0, 32, t, len(p), p) binaryify = lambda t: t.decode('hex') if type(notifications) is dict and type(tokens) in (str, unicode): tokens, notifications = ([tokens], [notifications]) if type(notifications) is list and type(tokens) is list: return ''.join(map(lambda y: structify(*y), ((binaryify(t), json.dumps(p, separators=(',',':'))) for t, p in zip(tokens, notifications))))
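# Sanity check of the enhanced-format header documented in the comment above:
# with an empty payload, the packed size is 1 + 4 + 4 + 2 + 32 + 2 = 45 bytes.
import struct
assert struct.calcsize("!BIIH32sH") == 45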
def encode_notifications(tokens, notifications): """ Returns the encoded bytes of tokens and notifications tokens a list of tokens or a string of only one token notifications a list of notifications or a dictionary of only one """ fmt = "!BH32sH%ds" structify = lambda t, p: struct.pack(fmt % len(p), 0, 32, t, len(p), p) binaryify = lambda t: t.decode('hex') if type(notifications) is dict and type(tokens) in (str, unicode): tokens, notifications = ([tokens], [notifications]) for notification in notifications: alert = notification["aps"]["alert"] alert_ords = [ord(i) for i in alert] if max(alert_ords) > 256: new_alert = "".join(unichr(i) for i in alert_ords) else: new_alert = "".join(chr(i) for i in alert_ords).decode("utf-8") notification["aps"]["alert"] = alert if type(notifications) is list and type(tokens) is list: return ''.join(map(lambda y: structify(*y), ((binaryify(t), json.dumps(p, separators=(',',':'), ensure_ascii=False).encode('utf-8')) for t, p in zip(tokens, notifications))))
def get_many(self, keys):
    if not keys:
        return []

    # simple hack to avoid crossing URL length limit.
    if len(keys) > 100:
        things = []
        while keys:
            things += self.get_many(keys[:100])
            keys = keys[100:]
        return things

    data = dict(keys=simplejson.dumps(keys))
    result = self._request('/get_many', data=data)
    things = []

    for key in keys:
        #@@ what if key is not there?
        if key in result:
            data = result[key]
            data = web.storage(common.parse_query(data))
            self._cache[key, None] = data
            things.append(create_thing(self, key, self._process_dict(data)))
    return things
def things(self, query, details=False):
    query = simplejson.dumps(query)
    return self._request('/things', 'GET',
                         {'query': query, 'details': str(details)})
            process_exception(e)
        except Exception, e:
            common.record_exception()
            # call web.internalerror to send email when web.internalerror is set to web.emailerrors
            process_exception(common.InfobaseException(error="internal_error", message=str(e)))
            if web.ctx.get('infobase_localmode'):
                raise common.InfobaseException(message=str(e))
            else:
                process_exception(e)

        if isinstance(d, JSON):
            result = d.json
        else:
            result = simplejson.dumps(d)

        if web.ctx.get('infobase_localmode'):
            return result
        else:
            # set auth-token as cookie for remote connection.
            if web.ctx.get('infobase_auth_token'):
                web.setcookie('infobase_auth_token', web.ctx.infobase_auth_token)
            return result
    return g

def get_data():
    if 'infobase_input' in web.ctx:
        return web.ctx.infobase_input
    else:
        return web.data()
def __str__(self):
    return simplejson.dumps(self.d)
def _encode(tokens, notifications):
    # structify and binaryify are assumed to be the packing helpers defined
    # alongside encode_notifications above.
    for t, p in zip(tokens, notifications):
        d = json.dumps(p, separators=(',', ':'), ensure_ascii=False)
        y = (binaryify(t), d.encode('utf-8'))
        yield structify(*y)
def get(self, key, revision=None):
    # Note: revision is currently ignored; the latest data is always returned.
    return simplejson.dumps(self[key].format_data())
def recentchanges(self, query):
    query = simplejson.dumps(query)
    changes = self._request('/_recentchanges', 'GET', {'query': query})
    return [Changeset.create(self, c) for c in changes]
            common.record_exception()
            # call web.internalerror to send email when web.internalerror is set to web.emailerrors
            process_exception(common.InfobaseException(error="internal_error", message=str(e)))
            if web.ctx.get('infobase_localmode'):
                raise common.InfobaseException(message=str(e))
            else:
                process_exception(e)

        if isinstance(d, JSON):
            result = d.json
        else:
            result = simplejson.dumps(d)

        t_end = time.time()
        totaltime = t_end - t_start
        querytime = web.ctx.pop('querytime', 0.0)
        queries = web.ctx.pop('queries', 0)

        if config.get("enabled_stats"):
            web.header("X-STATS", "tt: %0.3f, tq: %0.3f, nq: %d"
                       % (totaltime, querytime, queries))

        if web.ctx.get('infobase_localmode'):
            return result
        else:
            # set auth-token as cookie for remote connection.
def update(self, d={}, **kw):
    d2 = dict(d, **kw)
    docs = [dict(doc, _key=key) for key, doc in d2.items()]
    self._request("_save_many", method="POST", data=simplejson.dumps(docs))
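# The document shape produced by update above, shown standalone: each key of
# the merged dict becomes a _key field on its doc before the bulk save.
d2 = dict({'a': {'x': 1}}, b={'y': 2})
docs = [dict(doc, _key=key) for key, doc in d2.items()]
# e.g. [{'x': 1, '_key': 'a'}, {'y': 2, '_key': 'b'}] (order may vary)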
def __setitem__(self, key, data):
    return self._request(key, method='PUT', data=simplejson.dumps(data))