def temp_view(self, map_, reduce_=None, language='javascript', **kwargs):
    """Execute an ad-hoc (temporary) CouchDB view and return a RowSet.

    map_      -- view map function source, in `language`
    reduce_   -- optional reduce function source (only used if a string)
    language  -- view language tag sent to the server
    kwargs    -- view query options (key/startkey/endkey are JSON-encoded;
                 booleans become 'true'/'false')

    Raises TempViewException on any non-200 response.
    """
    view = {"map": map_, "language": language}
    if isinstance(reduce_, basestring):
        view['reduce'] = reduce_
    body = json.dumps(view)
    if not kwargs:
        # NOTE(review): this branch uses self.db.uri while the kwargs branch
        # uses self.path — looks inconsistent; confirm which base is correct.
        path = self.db.uri+'_temp_view'
    else:
        for k, v in kwargs.iteritems():
            if type(v) is bool:
                # CouchDB expects lowercase literal booleans in the query string
                kwargs[k] = str(v).lower()
            if k in ['key', 'startkey', 'endkey']:
                # key-range parameters must be JSON-encoded
                kwargs[k] = json.dumps(v)
        query_string = urllib.urlencode(kwargs)
        path = self.path+'_temp_view' + '?' + query_string
    response = self.db.http.post(path, body=body)
    if response.status == 200:
        result = json.loads(response.body)
        return RowSet(self.db, result['rows'], offset=result['offset'], total_rows=result['total_rows'])
    else:
        raise TempViewException('Status: ' + str(response.status) + '\nBody: ' + response.body)
def temp_view(self, map_, reduce_=None, language='javascript', **kwargs):
    """POST a temporary (ad-hoc) view to CouchDB and return its rows.

    Booleans in the query options are lowered to 'true'/'false' and the
    key-range options are JSON-encoded, as the view API requires.
    Raises TempViewException on a non-200 response.
    """
    view_doc = {"map": map_, "language": language}
    if isinstance(reduce_, basestring):
        view_doc['reduce'] = reduce_
    payload = json.dumps(view_doc)
    if kwargs:
        for key, value in kwargs.iteritems():
            if type(value) is bool:
                kwargs[key] = str(value).lower()
            if key in ['key', 'startkey', 'endkey']:
                kwargs[key] = json.dumps(value)
        path = self.path + '_temp_view' + '?' + urllib.urlencode(kwargs)
    else:
        path = self.db.uri + '_temp_view'
    response = self.db.http.post(path, body=payload)
    if response.status != 200:
        raise TempViewException('Status: ' + str(response.status) + '\nBody: ' + response.body)
    result = json.loads(response.body)
    return RowSet(self.db, result['rows'], offset=result['offset'], total_rows=result['total_rows'])
def __call__(self, keys=None, **kwargs):
    """Query this view.

    keys   -- optional list of keys; when given the query is POSTed with a
              {"keys": [...]} body instead of a plain GET
    kwargs -- view query options; everything except docid-style parameters
              and 'stale' is JSON-encoded per the CouchDB view API

    Returns a RowSet on success, raises ViewException otherwise.
    """
    qs = {}
    for k, v in kwargs.iteritems():
        if 'docid' not in k and k != 'stale':
            qs[k] = json.dumps(v)
        else:
            # docids and 'stale' are passed through verbatim
            qs[k] = v
    query_string = urllib.urlencode(qs)
    # BUG FIX: was "len(query_string) is not 0" — an identity comparison on
    # an int, which is implementation-defined; use truthiness instead.
    if query_string:
        path = self.path + '?' + query_string
    else:
        path = self.path
    if not keys:
        response = self.db.http.get(path)
    else:
        response = self.db.http.post(path, body=json.dumps({'keys': keys}))
    result = json.loads(response.body)
    if response.status == 200:
        return RowSet(self.db, result['rows'], offset=result.get('offset', None), total_rows=result.get('total_rows'))
    else:
        raise ViewException(result)
def fb_save_base(): op = OptionParser(usage='%prog [options] baseid') op.disable_interspersed_args() op.add_option('-s', '--service', dest='service_host', metavar='HOST', default="freebase.com", help='Freebase HTTP service address:port') op.add_option('-S', '--sandbox', dest='use_sandbox', default=False, action='store_true', help='shortcut for --service=sandbox-freebase.com') options, args = op.parse_args() service_host = options.service_host if options.use_sandbox: service_host = "sandbox-freebase.com" if len(args) < 1: op.error('Required baseid missing') if len(args) > 1: op.error('Too many arguments') s = HTTPMetawebSession(service_host) print json.dumps(dump_base(s, args[0]), indent=2)
def term_facet(host='localhost:9200', terms=['bibleverse'], _type='habakkuk', date_filter=[], size=10): ret = [] conn = ES(host) q = MatchAllQuery() if date_filter: start,end = date_filter q = FilteredQuery(q, RangeFilter(qrange=ESRange('created_at_date',start,end,include_upper=False))) q = q.search(size=0) for term in terms: q.facet.add_term_facet(term,order='count',size=size) print json.dumps(json.loads(q.to_search_json()),indent=2) resultset = conn.search(query=q, indices=_type+'-*', doc_types=[_type]) for facet in resultset.facets: print "Total",facet,resultset.facets[facet]['total'] for row in resultset.facets[facet]['terms']: print "\t",row['term'],row['count'] ret.append((facet,row['term'])) return ret
def remove_user(request):
    """Delete the ClientUser identified by the 'uid' request parameter.

    Always returns a JSON response; on any failure (missing user, DB error)
    the body carries valid=False and a localized error message.
    """
    uid = request.REQUEST.get('uid', '')
    try:
        u = ClientUser.objects.get(pk=uid)
        u.delete()
        return HttpResponse(json.dumps({'id': uid, 'valid': True, 'message': ''}),
                            mimetype="application/json")
    except Exception:
        # FIX: narrowed from a bare "except:", which also swallowed
        # SystemExit/KeyboardInterrupt; the best-effort JSON reply stays.
        return HttpResponse(json.dumps({'valid': False, 'message': _('Error deleting user')}),
                            mimetype="application/json")
def test_parse(self):
    """Round-trip a JSON document and verify huge floats are rejected
    when allow_nan=False."""
    decoded = json.loads(JSON)
    encoded = json.dumps(decoded)
    self.assertEquals(decoded, json.loads(encoded))
    try:
        json.dumps(decoded, allow_nan=False)
    except ValueError:
        pass
    else:
        self.fail("23456789012E666 should be out of range")
def mqlwrite(self, sq, attribution_id=None, use_permission_of=None, **envelope):
    """do a mql write. For a more complete description, see
    http://www.freebase.com/docs/web_services/mqlwrite

    sq                -- the mql write query
    attribution_id    -- optional attribution object id
    use_permission_of -- optional id whose permission the write borrows
    envelope          -- extra envelope parameters passed through verbatim
    """
    query = envelope.copy()
    query.update(query=sq, escape=False)
    if use_permission_of:
        query['use_permission_of'] = use_permission_of
    if attribution_id:
        # strange badly named api: the envelope key is just 'attribution'
        query['attribution'] = attribution_id
    qstr = json.dumps(query, separators=SEPARATORS)
    self.log.debug('MQLWRITE: %s', qstr)
    service = '/api/service/mqlwrite'
    self.log.info('%s: %s', service, Delayed(logformat, sq))
    r = self._httpreq_json(service, 'POST', form=dict(query=qstr))
    self.log.debug('MQLWRITE RESP: %r', r)
    return self._mqlresult(r)
def dump(start, end, backupdir, eshost):
    """Dump all 'habakkuk' docs with created_at_date in [start, end) to a
    temp file, one JSON document per line, then gzip it into backupdir.

    Writes progress to stderr; returns early (no file) when the query
    matches nothing.
    """
    conn = ES(eshost)
    out = file('/tmp/out.json', 'w')
    _type = 'habakkuk'
    q = MatchAllQuery()
    q = FilteredQuery(q, RangeFilter(qrange=ESRange('created_at_date', start, end, include_upper=False)))
    q = q.search()
    resultset = conn.search(query=q, indices=_type + "-*", doc_types=[_type], scan=True)
    cnt = 0
    if not resultset.total:
        sys.stderr.write("no data for %s - %s\n" % (start, end))
        return
    try:
        sys.stderr.write("Will write %d lines to %s\n" % (resultset.total, out.name))
        while True:
            r = resultset.next()
            cnt += 1
            out.write(json.dumps(r) + '\n')
    except StopIteration:
        # scan cursor exhausted — all docs written
        pass
    out.close()
    # gzip the dump into the backup directory, named by the start date
    ext = datetime.strftime(start, '%Y-%m-%d')
    backup = os.path.join(backupdir, "habakkuk-%s.json.gz" % ext)
    f_in = open(out.name, 'rb')
    f_out = gzip.open(backup, 'wb')
    f_out.writelines(f_in)
    f_out.close()
    # BUG FIX: the original called f_out.close() twice and never closed
    # f_in, leaking the input file handle.
    f_in.close()
    sys.stderr.write("Created %s\n" % backup)
def default(self, *args, **kwargs):
    """
    Accepts the JSON-RPC request and dispatches it to the matching
    JSON-RPC method.
    """
    responses = []

    # Response content type -> JSON
    set_content_type_json()

    # Get data
    if cherrypy.request.method == "GET":
        data = kwargs
        if "params" in data:
            if self.debug:
                cherrypy.log("")
                cherrypy.log(u"params (raw): " + repr(data["params"]))
                cherrypy.log("")
            try:
                # GET passes params as a JSON string in the query — decode it
                data["params"] = json.loads(data["params"])
            except _ParseError, err:
                traceback_info = "".join(traceback.format_exception(*sys.exc_info()))
                cherrypy.log(traceback_info)
                # Malformed params: reply with a JSON-RPC ParseError response
                return json.dumps(
                    ParseErrorResponse(
                        data = unicode(err)
                    ).to_dict()
                )
        requests = [data]
        # NOTE(review): this excerpt appears truncated — `responses` and
        # `requests` are built but never used or returned in the visible code;
        # the POST branch and the dispatch loop are presumably below.
def mqlreadmulti(self, queries, asof=None):
    """read a structure query

    Batches several mql queries into one mqlread envelope, keyed q0..qN,
    and returns the individual results in the same order as `queries`.
    asof -- optional as_of_time applied to every sub-query.
    """
    keys = [('q%d' % i) for i, v in enumerate(queries)]
    envelope = {}
    for i, sq in enumerate(queries):
        subq = dict(query=sq, escape=False)
        if asof:
            subq['as_of_time'] = asof
        # XXX put this back once mqlreadmulti is working in general
        #if isinstance(sq, list):
        #    subq['cursor'] = True
        envelope[keys[i]] = subq
    service = '/api/service/mqlread'
    self.log.info('%s: %s', service, Delayed(logformat, envelope))
    qstr = json.dumps(envelope, separators=SEPARATORS)
    rs = self._httpreq_json(service, 'POST', form=dict(queries=qstr))
    self.log.info('%s result: %s', service, Delayed(json.dumps, rs, indent=2))
    return [self._mqlresult(rs[key]) for key in keys]
def mqlreadmulti(self, queries, asof=None):
    """read a structure query

    Wraps each query in its own envelope entry (q0, q1, ...) and issues a
    single batched mqlread, returning the results in input order.
    """
    keys = ['q%d' % idx for idx, _unused in enumerate(queries)]
    envelope = {}
    for idx, subquery in enumerate(queries):
        wrapped = dict(query=subquery, escape=False)
        if asof:
            wrapped['as_of_time'] = asof
        # XXX put this back once mqlreadmulti is working in general
        #if isinstance(subquery, list):
        #    wrapped['cursor'] = True
        envelope[keys[idx]] = wrapped
    service = '/api/service/mqlread'
    self.log.info('%s: %s', service, Delayed(logformat, envelope))
    qstr = json.dumps(envelope, separators=SEPARATORS)
    rs = self._httpreq_json(service, 'POST', form=dict(queries=qstr))
    self.log.info('%s result: %s', service, Delayed(json.dumps, rs, indent=2))
    return [self._mqlresult(rs[key]) for key in keys]
def bulk(self, docs, all_or_nothing=False):
    """Submit a batch of documents through CouchDB's _bulk_docs endpoint.

    Returns the decoded per-document result list on 201, raises
    CouchDBException otherwise.
    """
    payload = json.dumps({'docs': list(docs), 'all_or_nothing': all_or_nothing})
    response = self.http.post('_bulk_docs', body=payload)
    if response.status != 201:
        raise CouchDBException("Bulk update failed " + response.body)
    return json.loads(response.body)
def facets(host='localhost:9200', facet_terms=('bibleverse',), _type='habakkuk', date_filter=(), size=10):
    """Collect term-facet counts from ES into a dict.

    facet_terms -- fields to facet on
    date_filter -- optional (start, end) date pair; bounds are sent as
                  ISO strings, upper bound exclusive
    size        -- buckets per facet

    Returns {facet_name: [{"value": term, "count": n}, ...]}.

    FIX: defaults changed from mutable lists to tuples (same values) so the
    defaults can never be mutated across calls.
    """
    ret = {}
    conn = ES(host)
    q = MatchAllQuery()
    if date_filter:
        start, end = date_filter
        q = FilteredQuery(q, RangeFilter(qrange=ESRange('created_at_date',
                                                        start.isoformat(),
                                                        end.isoformat(),
                                                        include_upper=False)))
    q = q.search(size=0)  # facets only; no document hits needed
    for term in facet_terms:
        q.facet.add_term_facet(term, order='count', size=size)
    es_logger.info(q.serialize())
    resultset = conn.search(query=q, indices=_type + '-*', doc_types=[_type])
    for facet in resultset.facets:
        ret[facet] = []
        for row in resultset.facets[facet]['terms']:
            ret[facet].append({"value": row['term'], "count": row['count']})
    logger.debug("facets return|'%s'" % json.dumps(ret))
    return ret
def mqlwrite(self, sq, attribution_id=None, use_permission_of=None, **envelope):
    """do a mql write. For a more complete description, see
    http://www.freebase.com/docs/web_services/mqlwrite"""
    query = envelope.copy()
    query.update(query=sq, escape=False)
    if use_permission_of:
        query['use_permission_of'] = use_permission_of
    if attribution_id:
        # the API's envelope key is simply 'attribution'
        query['attribution'] = attribution_id
    serialized = json.dumps(query, separators=SEPARATORS)
    self.log.debug('MQLWRITE: %s', serialized)
    service = '/api/service/mqlwrite'
    self.log.info('%s: %s', service, Delayed(logformat, sq))
    response = self._httpreq_json(service, 'POST', form=dict(query=serialized))
    self.log.debug('MQLWRITE RESP: %r', response)
    return self._mqlresult(response)
def mqlreaditer(self, sq, asof=None):
    """read a structure query.

    Generator: yields result items one at a time, transparently paging
    through the server-side cursor until it is exhausted. Accepts either
    a bare query dict or a one-element list wrapping one.
    """
    # cursor=True asks the service to open a fresh cursor on the first call
    cursor = True
    service = '/api/service/mqlread'
    if isinstance(sq, (tuple, list)):
        if len(sq) > 1:
            raise MetawebError("You cannot ask mqlreaditer a query in the form: [{}, {}, ...], just [{}] or {}")
        sq = sq[0]
    while 1:
        subq = dict(query=[sq], cursor=cursor, escape=False)
        if asof:
            subq['as_of_time'] = asof
        qstr = json.dumps(subq, separators=SEPARATORS)
        r = self._httpreq_json(service, 'POST', form=dict(query=qstr))
        for item in self._mqlresult(r):
            yield item
        if r['cursor']:
            # a truthy cursor means more pages remain
            cursor = r['cursor']
            self.log.info('CONTINUING with %s', cursor)
        else:
            return
def mqlreaditer(self, sq, asof=None, headers=None, escape=False, **envelope):
    """read a structure query.

    Generator variant that issues GET requests with the query JSON-encoded
    into the form, paging via the returned cursor until exhausted.
    escape   -- forwarded as the service's html_escape envelope flag
    envelope -- extra envelope parameters merged into every request
    (headers is accepted but unused in the visible code)
    """
    # empty-string cursor requests a fresh cursor on the first call
    cursor = ''
    service = '/api/service/mqlread'
    if isinstance(sq, (tuple, list)):
        if len(sq) > 1:
            raise MetawebError("You cannot ask mqlreaditer a query in the form: [{}, {}, ...], just [{}] or {}")
        sq = sq[0]
    while 1:
        subq = envelope.copy()
        subq.update(query=json.dumps([sq]), cursor=cursor, html_escape=escape)
        if asof:
            subq['as_of_time'] = asof
        r = self._httpreq_json(service, 'GET', form=subq)
        for item in self._mqlresult(r):
            yield item
        if r['cursor']:
            # a truthy cursor means more pages remain
            cursor = r['cursor']
            self.log.info('CONTINUING with %s', cursor)
        else:
            return
def mqlreaditer(self, sq, asof=None):
    """read a structure query.

    Yields result items one at a time, following the server-side cursor
    across pages until it runs out.
    """
    service = '/api/service/mqlread'
    if isinstance(sq, (tuple, list)):
        if len(sq) > 1:
            raise MetawebError(
                "You cannot ask mqlreaditer a query in the form: [{}, {}, ...], just [{}] or {}"
            )
        sq = sq[0]
    cursor = True  # True opens a fresh cursor on the first request
    while True:
        page_query = dict(query=[sq], cursor=cursor, escape=False)
        if asof:
            page_query['as_of_time'] = asof
        encoded = json.dumps(page_query, separators=SEPARATORS)
        r = self._httpreq_json(service, 'POST', form=dict(query=encoded))
        for item in self._mqlresult(r):
            yield item
        cursor = r['cursor']
        if not cursor:
            return
        self.log.info('CONTINUING with %s', cursor)
def jsonp_transform(request, *args, **kwargs): response = v(request, *args, **kwargs) #assert isinstance(response, HttpResponse), "The function MUST return an HttpResponse object" if 'callback' in request.REQUEST and response.status_code == 200: cb = request.REQUEST['callback'] response['Content-type'] = 'application/json' if not validate_jsonp.is_valid_jsonp_callback_value(cb): return HttpResponse(json.dumps({'valid': False, 'message': '%s is not a valid jsonp callback identifier' % cb, 'status': 400}), mimetype='application/json') response.content = (u'%s(%s)' % (cb, response.content.decode('utf-8'))) return response elif 'callback' in request.REQUEST and response.status_code >= 400: #is an error return HttpResponse(json.dumps({'valid': False, 'message': response.content, 'status': response.status_code}), mimetype='application/json') else: return HttpResponse(json.dumps({'valid': False, 'message': 'No jsonp callback provided'}), mimetype='application/json')
def _check_mqlerror(self, r):
    """Raise MetawebError unless the mql envelope status is ok.

    Logs every error message in the envelope, then raises using the first
    message's code, text, and (pretty-printed) offending query.
    """
    if r.code != '/api/status/ok':
        for msg in r.messages:
            self.log.error('mql error: %s %s %r' % (msg.code, msg.message, msg.get('query', None)))
        raise MetawebError, 'query failed: %s\n%s\n%s' % (
            r.messages[0].code,
            r.messages[0].message,
            json.dumps(r.messages[0].get('query', None), indent=2))
def maybe_dumps(s):
    """Encode dicts and lists as JSON strings; return any other value unchanged."""
    return json.dumps(s) if isinstance(s, (dict, list)) else s
def jsonOut(jj, path):
    """Serialize `jj` as JSON and write it to `path`.

    Best-effort: on any failure an error line is written to stderr instead
    of raising.
    """
    try:
        jsondata = json.dumps(jj)
        myFile = open(path, 'wb')
        try:
            myFile.write(jsondata)
        finally:
            # BUG FIX: the file is now closed even when write() fails
            myFile.close()
    except Exception:
        # BUG FIX: narrowed from a bare "except:"; also report the path —
        # the original printed `myFile`, which is unbound when open() fails
        # (raising NameError inside the handler).
        print >> sys.stderr, 'ERROR writing JSON file', path
def get_users(request):
    """Given an app, get its users as a JSON list of {'k', 'val'} entries."""
    if 'appId' not in request.REQUEST:
        return HttpResponseBadRequest()
    app = ClientApp.get_for_token(request.REQUEST['appId'])
    users = [{'k': u.clientId, 'val': u.clientId} for u in app.users.iterator()]
    return HttpResponse(json.dumps(users, ensure_ascii=False),
                        mimetype="application/json")
def logformat(result):
    """Format the dict/list as a json object.

    Dicts have their outer braces (and the leading/trailing newline of the
    closing brace) trimmed so the body nests cleanly in log output.
    """
    text = json.dumps(result, indent=2)
    if text.startswith('{'):
        return text[1:-2]
    return text
def cmd_dump_type(fb, typeid, follow_types=True):
    """dump a type to stdout
    %prog dump_type typeid [follow_types=True]

    Dump a type by outputting a json representation of the type and properties involved.
    """
    dumped = dump_type(fb.mss, typeid, follow_types)
    print >> sys.stdout, json.dumps(dumped, indent=2)
def cmd_dump_base(fb, baseid):
    """dump a base to stdout
    %prog dump_base baseid

    Dump a base by outputting a json representation of the types and properties involved.
    """
    dumped = dump_base(fb.mss, baseid)
    print >> sys.stdout, json.dumps(dumped, indent=2)
def all(self, keys=None, include_docs=True, **kwargs):
    """Fetch documents via CouchDB's _all_docs, returned as a RowSet.

    keys         -- optional list of doc ids (POSTed as {"keys": [...]})
    include_docs -- ask the server to inline each document
    kwargs       -- further query options, JSON-encoded into the query string

    NOTE(review): the query-string values are json-encoded but never
    URL-escaped — verify this is safe for the option values actually used.
    """
    kwargs['include_docs'] = include_docs
    qs = '&'.join( k+'='+json.dumps(v) for k,v in kwargs.iteritems() )
    if keys:
        response = self.db.http.post('_all_docs?' + qs, body=json.dumps({"keys": keys}))
    else:
        response = self.db.http.get('_all_docs?' + qs)
    if response.status == 200:
        result = json.loads(response.body)
        # Normalize alldocs to a standard view result for RowSet:
        # move the inlined doc into 'value' and surface its rev
        for row in result['rows']:
            if 'doc' in row:
                row['rev'] = row['value']['rev']
                row['value'] = row['doc']
        return RowSet(self.db, result['rows'], offset=result.get('offset', None), total_rows=result.get('total_rows', None))
    else:
        raise Exception(response.body)
def _check_mqlerror(self, r):
    """Raise MetawebError if the mql envelope status is anything but ok,
    after logging every error message it carries."""
    if r.code == "/api/status/ok":
        return
    for msg in r.messages:
        self.log.error("mql error: %s %s %r" % (msg.code, msg.message, msg.get("query", None)))
    first = r.messages[0]
    raise MetawebError("query failed: %s\n%s\n%s" % (
        first.code,
        first.message,
        json.dumps(first.get("query", None), indent=2),
    ))
def user_info(self, mql_output=None):
    """ get user_info. For a more complete description, see
    http://www.freebase.com/view/guid/9202a8c04000641f800000000c36a842"""
    service = "/api/service/user_info"
    form = dict(mql_output=json.dumps(mql_output, separators=SEPARATORS))
    return self._httpreq_json(service, "POST", form=form)
def rank_phrases_and_store(doc):
    """
    rank phrases by cluster size, score and es_score, etc
    :param doc: topic document with 'date' and 'cluster_topics'
    :return: None (results are indexed into ES)
    """
    ret = []
    rank = 1
    # save the 'date'
    _date = doc['date']
    # FIX: removed a stray unused "import sys" that was left mid-function.
    # loop over clusters, ranked by the topic_sort_key
    for cluster in sorted(doc['cluster_topics'], key=topic_sort_key):
        for topic in cluster.get('topics', []):
            # now perform hierarchical clustering to group
            # similar phrases together.
            phrase_clusters = hac(topic)
            # FIX: renamed the inner loop variable (was also "cluster"),
            # which shadowed the outer loop variable.
            for phrase_cluster in phrase_clusters:
                # now get the first entry from each phrase_cluster
                # assumes the other entries are similar enough to ignore
                phrase = sorted(phrase_cluster, key=phrase_sort_key, reverse=True)[0]
                # See phrase_search() for is_spam meaning
                if phrase.get('is_spam'):
                    continue
                # append to the list with its rank
                ret.append({
                    'phrase': phrase['es_phrase'],
                    'bibleverse': phrase['bibleverse'],
                    "search_text": "+".join(phrase['text'].split()),
                    "rank": rank,
                    "date": _date
                })
                rank += 1
    # Now store the ranked results in ES, replacing any prior docs for _date
    conn = get_es_connection()
    es_settings = ESSettings()
    q = TermQuery(field='date', value=_date)
    result = conn.delete_by_query(es_settings.topics_index,
                                  [es_settings.phrases_es_type], q)
    logger.info("[rank_results] : delete complete. index=%s, type=%s, query='%s'",
                es_settings.topics_index,
                es_settings.phrases_es_type,
                json.dumps(q.search().serialize()))
    for phrase_doc in ret:
        conn.index(doc=phrase_doc,
                   index=es_settings.topics_index,
                   doc_type=es_settings.phrases_es_type)
    logger.info("Wrote %d docs to index=%s, type=%s, date=%s",
                len(ret), es_settings.topics_index,
                es_settings.phrases_es_type, _date)
def detail_json(request, event_id):
    """Show raw JSON doc"""
    requested = request.GET.get('type')
    types = [requested] if requested else _TYPES
    result = get_details(event_id, types)
    return HttpResponse(json.dumps(result, indent=2, sort_keys=True),
                        mimetype='application/json; charset=UTF-8')
def set_profile(request):
    """Temporal, only for the demo; circumvent the jsonp convention and just
    get the user's graph.

    This view can only be called from the same domain: a jsonp call will
    fail, so that's kinda secure.
    """
    if not hasattr(request, 'profile'):
        return HttpResponseBadRequest('No profile could be set')
    graph = _get_profile_graph(request.profile)
    return HttpResponse(json.dumps({'graph': graph}), mimetype='application/json')
def handle_notifications(doc, req):
    """Create a notification document plus its child job documents.

    The request body replaces `doc`; each job returned by func() is stored
    with a back-reference to the notification. Returns the notification
    dict and its JSON serialization.
    """
    database = Database(req['db'])
    notification = json.loads(req['body'])
    notification['_id'] = str(uuid.uuid1())
    notification['type'] = 'notification'
    job_docs = func(notification, req, database)
    for job in job_docs:
        job['parent-notification'] = notification['_id']
        created = database.create(job)
        job['_id'] = created['id']
    notification['jobs'] = job_docs
    return notification, json.dumps(notification)
def test_topics_api_view(self):
    """POST to /api/topics/ with get_topics mocked; verify the payload is
    echoed under 'topic_results' and the request offset is returned."""
    # canned get_topics() return value: three verses with their phrases
    mock_return = {
        'count': 5,
        'topics' : [
            {
                "bibleverse" : "luke 6:27",
                "phrases" : [
                    {
                        "es_phrase": "love your enemies, do good to those who hate you",
                        "bibleverse": "luke 6:27",
                        "search_url" : "http://localhost:8000/biblestudy/?search=enemies+good"
                    }
                ]
            },
            {
                "bibleverse": "matthew 6:34",
                "phrases" : [
                    {
                        "es_phrase": "don\u2019t worry about tomorrow",
                        "bibleverse" : "matthew 6:34",
                        "search_url" : "http://localhost:8000/biblestudy/?search=worry+tomorrow"
                    }
                ]
            },
            {
                "bibleverse" : "matthew 8:8",
                "phrases" : [{
                    "es_phrase": "some more text",
                    "bibleverse" : "matthew 8:8",
                    "search_url" : "http://localhost:8000/biblestudy/?search=worry+tomorrow"
                }],
            }
        ]
    }
    with patch('web.views.get_topics', return_value=mock_return) as mock_get_topics:
        client = Client()
        response = client.post('/api/topics/', content_type="application/json",
                               data=json.dumps({'size':10, 'offset': 99}))
        try:
            ret = json.loads(response.content)
        except:
            # NOTE(review): bare except — any failure here (not just a JSON
            # parse error) is reported as this test failure
            self.fail("Could not parse the response from topics_api \n{}".format(response.content))
        self.assertEquals(200, response.status_code)
        self.assertTrue(ret["topic_results"])
        self.assertTrue(ret['topic_results'].get('count'))
        self.assertTrue(ret['topic_results'].get('topics'))
        # the offset sent in the request must be echoed back
        self.assertEquals(99, ret['offset'])
        self.assertTrue(mock_get_topics.called)
def wrap(req, *args, **kwargs):
    """Decode the JSON request body (falling back to request.REQUEST) and
    JSON-encode the wrapped view's result unless it is already an
    HttpResponse."""
    try:
        payload = json.loads(req.raw_post_data)
    except ValueError:
        #this means that the necessary data is in the request.REQUEST
        payload = req.REQUEST or None
    result = func(req, payload, *args, **kwargs)
    if isinstance(result, HttpResponse):
        return result
    return HttpResponse(json.dumps(result, ensure_ascii=False),
                        mimetype="application/json")
def create(self, doc, all_or_nothing=False):
    """Create a document.

    Accepts any object that can be converted in to a dict. If multiple
    documents are passed they are handed off to the bulk document handler.
    """
    bulk_types = (list, tuple, types.GeneratorType, RowSet)
    if type(doc) not in (dict, Document) + bulk_types:
        doc = dict(doc)
    # Hand off to bulk handler when passing multiple documents
    if type(doc) in bulk_types:
        return self.bulk(doc, all_or_nothing=all_or_nothing)
    response = self.http.post('', body=json.dumps(doc))
    if response.status != 201:
        raise CouchDBException(response.body)
    return json.loads(response.body)
def http(environ, respond):
    """Minimal WSGI endpoint: 'debug' returns listener/subscription state as
    JSON; any other path feeds the request body to proc() and replies 204."""
    path = environ['PATH_INFO'] or ''
    if path.startswith('/'):
        path = path[1:]
    path = path.split('/', 1)[0]
    if path == 'debug':
        state = {
            'listeners': map(repr, listeners.keys()),
            'subscriptions': subscriptions,
        }
        respond('200 OK', [('Content-Type', 'text/json')])
        return [dumps(state)]
    proc(environ['wsgi.input'].read())
    respond('204 OK', [])
    return ''
def mqlcheck(self, sq):
    """ See if a write is valid, and see what would happen, but do not
    actually do the write """
    qstr = json.dumps(dict(query=sq, escape=False), separators=SEPARATORS)
    self.log.debug('MQLCHECK: %s', qstr)
    service = '/api/service/mqlcheck'
    self.log.info('%s: %s', service, Delayed(logformat, sq))
    r = self._httpreq_json(service, 'POST', form=dict(query=qstr))
    self.log.debug('MQLCHECK RESP: %r', r)
    return self._mqlresult(r)
def mqlread(self, sq, asof=None):
    """read a structure query. For a more complete description, see
    http://www.freebase.com/view/en/api_service_mqlread

    sq   -- the mql query (dict, or list for multi-result queries)
    asof -- optional as_of_time for the envelope
    """
    subq = dict(query=sq, escape=False)
    if asof:
        subq['as_of_time'] = asof
    if isinstance(sq, list):
        # list-shaped queries return multiple results, so request a cursor
        subq['cursor'] = True
    service = '/api/service/mqlread'
    self.log.info('%s: %s', service, Delayed(logformat, sq))
    qstr = json.dumps(subq, separators=SEPARATORS)
    r = self._httpreq_json(service, 'POST', form=dict(query=qstr))
    return self._mqlresult(r)
def mqlread(self, sq, asof=None, headers=None, escape=True, **envelope):
    """read a structure query. For a more complete description, see
    http://www.freebase.com/docs/web_services/mqlread

    The headers parameter is rejected; extra envelope keys are forwarded.
    """
    if headers:
        raise Exception("Headers parameter not supported")
    form = envelope.copy()
    # TODO: warn that default changed for escape?
    form.update(query=json.dumps(sq), html_escape=escape)
    if asof:
        form['as_of_time'] = asof
    if isinstance(sq, list):
        # list-shaped queries page; ask for a fresh cursor
        form['cursor'] = ''
    service = '/api/service/mqlread'
    self.log.info('%s: %s', service, Delayed(logformat, sq))
    return self._mqlresult(self._httpreq_json(service, 'GET', form=form))
def mqlreaditer(self, sq, asof=None, headers=None, escape=False, **envelope):
    """read a structure query.

    Yields result items one at a time, paging via GET requests and the
    server-side cursor until it is exhausted.
    """
    service = '/api/service/mqlread'
    if isinstance(sq, (tuple, list)):
        if len(sq) > 1:
            raise MetawebError(
                "You cannot ask mqlreaditer a query in the form: [{}, {}, ...], just [{}] or {}"
            )
        sq = sq[0]
    cursor = ''  # empty string opens a fresh cursor on the first request
    while True:
        form = envelope.copy()
        form.update(query=json.dumps([sq]), cursor=cursor, html_escape=escape)
        if asof:
            form['as_of_time'] = asof
        r = self._httpreq_json(service, 'GET', form=form)
        for item in self._mqlresult(r):
            yield item
        cursor = r['cursor']
        if not cursor:
            return
        self.log.info('CONTINUING with %s', cursor)
def printLine(obj): print json.dumps(obj)
# NOTE(review): fragment — the first statements below are the tail of a
# jsonifyMessage(msg) helper whose def line and header-key loop start
# before this excerpt; confirm against the full file.
    if not json_msg.get(k):
        continue
    # strip all whitespace from the header value and split the
    # comma-separated list into individual entries
    json_msg[k] = json_msg[k].replace('\n', '').replace('\t', '').replace('\r' , '').replace(' ', '').decode('utf-8', 'ignore').split(',')
try:
    # collect every non-multipart part of the message body
    for part in msg.walk():
        json_part = {}
        if part.get_content_maintype() == 'multipart':
            continue
        json_part['contentType'] = part.get_content_type()
        content = part.get_payload(decode=False).decode('utf-8', 'ignore')
        json_part['content'] = cleanContent(content)
        json_msg['parts'].append(json_part)
except Exception, e:
    sys.stderr.write('Skipping message - error encountered (%s)' % (str(e), ))
finally:
    # NOTE(review): returning from finally suppresses any in-flight
    # exception — presumably intentional (skip bad messages), but confirm
    return json_msg

# Note: opening in binary mode is recommended
mbox = mailbox.UnixMailbox(open(MBOX, 'rb'), email.message_from_file)

# Convert every message in the mbox and print them as one JSON array
json_msgs = []
while 1:
    msg = mbox.next()
    if msg is None:
        break
    json_msgs.append(jsonifyMessage(msg))
print json.dumps(json_msgs, indent=4)
# NOTE(review): fragment — this try block is the tail of a
# jsonifyMessage(msg) helper whose opening lines are not in this excerpt.
try:
    # collect every non-multipart part of the message body
    for part in msg.walk():
        json_part = {}
        if part.get_content_maintype() == 'multipart':
            continue
        json_part['contentType'] = part.get_content_type()
        content = part.get_payload(decode=False).decode('utf-8', 'ignore')
        json_part['content'] = cleanContent(content)
        json_msg['parts'].append(json_part)
except Exception, e:
    sys.stderr.write('Skipping message - error encountered (%s)' % (str(e), ))
finally:
    # NOTE(review): returning from finally suppresses any in-flight exception
    return json_msg

#Note: opening in binary mode is recommended
mbox = mailbox.UnixMailbox(open(MBOX, 'rb'), email.message_from_file)

def gen_json_msgs(m_box):
    # lazily convert each mailbox message to its JSON-ready dict
    while 1:
        msg = m_box.next()
        if msg is None:
            break
        yield jsonifyMessage(msg)

# NOTE(review): json.dump/json.dumps cannot serialize a generator object —
# both calls below presumably raise TypeError and should wrap the generator
# in list(...); confirm before relying on this script.
if OUT_FILE:
    json.dump(gen_json_msgs(mbox),open(OUT_FILE, 'wb'), indent=4)
else:
    print json.dumps(gen_json_msgs(mbox), indent=4)
def gen_json_msgs(m_box): while 1: msg = m_box.next() if msg is None: break print json.dumps(jsonifyMessage(msg))
# Note that you need to include imports used by your mapper # inside the function definition from dateutil.parser import parse from datetime import datetime as dt if doc.get('Date'): # [year, month, day, hour, min, sec] _date = list(dt.timetuple(parse(doc['Date']))[:-3]) yield (_date, doc) # Specify an index to back the query. Note that the index won't be # created until the first time the query is run view = ViewDefinition('index', 'by_date_time', dateTimeToDocMapper, language='python') view.sync(db) # Now query, by slicing over items sorted by date start = [int(i) for i in START_DATE.split("-")] end = [int(i) for i in END_DATE.split("-")] print 'Finding docs dated from %s-%s-%s to %s-%s-%s' % tuple(start + end) docs = [] for row in db.view('index/by_date_time', startkey=start, endkey=end): docs.append(db.get(row.id)) print json.dumps(docs, indent=4)
def __init__(self, r):
    """Wrap a search record: capture its id and type from the record's
    _meta, its JSON serialization, and the record object itself."""
    meta = r._meta
    self.name = meta["id"]
    self.type = meta["type"]
    self.data = json.dumps(r)
    self.record = r
def jsonlib_dumps(o, **kwargs):
    """This one is separate because jsonlib doesn't allow specifying
    separators.

    See jsonlib.dumps for details on kwargs.
    """
    encoded = jsonmod.dumps(o, **kwargs)
    return _squash_unicode(encoded)
def dumps(o, **kwargs):
    """Serialize object to JSON str.

    See %s.dumps for details on kwargs.
    """ % jsonmod
    # BUG FIX: the original always passed separators=(',', ':') positionally
    # alongside **kwargs, so any caller supplying its own 'separators' kwarg
    # got a duplicate-keyword TypeError. setdefault keeps the compact default
    # while letting callers override it.
    kwargs.setdefault('separators', (',', ':'))
    return _squash_unicode(jsonmod.dumps(o, **kwargs))