def main(mountpoint='~', debug=False):
    # Entry point: run a line-oriented JSON-RPC loop over stdin/stdout,
    # dispatching filesystem calls to a VmReadFSd daemon instance.
    #
    # Protocol (as implemented below): each stdin line is a JSON object
    # {method: parameters}; the reply is one JSON line, either
    # {"result": ...} on success or {"error": [errno]} on failure.
    mountpoint = expanduser(mountpoint)
    init_logging(debug)
    daemon = VmReadFSd(mountpoint)
    line = stdin.readline()
    while line:
        log.debug("in %s", line.strip())
        try:
            # `line` is reused: raw text -> decoded dict -> encoded reply.
            line = decode_json(line)
            for method, parameters in line.items():
                # Whitelist of daemon methods that may be invoked remotely.
                if method in set(('flush', 'getattr', 'open', 'opendir',
                                  'readdir', 'release', 'releasedir', 'read',
                                  'lookup', 'debug', )):
                    method = getattr(daemon, method)
                    line = method(*parameters)
                    line = encode_json(dict(result=line))
                    print(line)
                    stdout.flush()
                else:
                    raise Exception('unsupported method "{}"'.format(method))
        except OSError as e:
            # Filesystem errors are reported by errno so the peer can
            # reconstruct the matching FUSE error.
            log.exception(str(e))
            line = encode_json(dict(error=(e.errno, )))
            print(line)
            stdout.flush()
        except Exception as e:
            # Anything else (bad JSON, unknown method) becomes EBADMSG.
            log.exception(str(e))
            line = encode_json(dict(error=(EBADMSG, )))
            print(line)
            stdout.flush()
        log.debug("out %s", line)
        line = stdin.readline()
def _send_receive(self, msg):
    """Write *msg* as one JSON line to the daemon and return its 'result'.

    Raises FUSEError carrying the daemon-reported errno when the reply
    contains an 'error' payload instead of a 'result'.
    """
    pipe = self.daemon.stdin
    pipe.write(bytes(encode_json(msg) + '\n', 'utf-8'))
    pipe.flush()
    raw = self.daemon.stdout.readline()
    reply = decode_json(str(raw, 'utf-8'))
    if 'error' in reply:
        raise FUSEError(reply['error'][0])
    return reply['result']
def data_refresh(ctx, key=''):
    # Admin handler: re-parse the source feed and rewrite the datastore
    # Entry records plus the cached 'atlas' / 'mosaic' Meta blobs.
    is_auth(ctx, key)
    try:
        updated, atlas_data, mosaic_data, full, skipped = parse_data()
        updated = norm_datetime(updated)
        batch = []; add_entry = batch.append
        i = 0
        # NOTE: the loop variable `json` shadows any module-level `json`
        # name for the duration of this loop.
        for ident, name, tags, twitter, json in full:
            entry = Entry(key=create_key('Entry', ident))
            entry.json = json
            entry.name = name
            entry.tags = tags
            entry.twitter = twitter
            entry.updated = updated
            i += 1
            add_entry(entry)
            # Flush entities to the datastore in batches of 100.
            if i == 100:
                db.put(batch)
                del batch[:]
                i = 0
        if batch:
            db.put(batch)
        atlas = Meta.get_or_insert('atlas')
        atlas.updated = updated
        # Compact separators keep the cached JSON payloads small.
        atlas.json = encode_json(atlas_data, separators=(',', ':'))
        mosaic = Meta.get_or_insert('mosaic')
        mosaic.updated = updated
        mosaic.json = encode_json(mosaic_data, separators=(',', ':'))
        db.put([atlas, mosaic])
        memcache.set_multi({
            'atlas.data': atlas.json,
            'mosaic.data': mosaic.json,
        }, 60)
        # NOTE(review): the success path returns None here; either callers
        # treat a falsy result as success, or the tail of this function is
        # missing from this chunk -- confirm against the full file.
    except Exception, err:
        logging.critical(''.join(format_exception(*sys.exc_info())))
        # NOTE: this local shadows the stdlib `traceback` module name.
        traceback = ''.join(html_format_exception())
        return {
            'alert': "Error updating data: <div class='skip-entry'>%s</div>" % traceback,
            'skipped': []
        }
def types_refresh(ctx, key=''): is_auth(ctx, key) try: updated, data, skipped = parse_types_data() meta = Meta.get_or_insert('types') meta.updated = norm_datetime(updated) meta.json = encode_json(data, separators=(',', ':')) meta.put() memcache.set('types.data', meta.json, 60) except Exception, err: logging.critical(''.join(format_exception(*sys.exc_info()))) traceback = ''.join(html_format_exception()) return { 'alert': "Error updating types: <div class='skip-entry'>%s</div>" % traceback, 'skipped': [] }
def get_images():
    # Return the JSON blob mapping image key names to [width, height],
    # rebuilding it from the Image kind (via cursor pagination) when the
    # cached Meta record is empty.
    images = Meta.get_or_insert('images')
    # NOTE(review): get_or_insert never returns None, so the first half of
    # this condition looks redundant -- confirm before simplifying.
    if (not images) or (not images.json):
        images_data = {}
        cursor = None
        while 1:
            query = Image.all()
            if cursor:
                query = query.with_cursor(cursor)
            cursor = None
            i = 0
            # Fetch 101 so that hitting the 100th item signals "there may
            # be more"; query.cursor() then resumes the next page.
            for img in query.fetch(limit=101):
                i += 1
                if i == 100:
                    cursor = query.cursor()
                images_data[img.key().name()] = [img.width, img.height]
            if not cursor:
                break
        images.json = encode_json(images_data)
        images.put()
    images = images.json
    # add() is a no-op if the key already exists; cache for 60 seconds.
    memcache.add('images', images, 60)
    return images
def manifesto(ctx, profile=None):
    """Handler for the manifesto page.

    Returns a dict with the JSON-encoded slide list and the latest tweet
    for TWITTER_HOME_ACCOUNT, preferring memcached copies of both.
    """
    ctx.load_script = 'loadSlides'
    ctx.page = 'manifesto'
    prefix = 'manifesto-slide-'
    tweet_key = 'tweet.%s' % TWITTER_HOME_ACCOUNT
    # BUG FIX: memcache.get_multi() takes a *list* of keys; its second
    # positional argument is key_prefix. The original call passed a bare
    # string (iterated per character) plus tweet_key as a prefix, so this
    # lookup could never succeed.
    data = memcache.get_multi(['manifesto.slides', tweet_key])
    if 'manifesto.slides' in data:
        slides = data['manifesto.slides']
    else:
        # Rebuild the slide list from Image entities whose key names start
        # with the manifesto prefix (key-range scan, capped at 10).
        slides = []
        for img in Image.all().filter(
            '__key__ >=', create_key('Image', prefix)
        ).fetch(limit=10):
            key = img.key().name()
            if not key.startswith(prefix):
                break
            slides.append([key, img.width, img.height])
        slides = encode_json(slides)
        # BUG FIX: cache under the same key that is read above -- the
        # original stored 'manifesto.data', so the cache never hit.
        memcache.set('manifesto.slides', slides, 60)
    if tweet_key in data:
        tweet = data[tweet_key]
    else:
        tweet = get_tweet(TWITTER_HOME_ACCOUNT)
    return {'slides': slides, 'tweet': tweet}
def handle_http_request(
    env, start_response, dict=dict, isinstance=isinstance,
    urlunquote=urlunquote, unicode=unicode,
    get_response_headers=lambda: None
    ):
    # WSGI entry point: parse the request, route it to a registered
    # handler, run the renderer pipeline, and emit the response.
    #
    # The builtins in the signature are bound as defaults as a CPython
    # local-lookup micro-optimization. `get_response_headers` defaults to
    # a no-op so the `except HTTPContent` clause below can call it even
    # when an exception fires before the real nested definition runs.
    reqlocal.template_error_traceback = None
    try:
        http_method = env['REQUEST_METHOD']
        ssl_mode = env['wsgi.url_scheme'] == 'https'
        if http_method == 'OPTIONS':
            start_response(*RESPONSE_OPTIONS)
            return []
        if http_method not in SUPPORTED_HTTP_METHODS:
            start_response(*RESPONSE_NOT_IMPLEMENTED)
            return []
        # Split PATH_INFO into unicode path segments, dropping empties.
        _path_info = env['PATH_INFO']
        if isinstance(_path_info, unicode):
            _args = [arg for arg in _path_info.split(u'/') if arg]
        else:
            _args = [
                unicode(arg, 'utf-8', 'strict')
                for arg in _path_info.split('/') if arg
            ]
        # Parse the query string; both '&' and ';' separate parameters.
        kwargs = {}
        for part in [
            sub_part
            for part in env['QUERY_STRING'].lstrip('?').split('&')
            for sub_part in part.split(';')
        ]:
            if not part:
                continue
            part = part.split('=', 1)
            if len(part) == 1:
                value = None
            else:
                value = part[1]
            key = urlunquote(part[0].replace('+', ' '))
            if value:
                value = unicode(
                    urlunquote(value.replace('+', ' ')), 'utf-8', 'strict'
                )
            else:
                value = None
            # Repeated keys accumulate into a list.
            if key in kwargs:
                _val = kwargs[key]
                if isinstance(_val, list):
                    _val.append(value)
                else:
                    kwargs[key] = [_val, value]
                continue
            kwargs[key] = value
        ctx = Context(env, ssl_mode)
        # Prefer an installed router; otherwise the first path segment
        # names the handler and the rest become positional args.
        router = handle_http_request.router
        if router:
            _info = router(ctx, _args, kwargs)
            if not _info:
                logging.error("No handler found for: %s" % _path_info)
                raise NotFound
            name, args = _info
        else:
            if _args:
                name = _args[0]
                args = _args[1:]
            else:
                name = '/'
                args = ()
        if name not in HANDLERS:
            logging.error("Handler not found: %s" % name)
            raise NotFound
        handler, renderers, config = HANDLERS[name]
        json = config['json']
        # Parse the POST body if it exists and is of a known content type.
        if http_method == 'POST':
            content_type = env.get('CONTENT-TYPE', '')
            if not content_type:
                content_type = env.get('CONTENT_TYPE', '')
            if ';' in content_type:
                content_type = content_type.split(';', 1)[0]
            if json or content_type == 'application/json':
                payload = json_decode(env['wsgi.input'].read())
                # config['json'] may be True (merge payload into kwargs)
                # or a key name (store payload under that kwarg).
                if json and not (json is True):
                    kwargs[json] = payload
                else:
                    kwargs.update(payload)
            elif content_type in VALID_REQUEST_CONTENT_TYPES:
                post_environ = env.copy()
                post_environ['QUERY_STRING'] = ''
                if config['post_encoding']:
                    # Keep a copy of the raw body and re-wrap it so
                    # FieldStorage can still consume it.
                    ctx.request_body = env['wsgi.input'].read()
                    env['wsgi.input'] = StringIO(ctx.request_body)
                    post_encoding = config['post_encoding']
                else:
                    post_encoding = 'utf-8'
                post_data = FieldStorage(
                    environ=post_environ, fp=env['wsgi.input'],
                    keep_blank_values=True
                ).list or []
                for field in post_data:
                    key = field.name
                    if field.filename:
                        if config['blob']:
                            value = parse_blob_info(field)
                        else:
                            value = field
                    else:
                        value = unicode(field.value, post_encoding, 'strict')
                    # Repeated form keys accumulate into a list, mirroring
                    # the query-string handling above.
                    if key in kwargs:
                        _val = kwargs[key]
                        if isinstance(_val, list):
                            _val.append(value)
                        else:
                            kwargs[key] = [_val, value]
                        continue
                    kwargs[key] = value
        def get_response_headers():
            # Figure out the HTTP headers for the response ``cookies``,
            # then byte-encode everything for the WSGI layer. This nested
            # def rebinds the parameter of the same name above.
            cookie_output = SimpleCookie()
            for name, values in ctx._response_cookies.iteritems():
                name = str(name)
                cookie_output[name] = values.pop('value')
                cur = cookie_output[name]
                for key, value in values.items():
                    if key == 'max_age':
                        key = 'max-age'
                    if key not in COOKIE_KEY_NAMES:
                        continue
                    cur[key] = value
            if cookie_output:
                # SimpleCookie renders "Set-Cookie: ..." lines; strip the
                # header name that precedes the first space.
                raw_headers = ctx._raw_headers + [
                    ('Set-Cookie', ck.split(' ', 1)[-1])
                    for ck in str(cookie_output).split('\r\n')
                ]
            else:
                raw_headers = ctx._raw_headers
            str_headers = []; new_header = str_headers.append
            for k, v in raw_headers:
                if isinstance(k, unicode):
                    k = k.encode('utf-8')
                if isinstance(v, unicode):
                    v = v.encode('utf-8')
                new_header((k, v))
            return str_headers
        # Normalise well-known control parameters out of kwargs.
        if 'submit' in kwargs:
            del kwargs['submit']
        if 'callback' in kwargs:
            ctx.json_callback = kwargs.pop('callback')
        if env.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest':
            ctx.ajax_request = 1
        if '__ajax__' in kwargs:
            ctx.ajax_request = 1
            del kwargs['__ajax__']
        # Enforce per-handler config: ssl-only, XSRF, admin, auth.
        if config['ssl'] and RUNNING_ON_GOOGLE_SERVERS and not ssl_mode:
            raise NotFound
        if config['xsrf']:
            if 'xsrf' not in kwargs:
                raise AuthError("XSRF token not present.")
            provided_xsrf = kwargs.pop('xsrf')
            if not secure_string_comparison(provided_xsrf, ctx.xsrf_token):
                raise AuthError("XSRF tokens do not match.")
        if config['admin'] and not ctx.is_admin:
            raise NotFound
        if (not config['anon']) and (not ctx.user_id):
            # AJAX callers get a JSON auth-error payload; browsers get a
            # redirect to the login URL.
            if ctx.ajax_request:
                ctx.response_headers['Content-Type'] = 'application/json'
                raise HTTPContent(encode_json({
                    "error": {
                        "type": "AuthError",
                        "redirect": ctx.get_login_url()
                    }
                }))
            raise Redirect(ctx.get_login_url())
        # Try and respond with the result of calling the handler.
        content = handler(ctx, *args, **kwargs)
        # Thread the handler result through the renderer pipeline; string
        # renderers name Mako templates, callables are invoked directly.
        for renderer in renderers:
            if ctx.end_pipeline:
                break
            if content is None:
                content = { 'content': '' }
            elif not isinstance(content, dict):
                content = { 'content': content }
            if isinstance(renderer, str):
                content = ctx.render_mako_template(renderer, **content)
            else:
                content = renderer(ctx, **content)
        if content is None:
            content = ''
        elif isinstance(content, unicode):
            content = content.encode('utf-8')
        raise HTTPContent(content)
    # Return the content. HTTPContent doubles as the normal exit path.
    except HTTPContent, payload:
        content = payload.content
        if 'Content-Type' not in ctx.response_headers:
            ctx.response_headers['Content-Type'] = 'text/html; charset=utf-8'
        ctx.response_headers['Content-Length'] = str(len(content))
        # NOTE(review): ctx._status is presumably a (code, reason) tuple;
        # the trailing '\r\n' in the status string looks suspect -- confirm
        # against the WSGI server in use.
        start_response(('%d %s\r\n' % ctx._status), get_response_headers())
        if http_method == 'HEAD':
            return []
        return [content]
def handle_http_request(env, start_response, dict=dict, isinstance=isinstance,
                        urlunquote=urlunquote, unicode=unicode,
                        get_response_headers=lambda: None):
    # WSGI entry point: parse the request, route it to a registered
    # handler, run the renderer pipeline, and emit the response.
    #
    # NOTE(review): this is a token-identical re-definition of the
    # handle_http_request defined earlier in this file; at import time this
    # later definition shadows the earlier one -- confirm which is intended.
    #
    # The builtins in the signature are bound as defaults as a CPython
    # local-lookup micro-optimization. `get_response_headers` defaults to
    # a no-op so the `except HTTPContent` clause below can call it even
    # when an exception fires before the real nested definition runs.
    reqlocal.template_error_traceback = None
    try:
        http_method = env['REQUEST_METHOD']
        ssl_mode = env['wsgi.url_scheme'] == 'https'
        if http_method == 'OPTIONS':
            start_response(*RESPONSE_OPTIONS)
            return []
        if http_method not in SUPPORTED_HTTP_METHODS:
            start_response(*RESPONSE_NOT_IMPLEMENTED)
            return []
        # Split PATH_INFO into unicode path segments, dropping empties.
        _path_info = env['PATH_INFO']
        if isinstance(_path_info, unicode):
            _args = [arg for arg in _path_info.split(u'/') if arg]
        else:
            _args = [
                unicode(arg, 'utf-8', 'strict')
                for arg in _path_info.split('/') if arg
            ]
        # Parse the query string; both '&' and ';' separate parameters.
        kwargs = {}
        for part in [
            sub_part
            for part in env['QUERY_STRING'].lstrip('?').split('&')
            for sub_part in part.split(';')
        ]:
            if not part:
                continue
            part = part.split('=', 1)
            if len(part) == 1:
                value = None
            else:
                value = part[1]
            key = urlunquote(part[0].replace('+', ' '))
            if value:
                value = unicode(urlunquote(value.replace('+', ' ')),
                                'utf-8', 'strict')
            else:
                value = None
            # Repeated keys accumulate into a list.
            if key in kwargs:
                _val = kwargs[key]
                if isinstance(_val, list):
                    _val.append(value)
                else:
                    kwargs[key] = [_val, value]
                continue
            kwargs[key] = value
        ctx = Context(env, ssl_mode)
        # Prefer an installed router; otherwise the first path segment
        # names the handler and the rest become positional args.
        router = handle_http_request.router
        if router:
            _info = router(ctx, _args, kwargs)
            if not _info:
                logging.error("No handler found for: %s" % _path_info)
                raise NotFound
            name, args = _info
        else:
            if _args:
                name = _args[0]
                args = _args[1:]
            else:
                name = '/'
                args = ()
        if name not in HANDLERS:
            logging.error("Handler not found: %s" % name)
            raise NotFound
        handler, renderers, config = HANDLERS[name]
        json = config['json']
        # Parse the POST body if it exists and is of a known content type.
        if http_method == 'POST':
            content_type = env.get('CONTENT-TYPE', '')
            if not content_type:
                content_type = env.get('CONTENT_TYPE', '')
            if ';' in content_type:
                content_type = content_type.split(';', 1)[0]
            if json or content_type == 'application/json':
                payload = json_decode(env['wsgi.input'].read())
                # config['json'] may be True (merge payload into kwargs)
                # or a key name (store payload under that kwarg).
                if json and not (json is True):
                    kwargs[json] = payload
                else:
                    kwargs.update(payload)
            elif content_type in VALID_REQUEST_CONTENT_TYPES:
                post_environ = env.copy()
                post_environ['QUERY_STRING'] = ''
                if config['post_encoding']:
                    # Keep a copy of the raw body and re-wrap it so
                    # FieldStorage can still consume it.
                    ctx.request_body = env['wsgi.input'].read()
                    env['wsgi.input'] = StringIO(ctx.request_body)
                    post_encoding = config['post_encoding']
                else:
                    post_encoding = 'utf-8'
                post_data = FieldStorage(environ=post_environ,
                                         fp=env['wsgi.input'],
                                         keep_blank_values=True).list or []
                for field in post_data:
                    key = field.name
                    if field.filename:
                        if config['blob']:
                            value = parse_blob_info(field)
                        else:
                            value = field
                    else:
                        value = unicode(field.value, post_encoding, 'strict')
                    # Repeated form keys accumulate into a list, mirroring
                    # the query-string handling above.
                    if key in kwargs:
                        _val = kwargs[key]
                        if isinstance(_val, list):
                            _val.append(value)
                        else:
                            kwargs[key] = [_val, value]
                        continue
                    kwargs[key] = value
        def get_response_headers():
            # Figure out the HTTP headers for the response ``cookies``,
            # then byte-encode everything for the WSGI layer. This nested
            # def rebinds the parameter of the same name above.
            cookie_output = SimpleCookie()
            for name, values in ctx._response_cookies.iteritems():
                name = str(name)
                cookie_output[name] = values.pop('value')
                cur = cookie_output[name]
                for key, value in values.items():
                    if key == 'max_age':
                        key = 'max-age'
                    if key not in COOKIE_KEY_NAMES:
                        continue
                    cur[key] = value
            if cookie_output:
                # SimpleCookie renders "Set-Cookie: ..." lines; strip the
                # header name that precedes the first space.
                raw_headers = ctx._raw_headers + [
                    ('Set-Cookie', ck.split(' ', 1)[-1])
                    for ck in str(cookie_output).split('\r\n')
                ]
            else:
                raw_headers = ctx._raw_headers
            str_headers = []
            new_header = str_headers.append
            for k, v in raw_headers:
                if isinstance(k, unicode):
                    k = k.encode('utf-8')
                if isinstance(v, unicode):
                    v = v.encode('utf-8')
                new_header((k, v))
            return str_headers
        # Normalise well-known control parameters out of kwargs.
        if 'submit' in kwargs:
            del kwargs['submit']
        if 'callback' in kwargs:
            ctx.json_callback = kwargs.pop('callback')
        if env.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest':
            ctx.ajax_request = 1
        if '__ajax__' in kwargs:
            ctx.ajax_request = 1
            del kwargs['__ajax__']
        # Enforce per-handler config: ssl-only, XSRF, admin, auth.
        if config['ssl'] and RUNNING_ON_GOOGLE_SERVERS and not ssl_mode:
            raise NotFound
        if config['xsrf']:
            if 'xsrf' not in kwargs:
                raise AuthError("XSRF token not present.")
            provided_xsrf = kwargs.pop('xsrf')
            if not secure_string_comparison(provided_xsrf, ctx.xsrf_token):
                raise AuthError("XSRF tokens do not match.")
        if config['admin'] and not ctx.is_admin:
            raise NotFound
        if (not config['anon']) and (not ctx.user_id):
            # AJAX callers get a JSON auth-error payload; browsers get a
            # redirect to the login URL.
            if ctx.ajax_request:
                ctx.response_headers['Content-Type'] = 'application/json'
                raise HTTPContent(
                    encode_json({
                        "error": {
                            "type": "AuthError",
                            "redirect": ctx.get_login_url()
                        }
                    }))
            raise Redirect(ctx.get_login_url())
        # Try and respond with the result of calling the handler.
        content = handler(ctx, *args, **kwargs)
        # Thread the handler result through the renderer pipeline; string
        # renderers name Mako templates, callables are invoked directly.
        for renderer in renderers:
            if ctx.end_pipeline:
                break
            if content is None:
                content = {'content': ''}
            elif not isinstance(content, dict):
                content = {'content': content}
            if isinstance(renderer, str):
                content = ctx.render_mako_template(renderer, **content)
            else:
                content = renderer(ctx, **content)
        if content is None:
            content = ''
        elif isinstance(content, unicode):
            content = content.encode('utf-8')
        raise HTTPContent(content)
    # Return the content. HTTPContent doubles as the normal exit path.
    except HTTPContent, payload:
        content = payload.content
        if 'Content-Type' not in ctx.response_headers:
            ctx.response_headers['Content-Type'] = 'text/html; charset=utf-8'
        ctx.response_headers['Content-Length'] = str(len(content))
        # NOTE(review): ctx._status is presumably a (code, reason) tuple;
        # the trailing '\r\n' in the status string looks suspect -- confirm
        # against the WSGI server in use.
        start_response(('%d %s\r\n' % ctx._status), get_response_headers())
        if http_method == 'HEAD':
            return []
        return [content]
def set_plans(self, plans):
    """Serialise *plans* to a JSON string and store it on the instance."""
    encoded = encode_json(plans)
    self.plans = encoded
def parse_data():
    # Parse the Google Spreadsheet Atom/JSON feed into the three datasets
    # used by the site, validating each row as it goes.
    #
    # Returns: (updated, atlas, mosaic, full, skipped) where `skipped` is a
    # list of (row_index, row_content, reason) for rejected rows.
    types_data = memcache.get('types.data')
    if not types_data:
        types_data = Meta.get_by_key_name('types').json
    # types maps type-name -> type-value; the trailing two fields of each
    # 4-tuple in the cached JSON are ignored here.
    types = dict((k, v) for k, v, _, _ in decode_json(types_data))
    logging.info(repr(types))
    data = decode_json(get_data())
    updated = data['feed']['updated']['$t']
    atlas = []; add_atlas = atlas.append
    mosaic = []; add_mosaic = mosaic.append
    full = []; add_full = full.append
    skipped = []
    i = 1
    entry = None
    def skip(reason, append=skipped.append):
        # Record the current row (closes over the loop's `i` and `entry`).
        append((i, entry['content']['$t'], reason))
        return
    seen = set(); add_seen = seen.add
    for entry in data['feed']['entry']:
        i += 1
        name = entry['gsx$name']['$t'].strip()
        if not name:
            skip("The name cannot be empty")
            continue
        ident = norm_id(name.lower())
        if ident in seen:
            skip("There is already a record with the id %r" % ident)
            continue
        add_seen(ident)
        geo = entry['gsx$geo']['$t'].strip()
        if not geo:
            skip("The geo coordinates cannot be empty")
            continue
        geo_split = geo.split(',')
        if len(geo_split) != 2:
            skip("Invalid geo coordinates: %s" % repr(geo)[1:])
            continue
        try:
            # NOTE(review): relies on Python 2's eager map() to raise
            # ValueError here; under Python 3 map is lazy and this check
            # would silently pass.
            geo_split = map(float, geo_split)
        except:
            skip("Invalid geo coordinates: %s" % repr(geo)[1:])
            continue
        lat, lng = geo_split
        if not (-90 <= lat <= 90):
            skip("Invalid latitude: %r" % lat)
            continue
        if not (-180 <= lng <= 180):
            skip("Invalid longitude: %r" % lng)
            continue
        # NOTE: `type` shadows the builtin within this loop.
        type = entry['gsx$type']['$t'].strip()
        if type not in types:
            skip("Invalid record type: %s" % repr(type)[1:])
            continue
        web = entry['gsx$web']['$t'].strip()
        if web:
            if not (web.startswith('http://') or web.startswith('https://')):
                skip("Invalid website URL: %s" % repr(web)[1:])
                continue
            if web.count('http://') > 1:
                skip("Invalid website URL: %s" % repr(web)[1:])
                continue
            if ' ' in web:
                skip("Invalid website URL: %s" % repr(web)[1:])
                continue
        # established = entry['gsx$est']['$t'].strip()
        # if established:
        #     try:
        #         established = int(established)
        #     except:
        #         skip("Invalid Est. year: %s" % repr(web)[1:])
        #         continue
        # else:
        #     established = None
        desc = entry['gsx$desc']['$t'].strip()
        if desc:
            tags = find_hashtags(desc)
        else:
            tags = []
        # Hard-coded per-venue tag tweaks.
        if name == 'Fablab Kamakura':
            tags.append('Awesome')
        elif name == 'Special Place':
            logging.info("TAGS: %s" % tags)
            tags.append('Hackspace')
        # Ensure the record's type is always present as a tag.
        ltags = [tag.lower() for tag in tags]
        ltype = type.lower()
        if ltype not in ltags:
            ltags.append(ltype)
            tags.append(type)
        twitter = entry['gsx$twitter']['$t'].strip()
        if twitter:
            # NOTE(review): if `twitter` is non-empty but matches neither
            # prefix, `twitter_id` is unbound (or stale from a previous
            # iteration) -- this path raises NameError or misbehaves;
            # confirm intended handling of bare @handles.
            if twitter.startswith('https://twitter.com/'):
                twitter_id = twitter[20:]
            elif twitter.startswith('https://www.twitter.com/'):
                twitter_id = twitter[24:]
            if '/' in twitter_id:
                skip("Invalid Twitter ID: %s" % repr(twitter)[1:])
                continue
            else:
                twitter = twitter_id
        facebook = entry['gsx$facebook']['$t'].strip()
        if facebook:
            if not (
                facebook.startswith('http://www.facebook.com') or
                facebook.startswith('https://www.facebook.com')
            ):
                skip("Invalid Facebook Link: %s" % repr(facebook)[1:])
                continue
        # Expand 2-letter country codes to full names where known.
        country = entry['gsx$country']['$t'].strip()
        if country and len(country) == 2:
            cupper = country.upper()
            if cupper in COUNTRIES:
                country = COUNTRIES[cupper]
        address = '<br>'.join(filter(None, [
            entry['gsx$address1']['$t'].strip(),
            entry['gsx$address2']['$t'].strip(),
            entry['gsx$address3']['$t'].strip(),
            entry['gsx$address4']['$t'].strip(),
            entry['gsx$postcode']['$t'].strip(),
            country,
        ]))
        add_atlas((types[type], lat, lng, name, desc))
        add_mosaic((name, tags))
        add_full((
            ident, name, ltags, twitter,
            encode_json((name, desc, tags, web, twitter, facebook, address))
        ))
    return updated, atlas, mosaic, full, skipped
screen_name=id, count=1, exclude_replies=True, include_rts=False ) except Exception, err: logging.error("Got error fetching tweets for %r: %s" % (id, err)) meta = Meta.get_by_key_name(key) if meta: memcache.set(key, meta.json, 600) return meta.json tweets = [] if tweets: tweet = tweets[0] date = tweet['created_at'].split() info = [tweet['user']['screen_name']] info.append("%s %s %s" % (MONTHS[date[1]], ORDINALS[int(date[2])], date[-1])) info.append(tweet['text']) info = encode_json(info) else: info = 'null' meta = Meta.get_or_insert(key_name=key, json=info) meta.json = info meta.put() memcache.set(key, info, 600) return info # ----------------------------------------------------------------------------- # Admin Access # ----------------------------------------------------------------------------- def is_auth(ctx, key): if secure_string_comparison(key, ADMIN_SECRET): ctx.auth_key = key
for x, y in zip(auth, SECRET_KEY): total |= ord(x) ^ ord(y) if total != 0: return respond(RESPONSE_401, ERROR_401) try: content = dict(response=service(*args, **kwargs)) except CapabilityDisabledError: return respond(RESPONSE_503, ERROR_503) except Exception, error: logging.critical(''.join(format_exception(*sys.exc_info()))) content = dict( error=("%s: %s" % (error.__class__.__name__, error)) ) content = encode_json(content) headers = [ ("Content-Type", "application/json; charset=utf-8"), ("Content-Length", str(len(content))) ] if cache and http_method == 'GET': if isinstance(cache, int): duration = cache else: duration = 86400 headers.append(("Cache-Control", "public, max-age=%d;" % duration)) start_response("200 OK", headers) if http_method == 'HEAD': return []
    # (continuation: the opening `try:` of this import guard is outside
    # this chunk)
    from twisted.internet import reactor
except ImportError, e:
    raise RuntimeError("You need to install twisted")

from json import dumps as encode_json, loads as decode_json
import socket

# We define some constants
MAX_TELEX_BYTESIZE = 1400
SEEDS = [
    (socket.gethostbyname('telehash.org'), 42424)
]
# Pre-encoded telex used to request seeds from a switch.
TELEX_GET_SEEDS = encode_json({'+end': '3b6a6...'})

class Print(DatagramProtocol):
    # Debug protocol: print every received telex, learn new seeds from the
    # '_to' field, and re-send the seed request to the latest seed.
    def datagramReceived(self, data, (host, port)):
        print "Received from %s:%d:" % (host, port)
        telexContent = decode_json(data)
        print telexContent
        (host, port_str) = telexContent['_to'].split(':')
        # NOTE(review): BUG -- `int(port)` converts the *sender's* port,
        # not the parsed `port_str`; `port = int(port_str)` was almost
        # certainly intended. Confirm before fixing.
        port = int(port)
        print host, port
        if (host, port) not in SEEDS:
            SEEDS.append((host, port))
            print 'SEEDS:'
            print SEEDS
        self.transport.write(TELEX_GET_SEEDS, SEEDS[-1])