Example #1
File: main.py Project: tav/open-map
def parse_types_data():
    data = decode_json(get_types_data())
    updated = data['feed']['updated']['$t']
    skipped = []
    i = 1
    entry = None
    def skip(reason, append=skipped.append):
        append((i, entry['content']['$t'], reason))
        return
    seen = set(); add_seen = seen.add
    types = []
    for entry in data['feed']['entry']:
        i += 1
        if entry['gsx$inuse']['$t'].strip() != 'Y':
            skip("This type is no longer in use")
            continue
        name = entry['gsx$name']['$t'].strip()
        if not name:
            skip("The name cannot be empty")
            continue
        if name in seen:
            skip("The type %s is already defined" % name)
            continue
        add_seen(name)
        desc = entry['gsx$desc']['$t'].strip()
        quote = entry['gsx$quote']['$t'].strip()
        types.append([name, i-1, desc, quote])
    types.sort()
    return updated, types, skipped
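In these snippets, decode_json is a project-level alias for a standard-library JSON decoder (the standalone scripts in Examples 17 and 18 bind it with `from json import load as decode_json`; snippets that pass strings presumably alias json.loads instead). The sketch below, using an invented minimal payload, shows the gsx$/"$t" structure that parse_types_data walks, which appears to be the old Google Spreadsheets feed format:

from json import loads as decode_json  # assumed alias; Examples 17-18 use json.load instead

# Invented minimal payload in the gsx$/"$t" shape that parse_types_data expects.
data = decode_json("""
{"feed": {"updated": {"$t": "2013-01-01T00:00:00.000Z"},
          "entry": [{"content": {"$t": "Hackspace"},
                     "gsx$inuse": {"$t": "Y"},
                     "gsx$name": {"$t": "Hackspace"},
                     "gsx$desc": {"$t": "A shared workshop"},
                     "gsx$quote": {"$t": ""}}]}}
""")
entry = data['feed']['entry'][0]
print(entry['gsx$name']['$t'])  # -> Hackspace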
Example #2
    def updateValues(self):
        
        json = None
        
        try:
            resp = urlopen(self.QUERY_PAGE + self.LOCATION + "&units=" + self.UNITS + "&APPID=" + self.APPID)
            json = decode_json(resp.read())
        except:
            pass

        if json and "main" in json.keys():

            self.last_update = datetime.now()

            if "temp" in json["main"].keys():
                if self.DEBUG:
                    print "curr_Temp:", float(json["main"]["temp"]), "C"
                self.temp = float(json["main"]["temp"])

            if "humidity" in json["main"].keys():
                if self.DEBUG:
                    print "curr_Humidity:", int(json["main"]["humidity"]), "%"
                self.humid = int(json["main"]["humidity"])
            
        elif self.DEBUG:
            print "Error getting values from Server"
Example #3
    def post(self,
             api_method,
             http_method='POST',
             expected_status=(200, ),
             **extra_params):

        if not (api_method.startswith('http://')
                or api_method.startswith('https://')):
            api_method = '%s%s%s' % (self.service_info['default_api_prefix'],
                                     api_method,
                                     self.service_info['default_api_suffix'])

        if self.token is None:
            self.token = OAuthAccessToken.get_by_key_name(self.get_cookie())

        fetch = urlfetch(url=api_method,
                         payload=self.get_signed_body(api_method, self.token,
                                                      http_method,
                                                      **extra_params),
                         method=http_method)

        if fetch.status_code not in expected_status:
            raise ValueError("Error calling... Got return status: %i [%r]" %
                             (fetch.status_code, fetch.content))

        return decode_json(fetch.content)
Example #4
def main(mountpoint='~', debug=False):
	mountpoint = expanduser(mountpoint)
	init_logging(debug)
	daemon = VmReadFSd(mountpoint)
	line = stdin.readline()
	while line:
		log.debug("in %s", line.strip())
		try:
			line = decode_json(line)
			for method, parameters in line.items():
				if method in set(('flush', 'getattr', 'open', 'opendir', 'readdir', 'release', 'releasedir', 'read', 'lookup', 'debug', )):
					method = getattr(daemon, method)
					line = method(*parameters)
					line = encode_json(dict(result=line))
					print(line)
					stdout.flush()
				else:
					raise Exception('unsupported method "{}"'.format(method))
		except OSError as e:
			log.exception(str(e))
			line = encode_json(dict(error=(e.errno, )))
			print(line)
			stdout.flush()
		except Exception as e:
			log.exception(str(e))
			line = encode_json(dict(error=(EBADMSG, )))
			print(line)
			stdout.flush()
		log.debug("out %s", line)
		line = stdin.readline()
Example #5
 def _send_receive(self, msg):
     self.daemon.stdin.write(bytes(encode_json(msg) + '\n', 'utf-8'))
     self.daemon.stdin.flush()
     msg = decode_json(str(self.daemon.stdout.readline(), 'utf-8'))
     if 'error' in msg:
         raise FUSEError(msg['error'][0])
     return msg['result']
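Examples 4 and 5 are two ends of the same line-oriented protocol: the client writes one JSON object per line to the daemon's stdin and reads one JSON object per line back, where {"result": ...} carries a reply and {"error": [errno]} carries a failure. A sketch of one round trip in that wire format, with the argument and result values invented and encode_json/decode_json assumed to be json.dumps/json.loads:

from json import dumps as encode_json, loads as decode_json  # assumed aliases

# 'getattr' is one of the methods Example 4 dispatches; the path and result are invented.
request = encode_json({'getattr': ['/some/path']}) + '\n'  # what _send_receive writes
reply = decode_json('{"result": {"st_size": 4096}}')       # a successful response line
failure = decode_json('{"error": [2]}')                     # errno 2, raised client-side as FUSEError
print(request.strip())
print(reply['result'])
print(failure['error'][0])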
Example #6
    def getJSON(self, url):
        json = dict()
        try:
            resp = urlopen(url)
            json = decode_json(resp.read())
        except:
            print "Error getting data from Server"

        return json
Example #7
def try_parse_res(res):
    """Try to parse a server response as JSON

    If it fails, will return code 'Server not running'"""

    parsed = dict(code=-5)
    if res is not None:
        try:
            parsed = decode_json(res.text)
        except JSONDecodeError:
            parsed["code"] = -1
    
    return parsed
Example #8
def get_exchange_rate_to_gbp(currency, cache={}):
    if currency == 'GBP':
        return 1
    if currency in cache:
        return cache[currency]
    rate = memcache.get('exchange:%s' % currency)
    if rate:
        return cache.setdefault(currency, rate)
    url = "https://rate-exchange.appspot.com/currency?from=%s&to=GBP" % currency
    try:
        rate = decode_json(urlfetch(url).content)['rate']
    except Exception, err:
        logging.error("currency conversion: %s" % err)
        return 0
Example #9
def get_exchange_rate_to_gbp(currency, cache={}):
    if currency == 'GBP':
        return 1
    if currency in cache:
        return cache[currency]
    rate = memcache.get('exchange:%s' % currency)
    if rate:
        return cache.setdefault(currency, rate)
    url = "https://rate-exchange.appspot.com/currency?from=%s&to=GBP" % currency
    try:
        rate = float(decode_json(urlfetch(url).content)['rate'])
    except Exception, err:
        logging.error("currency conversion: %s" % err)
        return 0
Example #10
File: main.py Project: tav/open-map
def profile(ctx, id):
    ctx.load_script = 'loadProfile'
    ctx.page = 'movement'
    entry = Entry.get_by_key_name(id)
    if not entry:
        raise NotFound
    info = decode_json(entry.json)
    # related = {}
    # for tag in entry.tags:
    #     for e in Entry.all().filter('tags =', tag).run(limit=1000):
    #         ename = e.name
    #         if ename not in related:
    #             related[ename] = []
    #         related[ename].append(tag)
    tweet = None
    if entry.twitter:
        tweet = get_tweet(entry.twitter)
    # tweet = get_tweet('wikihouse')
    images = memcache.get('images')
    if not images:
        images = get_images()
    ident = entry.key().name()
    display_image = None
    images_data = decode_json(images)
    if ident in images_data:
        x, y = images_data[ident]
        display_image = int(y / (x/440.))
    ctx.norm_id = norm_id
    # ctx.cache_response(63)
    return {
        'display_image': display_image,
        'ident': ident,
        'info': info,
        'tweet': tweet,
        'twitter_id': entry.twitter
    }
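In profile, images_data maps an entry's key name to an (x, y) pair, and display_image rescales it to a 440-pixel-wide column. A worked example with invented dimensions, assuming x is the image width and y its height:

# Invented dimensions: x = width, y = height (assumption).
x, y = 880.0, 660.0
print(int(y / (x / 440.)))  # -> 330, the height once the width is scaled down to 440px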
Example #11
def get_known_flags(start,
                    end,
                    url='https://segments.ligo.org',
                    ifo=None,
                    badonly=None):
    """Return the list of all flags with known segments

    Parameters
    ----------
    start : `int`
        the GPS start time of the query
    end : `int`
        the GPS end time of the query
    url : `str`, optional
        the FQDN of the target segment database
    ifo : `str`, optional
        the prefix for the IFO, if `None` all flags are returned
    badonly : `bool`, optional
        if given, only return flags whose `active_indicates_ifo_badness`
        metadata matches this value

    Returns
    -------
    flags : `list` of `str`
        a list of flag names (<ifo>:<name>:<version>) that are known by
        the database in the given [start, end) interval
    """
    start = int(to_gps(start))
    end = int(to_gps(end))
    uri = '%s/report/known?s=%d&e=%d' % (url, start, end)
    out = decode_json(urifunctions.getDataUrllib2(uri))

    def select_flag(f):
        if ifo is not None and f['ifo'] != ifo:
            return False
        if (badonly is not None
                and f['metadata']['active_indicates_ifo_badness'] != badonly):
            return False
        return True

    return sorted([
        '%s:%s:%d' % (f['ifo'], f['name'], f['version'])
        for f in out['results'] if select_flag(f)
    ])
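The segment-database reply that get_known_flags decodes only needs a results list whose entries carry ifo, name, version and the active_indicates_ifo_badness metadata field. A sketch of the filtering and formatting step over an invented response (flag names made up):

# Invented response in the shape get_known_flags expects from the /report/known endpoint.
out = {'results': [
    {'ifo': 'H1', 'name': 'SOME_FLAG', 'version': 1,
     'metadata': {'active_indicates_ifo_badness': False}},
    {'ifo': 'L1', 'name': 'SOME_FLAG', 'version': 2,
     'metadata': {'active_indicates_ifo_badness': True}},
]}
print(sorted('%s:%s:%d' % (f['ifo'], f['name'], f['version'])
             for f in out['results'] if f['ifo'] == 'H1'))  # -> ['H1:SOME_FLAG:1']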
Example #12
    def post(self, api_method, http_method='POST', expected_status=(200,), **extra_params):

        if not (api_method.startswith('http://') or api_method.startswith('https://')):
            api_method = '%s%s%s' % (
                self.service_info['default_api_prefix'], api_method,
                self.service_info['default_api_suffix']
                )

        if self.token is None:
            self.token = OAuthAccessToken.get_by_key_name(self.get_cookie())

        fetch = urlfetch(url=api_method, payload=self.get_signed_body(
            api_method, self.token, http_method, **extra_params
            ), method=http_method)

        if fetch.status_code not in expected_status:
            raise ValueError(
                "Error calling... Got return status: %i [%r]" %
                (fetch.status_code, fetch.content)
                )

        return decode_json(fetch.content)
Example #13
def get_known_flags(start, end, url='https://segments.ligo.org', ifo=None,
                    badonly=None):
    """Return the list of all flags with known segments

    Parameters
    ----------
    start : `int`
        the GPS start time of the query
    end : `int`
        the GPS end time of the query
    url : `str`, optional
        the FQDN of the target segment database
    ifo : `str`, optional
        the prefix for the IFO, if `None` all flags are returned
    badonly : `bool`, optional
        if given, only return flags whose `active_indicates_ifo_badness`
        metadata matches this value

    Returns
    -------
    flags : `list` of `str`
        a list of flag names (<ifo>:<name>:<version>) that are known by
        the database in the given [start, end) interval
    """
    start = int(to_gps(start))
    end = int(to_gps(end))
    uri = '%s/report/known?s=%d&e=%d' % (url, start, end)
    out = decode_json(urifunctions.getDataUrllib2(uri))

    def select_flag(f):
        if ifo is not None and f['ifo'] != ifo:
            return False
        if (badonly is not None and
                f['metadata']['active_indicates_ifo_badness'] != badonly):
            return False
        return True

    return sorted(['%s:%s:%d' % (f['ifo'], f['name'], f['version'])
                   for f in out['results'] if select_flag(f)])
Example #14
File: main.py Project: tav/open-map
def parse_data():
    types_data = memcache.get('types.data')
    if not types_data:
        types_data = Meta.get_by_key_name('types').json
    types = dict((k, v) for k, v, _, _ in decode_json(types_data))
    logging.info(repr(types))
    data = decode_json(get_data())
    updated = data['feed']['updated']['$t']
    atlas = []; add_atlas = atlas.append
    mosaic = []; add_mosaic = mosaic.append
    full = []; add_full = full.append
    skipped = []
    i = 1
    entry = None
    def skip(reason, append=skipped.append):
        append((i, entry['content']['$t'], reason))
        return
    seen = set(); add_seen = seen.add
    for entry in data['feed']['entry']:
        i += 1
        name = entry['gsx$name']['$t'].strip()
        if not name:
            skip("The name cannot be empty")
            continue
        ident = norm_id(name.lower())
        if ident in seen:
            skip("There is already a record with the id %r" % ident)
            continue
        add_seen(ident)
        geo = entry['gsx$geo']['$t'].strip()
        if not geo:
            skip("The geo coordinates cannot be empty")
            continue
        geo_split = geo.split(',')
        if len(geo_split) != 2:
            skip("Invalid geo coordinates: %s" % repr(geo)[1:])
            continue
        try:
            geo_split = map(float, geo_split)
        except:
            skip("Invalid geo coordinates: %s" % repr(geo)[1:])
            continue
        lat, lng = geo_split
        if not (-90 <= lat <= 90):
            skip("Invalid latitude: %r" % lat)
            continue
        if not (-180 <= lng <= 180):
            skip("Invalid longitude: %r" % lng)
            continue
        type = entry['gsx$type']['$t'].strip()
        if type not in types:
            skip("Invalid record type: %s" % repr(type)[1:])
            continue
        web = entry['gsx$web']['$t'].strip()
        if web:
            if not (web.startswith('http://') or web.startswith('https://')):
                skip("Invalid website URL: %s" % repr(web)[1:])
                continue
            if web.count('http://') > 1:
                skip("Invalid website URL: %s" % repr(web)[1:])
                continue
            if ' ' in web:
                skip("Invalid website URL: %s" % repr(web)[1:])
                continue
        # established = entry['gsx$est']['$t'].strip()
        # if established:
        #     try:
        #         established = int(established)
        #     except:
        #         skip("Invalid Est. year: %s" % repr(web)[1:])
        #         continue
        # else:
        #     established = None
        desc = entry['gsx$desc']['$t'].strip()
        if desc:
            tags = find_hashtags(desc)
        else:
            tags = []
        if name == 'Fablab Kamakura':
            tags.append('Awesome')
        elif name == 'Special Place':
            logging.info("TAGS: %s" % tags)
            tags.append('Hackspace')
        ltags = [tag.lower() for tag in tags]
        ltype = type.lower()
        if ltype not in ltags:
            ltags.append(ltype)
            tags.append(type)
        twitter = entry['gsx$twitter']['$t'].strip()
        if twitter:
            if twitter.startswith('https://twitter.com/'):
                twitter_id = twitter[20:]
            elif twitter.startswith('https://www.twitter.com/'):
                twitter_id = twitter[24:]
            else:
                twitter_id = twitter  # assumed fallback so the '/' check below never hits an undefined name
            if '/' in twitter_id:
                skip("Invalid Twitter ID: %s" % repr(twitter)[1:])
                continue
            else:
                twitter = twitter_id
        facebook = entry['gsx$facebook']['$t'].strip()
        if facebook:
            if not (
                facebook.startswith('http://www.facebook.com') or
                facebook.startswith('https://www.facebook.com')
                ):
                skip("Invalid Facebook Link: %s" % repr(facebook)[1:])
                continue
        country = entry['gsx$country']['$t'].strip()
        if country and len(country) == 2:
            cupper = country.upper()
            if cupper in COUNTRIES:
                country = COUNTRIES[cupper]
        address = '<br>'.join(filter(None, [
            entry['gsx$address1']['$t'].strip(),
            entry['gsx$address2']['$t'].strip(),
            entry['gsx$address3']['$t'].strip(),
            entry['gsx$address4']['$t'].strip(),
            entry['gsx$postcode']['$t'].strip(),
            country,
        ]))
        add_atlas((types[type], lat, lng, name, desc))
        add_mosaic((name, tags))
        add_full((
            ident, name, ltags, twitter,
            encode_json((name, desc, tags, web, twitter, facebook, address))
            ))
    return updated, atlas, mosaic, full, skipped
Example #15
 def get_plans(self):
     if not self.plans:
         return {'bronze': 0, 'silver': 0, 'gold': 0, 'platinum': 0}
     return decode_json(self.plans)
Example #16
        role_file = join(path, role) + '.json'
        if isfile(role_file):
            break
    else:
        exit("ERROR: Couldn't find a data file for the %r role." % role)

    try:
        role_file = open(role_file, 'rb')
    except IOError, error:
        exit("ERROR: %s: %s" % (error[1], error.filename))

    role_data = role_file.read()
    role_file.close()

    try:
        role_data = decode_json(role_data)
    except Exception:
        exit("ERROR: Couldn't decode the JSON input: %s" % role_file.name)

    packages = set(role_data['packages'])
    for package in packages:
        install_package(package)

    if 'requires' in role_data:
        packages.update(load_role(role_data['requires']))

    if role == 'base':
        for package in packages:
            BASE_PACKAGES.update([package])
            BASE_PACKAGES.update(get_dependencies(package))
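The role files read in Example 16 only need a packages list and an optional requires entry naming another role. A sketch of such a file's contents, with invented package and role names and decode_json assumed to be json.loads:

from json import loads as decode_json  # assumed alias

# Invented role file contents in the shape the snippet above reads.
role_data = decode_json('{"packages": ["nginx", "redis"], "requires": "base"}')
print(role_data['packages'])       # packages to install for this role
print(role_data.get('requires'))   # optional parent role, pulled in via load_role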
Example #17
#!/usr/bin/python

from urllib import urlopen
from json import load as decode_json
from sys import argv, exit

if len(argv) != 2:
  print("please provide a filename to save to as an argument")
  exit(1)
out_file_name = argv[1]

json_io = urlopen("http://www.reddit.com/r/wallpapers/top/.json")
listing = decode_json(json_io)
image_url = listing["data"]["children"][0]["data"]["url"] #lol
image_io = urlopen(image_url)
out_file = open(out_file_name, "w")
out_file.write(image_io.read())
out_file.close()
Example #18
#!/usr/bin/python

from urllib import urlopen
from json import load as decode_json
from sys import argv, exit

if len(argv) != 2:
    print("please provide a filename to save to as an argument")
    exit(1)
out_file_name = argv[1]

json_io = urlopen("http://www.reddit.com/r/wallpapers/top/.json")
listing = decode_json(json_io)
image_url = listing["data"]["children"][0]["data"]["url"]  #lol
image_io = urlopen(image_url)
out_file = open(out_file_name, "w")
out_file.write(image_io.read())
out_file.close()
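Examples 17 and 18 rely on a single path into Reddit's listing JSON. A sketch of that shape, with an invented image URL:

from json import loads

# Invented fragment of the /r/wallpapers/top/.json listing the scripts index into.
listing = loads('{"data": {"children": [{"data": {"url": "http://example.com/a.jpg"}}]}}')
print(listing["data"]["children"][0]["data"]["url"])  # the image URL that gets downloaded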
Example #19
def get_trailer_Yid(title, date):
    return json_to_Yid(decode_json(youtube_request(title, date)))
Example #20
File: model.py Project: tav/gitfund
 def get_plans(self):
     if not self.plans:
         return {'bronze': 0, 'silver': 0, 'gold': 0, 'platinum': 0}
     return decode_json(self.plans)