def run(self, name, *_, **kw):
    # Positional args.
    args = kw.get("args") or ()
    if isinstance(args, basestring):
        args = deserialize(args)

    # Keyword args.
    kwargs = kw.get("kwargs") or {}
    if isinstance(kwargs, basestring):
        kwargs = deserialize(kwargs)

    # Expires can be int/float.
    expires = kw.get("expires") or None
    try:
        expires = float(expires)
    except (TypeError, ValueError):
        # or a string describing an ISO 8601 datetime.
        try:
            expires = maybe_iso8601(expires)
        except (TypeError, ValueError):
            pass

    res = self.app.send_task(name, args=args, kwargs=kwargs,
                             countdown=kw.get("countdown"),
                             serializer=kw.get("serializer"),
                             queue=kw.get("queue"),
                             exchange=kw.get("exchange"),
                             routing_key=kw.get("routing_key"),
                             eta=maybe_iso8601(kw.get("eta")),
                             expires=expires)
    self.out(res.task_id)

def __init__(self, inputFile, **kwargs):
    if kwargs['groupby']:
        groupby = anyjson.deserialize(kwargs['groupby'])
    else:
        groupby = False

    self.map = Map(kwargs['name'], kwargs['language'], groupby)
    self.inputFile = inputFile
    self.features = {}
    self.where = kwargs['where']
    self.codes_file = kwargs['codes_file']
    self.width = kwargs['width']
    self.minimal_area = kwargs['minimal_area']
    self.country_name_index = kwargs['country_name_index']
    self.country_code_index = kwargs['country_code_index']
    self.longtitude0 = kwargs['longtitude0']
    self.inputFileEncoding = kwargs['input_file_encoding']
    self.projection = kwargs['projection']
    if kwargs['viewport']:
        self.viewport = map(lambda s: float(s), kwargs['viewport'].split(' '))
    else:
        self.viewport = False

    # spatial reference to convert to
    self.spatialRef = osr.SpatialReference()
    self.spatialRef.ImportFromProj4(
        '+proj=' + self.projection + ' +lat_0=0 +lon_0=' + self.longtitude0)

    # handle map insets
    if kwargs['insets']:
        self.insets = anyjson.deserialize(kwargs['insets'])
    else:
        self.insets = []

def do_test(klass, *args):
    with test_server(handler=tweetsource, methods=("post", "get"),
                     port="random") as server:
        stream = klass("foo", "bar", *args, url=server.baseurl, want_json=True)
        tweet = stream.next()
        assert isinstance(tweet, str)
        anyjson.deserialize(tweet)

def __init__(self, model):
    self.app = current_app._get_current_object()
    self.name = model.name
    self.task = model.task
    self.schedule = model.schedule
    try:
        self.args = deserialize(model.args or u'[]')
        self.kwargs = deserialize(model.kwargs or u'{}')
    except ValueError:
        logging.error('Failed to serialize arguments for %s.', self.name,
                      exc_info=1)
        logging.warning('Disabling %s', self.name)
        model.no_changes = True
        model.enabled = False
        model.save()

    self.options = {
        'queue': model.queue,
        'exchange': model.exchange,
        'routing_key': model.routing_key,
        'expires': model.expires,
    }
    self.total_run_count = model.total_run_count
    self.model = model

    if not model.last_run_at:
        model.last_run_at = self._default_now()
    orig = self.last_run_at = model.last_run_at
    if not is_naive(self.last_run_at):
        self.last_run_at = self.last_run_at.replace(tzinfo=None)
    assert orig.hour == self.last_run_at.hour  # timezone sanity

def getTriggersChecks(self):
    """
    getTriggersChecks(self) - Returns all triggers with their checks

    :rtype: json
    """
    triggers_ids = list((yield self.getTriggers()))
    triggers = []
    pipeline = yield self.rc.pipeline()
    for trigger_id in triggers_ids:
        pipeline.get(TRIGGER_PREFIX.format(trigger_id))
        pipeline.smembers(TRIGGER_TAGS_PREFIX.format(trigger_id))
        pipeline.get(LAST_CHECK_PREFIX.format(trigger_id))
        pipeline.get(TRIGGER_NEXT_PREFIX.format(trigger_id))
    results = yield pipeline.execute_pipeline()
    # Four results per trigger; regroup them by trigger id.
    slices = [[triggers_ids[i / 4]] + results[i:i + 4]
              for i in range(0, len(results), 4)]
    for trigger_id, trigger_json, trigger_tags, last_check, throttling in slices:
        if trigger_json is None:
            continue
        trigger = anyjson.deserialize(trigger_json)
        trigger = trigger_reformat(trigger, trigger_id, trigger_tags)
        trigger["last_check"] = (None if last_check is None
                                 else anyjson.deserialize(last_check))
        trigger["throttling"] = (long(throttling)
                                 if throttling and time.time() < long(throttling)
                                 else 0)
        triggers.append(trigger)
    defer.returnValue({"list": triggers})

def __init__(self, record):
    self.name = record.name
    self.task = record.task
    if record.interval:
        i = record.interval
        self.schedule = schedules.schedule(timedelta(**{i.period: i.every}))
    else:
        i = record.crontab
        # The original created the crontab schedule without assigning it;
        # it is assigned here so the entry actually uses it.
        self.schedule = schedules.crontab(minute=i.minute,
                                          hour=i.hour,
                                          day_of_week=i.day_of_week)
    try:
        self.args = deserialize(record.args or u"[]")
        self.kwargs = deserialize(record.kwargs or u"{}")
    except ValueError:
        record.update_record(no_changes=True, enabled=False)
        db(db.celery_periodictasks).update(last_update=datetime.now())
        db.commit()
        raise

    self.options = {"queue": record.queue,
                    "exchange": record.exchange,
                    "routing_key": record.routing_key,
                    "expires": record.expires}
    self.total_run_count = record.total_run_count or 0
    self.record = record

    if not record.last_run_at:
        record.update_record(last_run_at=datetime.now())
        db(db.celery_periodictasks).update(last_update=datetime.now())
        db.commit()
    self.last_run_at = record.last_run_at

def run(self, name, *_, **kw):
    from celery.execute import send_task

    # Positional args.
    args = kw.get("args") or ()
    if isinstance(args, basestring):
        args = deserialize(args)

    # Keyword args.
    kwargs = kw.get("kwargs") or {}
    if isinstance(kwargs, basestring):
        kwargs = deserialize(kwargs)

    # Expires can be int.
    expires = kw.get("expires") or None
    try:
        expires = int(expires)
    except (TypeError, ValueError):
        pass

    res = send_task(name, args=args, kwargs=kwargs,
                    countdown=kw.get("countdown"),
                    serializer=kw.get("serializer"),
                    queue=kw.get("queue"),
                    exchange=kw.get("exchange"),
                    routing_key=kw.get("routing_key"),
                    eta=kw.get("eta"),
                    expires=expires)
    self.out(res.task_id)

def __init__(self, model):
    self.name = model.name
    self.task = model.task
    self.schedule = model.schedule
    try:
        self.args = deserialize(model.args or u"[]")
        self.kwargs = deserialize(model.kwargs or u"{}")
    except ValueError:
        # disable because of error deserializing args/kwargs
        model.no_changes = True
        model.enabled = False
        model.save()
        raise

    self.options = {
        "queue": model.queue,
        "exchange": model.exchange,
        "routing_key": model.routing_key,
        "expires": model.expires,
    }
    self.total_run_count = model.total_run_count
    self.model = model

    if not model.last_run_at:
        model.last_run_at = self._default_now()
    self.last_run_at = model.last_run_at

def handle_feed(self, job_body):
    #log.notice(u'handle_feed.')
    #if feed.has_key('delete'):
    #    if delete feed's user_id in self.users.keys():
    #        self.handle_deletion(feed)
    #    pass
    #else:
    feed = anyjson.deserialize(job_body)
    if isinstance(feed, unicode):
        feed = anyjson.deserialize(feed)
    print feed.get('from', {}).get('name')

    if feed.has_key('refresh'):
        if feed.get('refresh'):
            self.users, self.politicians = self.get_users()
            self.normal_users = self.get_normal_users()
            log.notice(u"Refresh user and politician list from check notice.")
    elif feed.get('from', {}).get('id') in self.users.keys():
        # is a politician
        self.handle_new(feed)
    elif feed.get('from', {}).get('id') in self.normal_users.keys():
        # is a normal user
        if not self.normal_users[feed.get('from', {}).get('id')]:
            # not being ignored
            self.handle_tmp(feed)
    else:
        cursor = self.database.cursor()
        cursor.execute("""INSERT INTO normal_users (`user_name`, `facebook_id`)
                          VALUES (%s, %s)""",
                       (feed.get('from', {}).get('name'),
                        feed.get('from', {}).get('id')))
        log.notice(u"add new normal user:{0}({1})",
                   feed.get('from', {}).get('name'),
                   feed.get('from', {}).get('id'))
        self.normal_users = self.get_normal_users()
        self.handle_tmp(feed)

def run(self, name, *_, **kw):
    from celery.execute import send_task

    # Positional args.
    args = kw.get("args") or ()
    if isinstance(args, basestring):
        args = deserialize(args)

    # Keyword args.
    kwargs = kw.get("kwargs") or {}
    if isinstance(kwargs, basestring):
        kwargs = deserialize(kwargs)

    # Expires can be int.
    expires = kw.get("expires") or None
    try:
        expires = int(expires)
    except (TypeError, ValueError):
        pass

    res = send_task(name, args=args, kwargs=kwargs,
                    countdown=kw.get("countdown"),
                    serializer=kw.get("serializer"),
                    queue=kw.get("queue"),
                    exchange=kw.get("exchange"),
                    routing_key=kw.get("routing_key"),
                    eta=kw.get("eta"),
                    expires=expires)
    print(res.task_id)

def __init__(self, model):
    self.app = current_app._get_current_object()
    self.name = model.name
    self.task = model.task
    self.schedule = model.schedule
    try:
        self.args = deserialize(model.args or u"[]")
        self.kwargs = deserialize(model.kwargs or u"{}")
    except ValueError:
        # disable because of error deserializing args/kwargs
        model.no_changes = True
        model.enabled = False
        model.save()
        raise

    self.options = {"queue": model.queue,
                    "exchange": model.exchange,
                    "routing_key": model.routing_key,
                    "expires": model.expires}
    self.total_run_count = model.total_run_count
    self.model = model

    if not model.last_run_at:
        model.last_run_at = self._default_now()
    orig = self.last_run_at = model.last_run_at
    if not is_naive(self.last_run_at):
        self.last_run_at = self.last_run_at.replace(tzinfo=None)
    assert orig.hour == self.last_run_at.hour  # timezone sanity

def ga_client(self, r_id, ga_id):
    ga_key = self.v.get('ga_key', '')
    if ga_key and ga_key == self.cache.get('K' + ga_id):
        # Server-to-server request
        self.cache.delete('K' + ga_id)
        data = ''
        if self.v.get('ga_nologin', '') != '':
            data = 'nologin'
        else:
            try:
                data = anyjson.deserialize(self.v.get('ga_data', ''))
            except:
                raise
        if data != '':
            if data != 'nologin':
                data = anyjson.serialize(data)
            self.cache.set('D' + ga_id, data)
            self.server.header('text/plain')
            self.server.write('1')
        sys.exit()
    elif ga_key == '' and r_id != ga_id:
        # User redirect with different key
        d = self.cache.get('D' + ga_id)
        if d != 'nologin' and d != '':
            try:
                d = anyjson.deserialize(d)
            except:
                d = ''
            if d != '':
                self.setcookie(ga_id)
                self.redirect(self.clean_uri())
        self.server.header('text/plain', status=404)
        self.server.write('GlobalAuth key doesn\'t match')
        sys.exit()

def __getitem__(self, item):
    # TODO - Memoize this
    item = self._slugify(item)
    path = "%s/%s%s" % (FLAGS.datastore_path, self.prefix, item)
    if os.path.isfile(path):
        return anyjson.deserialize(open(path, 'r').read())
    return None

def importCurrencies(self):
    logger = getLogger('database')
    logger.info('importing currencies')
    with open('./data/iso-4217-currency.json') as istream:
        currencies = deserialize(''.join(istream.readlines()))
    cursor = self._cursor()
    alreadyInserted = set(self._readAllCurrencyIds(cursor))
    alreadyInserted.add(None)
    for currency in currencies:
        if (currency['Withdrawal_Date'] is not None
                or currency['Withdrawal_Interval'] is not None):
            # ignore currencies that have been withdrawn
            continue
        code = currency['Numeric_Code']
        code = int(code) if code is not None else None
        if code in alreadyInserted:
            # remove those without a numeric code and
            # remember that currencies can be used by multiple entities
            continue
        alreadyInserted.add(code)
        try:
            cursor.execute(
                '''
                INSERT INTO currency(id, code, name)
                VALUES(%s, %s, %s)
                ''',
                (code, currency['Alphabetic_Code'], currency['Currency']))
        except Exception:
            logger.error(currency, exc_info=True)
            self.rollback()
            raise
    self.commit()

def next(self):
    """Return the next available tweet. This call is blocking!"""
    while True:
        try:
            if not self.connected:
                self._init_conn()

            rate_time = time.time() - self._rate_ts
            if not self._rate_ts or rate_time > self.rate_period:
                self.rate = self._rate_cnt / rate_time
                self._rate_cnt = 0
                self._rate_ts = time.time()

            data = self._conn.readline()
            if data == "":  # something is wrong
                self.close()
                raise ConnectionError("Got entry of length 0. Disconnected")
            elif data.isspace():
                continue

            data = anyjson.deserialize(data)
            self.count += 1
            self._rate_cnt += 1
            return data

        except ValueError, e:
            self.close()
            raise ConnectionError("Got invalid data from twitter",
                                  details=data)
        except socket.error, e:
            self.close()
            raise ConnectionError("Server disconnected")

def __iter__(self):
    while True:
        try:
            if not self.connected:
                self._init_conn()

            for line in self.response.iter_lines():
                if line:
                    if self._raw_mode:
                        tweet = line
                    else:
                        try:
                            tweet = anyjson.deserialize(line)
                        except ValueError, e:
                            self.close()
                            raise ReconnectImmediatelyError("Invalid data: %s" % line)
                        if 'text' in tweet:
                            self.count += 1
                            self._rate_cnt += 1
                    yield tweet

        except socket.error, e:
            raise ReconnectImmediatelyError("Server disconnected: %s" % (str(e)))

def request(self, method, url, **kwargs):
    # Fix up request headers
    hdrs = kwargs.get('headers', {})
    hdrs['Accept'] = 'application/json'
    hdrs['User-Agent'] = self.USER_AGENT

    # If request has a body, treat it as JSON
    if 'body' in kwargs:
        hdrs['Content-Type'] = 'application/json'
        kwargs['data'] = anyjson.serialize(kwargs['body'])
        del kwargs['body']
    kwargs['headers'] = hdrs

    resp = requests.request(method, (self.endpoint + self.project_id) + url,
                            **kwargs)

    if resp.text:
        if resp.status_code == 400:
            if ('Connection refused' in resp.text or
                    'actively refused' in resp.text):
                raise exceptions.ConnectionRefused(resp.text)
        try:
            body = anyjson.deserialize(resp.text)
        except ValueError:
            body = None
    else:
        body = None

    return resp, body

def _request(method, url, params={}, data={}, headers={}):
    splits = urlparse.urlsplit(url)
    netloc = splits[1]
    if '@' in netloc:
        netloc_noauth = netloc.split('@')[1]
    else:
        netloc_noauth = netloc
    scheme = splits[0]
    path = splits[2]
    query = splits[3]
    fragment = splits[4]
    username = ''
    password = ''
    if '@' in netloc:
        password = netloc.split('@')[0][1:]
    if ':' in netloc_noauth:
        netloc_noauth, port = netloc_noauth.split(':')
    else:
        port = 80
    url = urlparse.urlunsplit((scheme, netloc_noauth, path, query, fragment))

    if method in ['GET', 'DELETE']:
        params = urllib.urlencode(params, True)
        if params:
            if '?' not in url:
                url += '?' + params
            else:
                url += '&' + params

    connection = httplib.HTTPConnection(netloc_noauth, port)

    if username or password:
        credentials = "%s:%s" % (username, password)
        base64_credentials = base64.encodestring(credentials)
        authorization = "Basic %s" % base64_credentials[:-1]
        headers['Authorization'] = authorization
    headers['User-Agent'] = __USER_AGENT

    if data:
        body = anyjson.serialize(data)
    else:
        body = ''

    connection.request(method, url, body, headers)
    response = connection.getresponse()
    response.body = response.read()

    if _is_ok(response.status):
        if response.body:
            try:
                response.body = anyjson.deserialize(response.body)
            except ValueError, e:
                raise InvalidResponseFromServer(
                    'The JSON response could not be parsed: %s.\n%s'
                    % (e, response.body))
            ret = response.status, response.body
        else:
            ret = response.status, None

def run():
    stream = openConn()
    from django.utils.encoding import smart_str, smart_unicode
    while True:
        data = stream.readline()
        print "Data = %s" % len(data)
        if (data != None and data != "" and len(data) > 2):
            data = anyjson.deserialize(data)
            unData = {}
            for k, v in data.items():
                # print "-- %s --" % (k)
                # v = smart_unicode(v)
                # print "-- %s " % smart_str(v)
                # try:
                #     print str(unicode(v))
                # except:
                #     print "ERRRR"
                unData[k] = smart_str(v)
            print "======================"
            print "======================"
            params = urllib.urlencode(unData)
            f = urllib.urlopen('http://127.0.0.1:8000/saveStatus', params)
            d = f.read()
            # print "Data = %s" % (d)
            filename = "err.html"
            file = open(filename, 'w')
            file.write(d)
            file.close()
            f.close()

def wait_once(self, since=None, heartbeat=False, timeout=60000,
              filter_name=None):
    """Wait for one change and return (longpoll feed)

    Args:
    @param since: str or int, sequence from which you want to get changes
    @param heartbeat: boolean, try to maintain connection by sending '\\n'
    @param timeout: int, timeout in ms
    @param filter_name: filter_name to use

    @return: dict, change result
    """
    params = self._make_params("longpoll", since, heartbeat, timeout,
                               filter_name)
    resp = self.db.res.get("_changes", **params)
    buf = ""
    while True:
        data = resp.body_file.read()
        if not data:
            break
        buf += data

    ret = anyjson.deserialize(buf)
    for callback in self.callbacks:
        callback(ret)
    return ret

def __init__(self, inputFile, **kwargs):
    self.map = Map(kwargs['name'], kwargs['language'])
    self.inputFile = inputFile
    self.features = {}
    self.where = kwargs['where']
    self.codes_file = kwargs['codes_file']
    self.width = kwargs['width']
    self.minimal_area = kwargs['minimal_area']
    self.country_name_index = kwargs['country_name_index']
    self.country_code_index = kwargs['country_code_index']
    self.longtitude0 = kwargs['longtitude0']
    self.inputFileEncoding = kwargs['input_file_encoding']
    if kwargs['viewport']:
        self.viewport = map(lambda s: float(s), kwargs['viewport'].split(' '))
    else:
        self.viewport = False

    # spatial reference to convert to
    self.spatialRef = osr.SpatialReference()
    self.spatialRef.ImportFromProj4(
        '+proj=mill +lat_0=0 +lon_0=' + self.longtitude0 +
        ' +x_0=0 +y_0=0 +R_A +ellps=WGS84 +datum=WGS84 +units=m +no_defs')

    # handle map insets
    if kwargs['insets']:
        self.insets = anyjson.deserialize(kwargs['insets'])
    else:
        self.insets = []

def parse_request(self, request):
    try:
        request = anyjson.deserialize(request['data'])
    except KeyError, e:
        logging.error("Request dictionary contains no 'data' key")
        return self.encode_result((500, "Internal error with request"))

def read_xenstore():
    recon()
    xs_handle = pyxenstore.Handle()
    try:
        hostname = xs_handle.read(XENSTORE_HOSTNAME_PATH)
        print('hostname: %r (from xenstore)\n' % hostname)
    except pyxenstore.NotFoundError:
        hostname = DEFAULT_HOSTNAME
        print('hostname: %r (DEFAULT)\n' % hostname)

    interfaces = []
    try:
        entries = xs_handle.entries(XENSTORE_INTERFACE_PATH)
    except pyxenstore.NotFoundError:
        entries = []

    for entry in entries:
        data = xs_handle.read(XENSTORE_INTERFACE_PATH + '/' + entry)
        data = anyjson.deserialize(data)
        interfaces.append(data)
        print('interface %s: %r\n' % (entry, data))

    del xs_handle

def get_queryset(self, *args, **kwargs):
    super(MultipleObjectFilterMixin, self).get_queryset(*args, **kwargs)
    qs = self.queryset
    if 'filter' in self.request.GET:
        data = deserialize(self.request.GET['filter'])
        qs = qs.filter(**data)
    return qs.all()

def main():
    global args
    args = parse_arguments()

    last_sync = ""
    if not args.all:
        now = datetime.now()
        last_sync = (now - timedelta(hours=3)).strftime("%Y-%m-%d %H:%M:%S")
        last_sync = urllib.parse.quote_plus(last_sync)

    connection = http.client.HTTPConnection(WSHOST, 80)
    uri = WSURI % last_sync
    connection.request("GET", uri)
    response = connection.getresponse()
    if response.status != http.client.OK:
        logging.error("The webservice didn't return OK :(")
        return 1

    json = response.read().decode("utf-8")
    data = anyjson.deserialize(json)
    departures = fetch_departures(data)

    cgitb.enable()
    table = PrettyTable([
        'Scheduled', 'Expected', 'Destination', 'Flight', 'Airline',
        'Gate', 'Aircraft', 'Reg', 'Status'
    ])
    table.align = 'l'
    for departure in sorted(departures):
        table.add_row(departure)
    print(table)
    return 0

def load_authorized_ips():
    # The authorized ips file has the following structure:
    # {
    #   'IP_ADDR': {
    #       'added': RFC_8601_DATETIME,
    #       'expires': RFC_8601_DATETIME,
    #       'whois': whois information about the IP at the time of recording,
    #       'geoip': geoip information about the IP at the time of recording,
    #   }
    # }
    #
    # It is stored in GL_ADMIN_BASE/2fa/validations/GL_USER.js
    user = os.environ['GL_USER']
    val_dir = os.path.join(os.environ['GL_ADMIN_BASE'], '2fa/validations')
    if not os.path.exists(val_dir):
        os.makedirs(val_dir, 0700)
        logger.debug('Created val_dir in %s' % val_dir)

    valfile = os.path.join(val_dir, '%s.js' % user)
    logger.debug('Loading authorized ips from %s' % valfile)
    valdata = {}
    if os.access(valfile, os.R_OK):
        try:
            fh = open(valfile, 'r')
            jdata = fh.read()
            fh.close()
            valdata = anyjson.deserialize(jdata)
        except:
            logger.critical('Validations file exists, but could not be parsed!')
            logger.critical('All previous validations have been lost, starting fresh.')
    return valdata

def get_message_headers(message):
    headers = {}
    for k, v in six.iteritems(message.headers):
        p = '{}-'.format(DEFAULT_WEBSERVER_H_PREFIX)
        if k.startswith(p):
            headers.update({k[len(p):]: anyjson.deserialize(v)})
    return headers

def __read_json(self):
    json_data = {'bunches': []}
    if exists(self.__json_data_file()):
        with open(self.__json_data_file(), 'r') as f:
            json_data = anyjson.deserialize(f.read())
    return json_data

def on_gist_found(self, gist_id, response):
    gist_struct = anyjson.deserialize(response.body)
    pprint(gist_struct)
    try:
        gist_info = gist_struct  #['gists'][0]
    except KeyError:
        # TODO: redirect to a warning page
        # gist is not found
        url = self.reverse_url("gist_not_found")
        url += '?gist_id=%s' % gist_id
        self.redirect(url)

    gist = self.db.Gist()
    gist.gist_id = gist_id
    gist.description = unicode(gist_info.get('description', u''))
    #gist.created_at = date_parse(gist_info['created_at'])
    gist.created_at = unicode(gist_info['created_at'])
    gist.files = [unicode(x) for x in gist_info['files']]
    gist.contents = []
    gist.public = gist_info['public']
    gist.owner = unicode(gist_info['user']['login'])
    gist.repo = None  #unicode(gist_info['repo'])
    gist.user = self.get_current_user()
    gist.save()

    self.redirect(self.reverse_url('edit_gist', gist.gist_id))

def add_queue(self, q, **kwargs):
    """Add queue for this instance by name."""
    if isinstance(q, self.Queue):
        q = q.as_dict()
    else:
        queues = find_symbol(self, 'cyme.branch.controller.queues')
        try:
            q = queues.get(q)
        except queues.NoRouteError:
            self.queues.remove(q)
            self.save()
            warnings.warn('Removed unknown consumer: %r from %r' % (
                q, self.name))
            return

    name = q['name']
    options = deserialize(q['options']) if q.get('options') else {}
    exchange = q['exchange'] if q['exchange'] else name
    routing_key = q['routing_key'] if q['routing_key'] else name
    return self._query('add_consumer',
                       dict(queue=q['name'],
                            exchange=exchange,
                            exchange_type=q['exchange_type'],
                            routing_key=routing_key,
                            **options),
                       **kwargs)

def on_callback(self, request):
    if request.method != 'POST':
        request.respond('This hook only supports POST method.')
    else:
        if request.GET.get('secret', [None])[0] != self.bot.config.draftin_secret:
            request.respond('Wrong secret was specified')
        else:
            payload = anyjson.deserialize(request.POST['payload'][0])
            title = payload['name']
            content = payload['content']
            slug = slugify(title)
            created_at = times.to_universal(payload['created_at'])
            updated_at = times.to_universal(payload['updated_at'])
            timezone = self.bot.config.timezone

            with open(os.path.join(self.bot.config.documents_dir,
                                   slug + '.md'), 'w') as f:
                post_content = self.template.format(
                    title=title,
                    content=content,
                    slug=slug,
                    created_at=times.format(created_at, timezone, '%Y-%m-%d %H:%M'),
                    updated_at=times.format(updated_at, timezone, '%Y-%m-%d %H:%M'))
                f.write(post_content.encode('utf-8'))

            try:
                subprocess.check_output(self.bot.config.update_command,
                                        stderr=subprocess.STDOUT,
                                        shell=True)
            except subprocess.CalledProcessError, e:
                request.respond(u'I tried to update a blog, but there was an error: '
                                + e.output.encode('utf-8'))
            else:
                request.respond('Done, published')

def index():
    item_reader = ItemReader()
    indexer = ItemIndexer(_api_client, _index_name)
    curr_millis = _start_millis
    while True:
        try:
            snapshot_resp = fetch(
                'https://api.mercadolibre.com/items/snapshot/%s' % curr_millis)
            json_str = snapshot_resp.content
            json_obj = anyjson.deserialize(json_str)
            for item_id in json_obj:
                try:
                    item = item_reader.get_item(item_id)
                    item.indexed = False
                    item.save()
                    indexer.index(item)
                    indexer.update_categories(item)
                    indexer.update_variables(item)
                    item.indexed = True
                    item.save()
                    print 'item indexed %s' % item_id
                except Exception, e:
                    print e
            curr_millis += 1000
        except Exception, e:
            print e

def update_layout(request):
    """ This view updates the page. """
    layout = request.POST.get('layout', None)
    if layout is None:
        return http.HttpResponseBadRequest()
    try:
        layout = json.deserialize(layout)
    except ValueError:
        # No JSON object could be decoded
        return http.HttpResponseBadRequest()

    # temporary: from [u'1', u'1'] -> [1, 1]
    layout['containers'] = check_containers(layout['containers'])

    page = get_page(request.user, request.session, for_update=True)
    if layout['timestamp'] <= page.layout['timestamp']:
        return http.HttpResponseForbidden(json.serialize(['KO', 'Expired']))

    page.layout.arrange_widgets(layout['containers'])
    page.layout['timestamp'] = layout['timestamp']
    page.save()
    return http.HttpResponse()

def run(self):
    self.running = True
    self.openConn()
    from django.utils.encoding import smart_str, smart_unicode
    while True:
        if self.stopNadwa:
            break
        if self.pauseNadwa:
            continue
        data = self.conn.readline()
        if (data != None and data != "" and len(data) > 2):
            data = anyjson.deserialize(data)
            unData = {}
            unData['nadwaName'] = self.nadwaName
            for k, v in data.items():
                if k == "user":
                    unData['profile_image_url'] = v['profile_image_url']
                if k == "retweeted_status":
                    unData['status_id'] = v['id_str']
                    # print 'activities_Key = %s ' % (v['activities'])
                unData[k] = smart_str(v)
            print "======================"
            try:
                params = urllib.urlencode(unData)
                f = urllib.urlopen('http://127.0.0.1:8000/saveStatus', params)
                file = open("./error.html", 'w')
                file.write(f.read())
                file.close()
                code = f.code
                f.close()
                # if (code == 500):
                #     break
            except:
                pass
    print 'Stopped'

def run(self):
    mimetypes.init()
    log.debug("Initialized mime type database.")
    screenshot_tube = self.config.get('beanstalk', 'screenshot_tube')
    self.beanstalk = politwoops.utils.beanstalk(
        host=self.config.get('beanstalk', 'host'),
        port=int(self.config.get('beanstalk', 'port')),
        watch=screenshot_tube,
        use=None)
    log.debug("Connected to queue.")

    while True:
        time.sleep(0.2)
        self.heart.beat()
        reserve_timeout = max(self.heart.interval.total_seconds() * 0.1, 2)
        job = self.beanstalk.reserve(timeout=reserve_timeout)
        if job:
            try:
                tweet = anyjson.deserialize(job.body)
                self.process_entities(tweet)
                job.delete()
            except Exception as e:
                log.error("Exception caught, burying screenshot job for tweet {tweet}: {e}",
                          tweet=tweet.get('id'), e=e)
                job.bury()

def get_directions(self):
    if not hasattr(self, '_directions'):
        response = requests.get('http://translate.yandex.net/api/v1/tr.json/getLangs')
        data = anyjson.deserialize(response.content)
        self._directions = set(data['dirs'])
    return self._directions

def check_feeds(self):
    cursor = self.database.cursor()
    chk_day = datetime.datetime.today() - datetime.timedelta(days=2)
    #cursor.execute("SELECT `id`, `url`, `feed` FROM `feeds` WHERE `deleted` =0 and politician_id=50")
    cursor.execute("SELECT `id`, `url`, `feed` FROM `feeds` WHERE `deleted` = 0 and created>%s",
                   chk_day.strftime("%Y/%m/%d"))
    feeds = cursor.fetchall()
    log.notice(u"counts:{0}", len(feeds))
    for data in feeds:
        time.sleep(0.1)  # delay a tick.
        try:
            # feed exists, put it into the queue for work.
            feed = self.fb_api.get_object(data[0])
            #log.notice(u"from {0}", feed['from']['name'])
            self.beanstalk.put(anyjson.serialize(feed))
        except Exception as e:
            # can't access the feed through the API, try through the URL.
            cursor.execute("""UPDATE `feeds` SET `unaccessable`=1 WHERE id = %s""", data[0])
            raw_feed = anyjson.deserialize(data[2])
            isactivity = True if (u"likes a" in raw_feed.get('story', '')
                                  or u"like a" in raw_feed.get('story', '')
                                  or u"commented on" in raw_feed.get('story', '')
                                  or u"a activity" in data[1]) else False
            log.notice(u"raw_story:{0}, isactivity:{1}, raw_url:{2}",
                       raw_feed.get('story', ''), isactivity, data[1])
            if not isactivity:
                html = requests.get(data[1], allow_redirects=True)
                log.notice("status code:{0}", html.status_code)
                if html.status_code == requests.codes.not_found:
                    title = BeautifulSoup(html.text).title.string
                    time.sleep(0.5)  # sleep an I/O tick.
                    log.notice(u"Title:{0}, url:{1}", title, data[1])
                    if u"找不到網頁" in title or u"Page Not Found" in title:
                        # the feed has been deleted.
                        self.handle_deletion(data[0])

def queryset(self, rqst, *args, **kwargs):
    queryset = super(HandlerFilterMixin, self).queryset(rqst, *args, **kwargs)
    if 'filter' in rqst.GET:
        s = rqst.GET['filter']
        data = deserialize(s)
        queryset = queryset.filter(**data)
    return queryset

def run(self):
    mimetypes.init()
    log.debug("Initialized mime type database.")
    screenshot_tube = self.config.get('beanstalk', 'screenshot_tube')
    self.beanstalk = politwoops.utils.beanstalk(
        host=self.config.get('beanstalk', 'host'),
        port=int(self.config.get('beanstalk', 'port')),
        watch=screenshot_tube,
        use=None)
    log.debug("Connected to queue.")

    while True:
        time.sleep(0.2)
        self.heart.beat()
        reserve_timeout = max(self.heart.interval.total_seconds() * 0.1, 2)
        job = self.beanstalk.reserve(timeout=reserve_timeout)
        if job:
            try:
                tweet = anyjson.deserialize(job.body)
                self.process_entities(tweet)
                job.delete()
            except Exception as e:
                log.error("Exception caught, burying screenshot job for tweet {tweet}: {e_type} {e}",
                          tweet=tweet.get('id'), e=e, e_type=type(e))
                job.bury()

def test_put__get(self):
    conn = create_connection(1)
    q = conn.Queue("testing")
    q.put(serialize({"name": "George Constanza"}))
    self.assertEquals(deserialize(q.get()),
                      {"name": "George Constanza"})

def print_report(json_path, url_base):
    """ Print a report on the json savefile contents """
    with open(json_path, 'r') as fh:
        j = anyjson.deserialize(fh.read())

    seen = 0
    review = 0
    markup = 0
    for path in j:
        s = ""
        if j[path]['seen'] is True:
            seen = seen + 1
        if j[path]['review'] is True:
            s = " review"
            review = review + 1
        if j[path]['markup'] is True:
            s = s + " markup"
            markup = markup + 1
        if s != "":
            print("%s\t%s%s" % (s, url_base, path))
            if j[path]['note'].strip() != "":
                print("\t%s" % j[path]['note'])

    print("==============================")
    print("Seen: %d" % seen)
    print("Review: %d" % review)
    print("Markup: %d" % markup)
    print("Unseen: %d" % (len(j) - seen))
    print("=================")
    print("Total: %d" % len(j))

def load_authorized_ips():
    # The authorized ips file has the following structure:
    # {
    #   'IP_ADDR': {
    #       'added': RFC_8601_DATETIME,
    #       'expires': RFC_8601_DATETIME,
    #       'whois': whois information about the IP at the time of recording,
    #       'geoip': geoip information about the IP at the time of recording,
    #   }
    # }
    #
    # It is stored in GL_ADMIN_BASE/2fa/validations/GL_USER.js
    valfile = os.path.join(os.environ['GL_ADMIN_BASE'], '2fa/validations',
                           '%s.js' % os.environ['GL_USER'])
    logger.debug('Loading authorized ips from %s' % valfile)
    valdata = {}
    if os.access(valfile, os.R_OK):
        try:
            fh = open(valfile, 'r')
            jdata = fh.read()
            fh.close()
            valdata = anyjson.deserialize(jdata)
        except:
            logger.critical('Validations file exists, but could not be parsed!')
            logger.critical('Please rerun "2fa val" to create a new file!')
            gl_fail_exit()
    return valdata

def _add_event(self, title, date, start, end, all_day,
               url=None, description=None):
    if isinstance(title, unicode):
        title = title.encode('utf8')
    values = dict(title=title,
                  all_day=all_day and 'true' or 'false',
                  guid=self.guid,
                  )
    if date:
        values['date'] = mktime(date.timetuple())
    else:
        values['start'] = mktime(start.timetuple())
        values['end'] = mktime(end.timetuple())
    if url is not None:
        values['url'] = url
    if description is not None:
        values['description'] = description

    data = urlencode(values)
    url = self.base_url + '/api/events'
    req = urllib2.Request(url, data)
    response = urllib2.urlopen(req)
    content = response.read()
    event = anyjson.deserialize(content)['event']
    self._massage_event(event)
    return event, response.code == 201

def messageReceived(self, ignored, channel, message, nocache=False):
    try:
        json = anyjson.deserialize(message)
        db = self.factory.db
        db.last_data = reactor.seconds()
        pattern = json["pattern"]
        metric = json["metric"]
        yield db.addPatternMetric(pattern, metric)
        triggers = yield db.getPatternTriggers(pattern)
        if not triggers:
            yield db.removePattern(pattern)
            metrics = yield db.getPatternMetrics(pattern)
            for metric in metrics:
                yield db.delMetric(metric)
            yield db.delPatternMetrics(pattern)
        for trigger_id in triggers:
            if nocache:
                yield db.addTriggerCheck(trigger_id)
            else:
                yield db.addTriggerCheck(trigger_id,
                                         cache_key=trigger_id,
                                         cache_ttl=config.CHECK_INTERVAL)
    except Exception:
        log.err()