def shareload(identifier):
    """Load a previously shared route by identifier and render it.

    Looks up the route row in the frolic DB, rebuilds the places/swaps
    objects from their jsonpickle payloads, primes the session cache and
    renders the places page. Redirects home when the identifier is unknown.
    """
    chatname = None
    if 'chatname' in session:
        chatname = session['chatname']
    conn = sqlite3.connect(FROLIC_DB)
    c = conn.cursor()
    c.execute("SELECT places, swaps, waypoints FROM routes WHERE identifier=?",
              [identifier])
    row = c.fetchone()
    if row is None:
        # Unknown identifier. Close the connection (the original bare
        # `except:` path leaked it) and bounce back to the index page.
        # NOTE(review): url_for('/') looks like it should be the endpoint
        # name (e.g. url_for('index')) -- confirm.
        conn.close()
        return redirect(url_for('/'), 302)
    places, swaps, waypoints = row
    places = jsonpickle.decode(places)
    swaps = jsonpickle.decode(swaps)
    if not 'cache' in session:
        session['cache'] = {}
    # Prime the session cache so later swap/share calls can reuse the plan.
    session['cache']['places'] = jsonpickle.encode(places)
    session['cache']['swaps'] = jsonpickle.encode(deque(swaps))
    session['cache']['waypoints'] = waypoints
    conn.commit()
    conn.close()
    return render_template('places.html', places=places, swaps=swaps,
                           waypoints_list=waypoints, mobile=_is_mobile(request),
                           chatname=chatname, is_share=True)
def jsonrpc_get_blocks_info(self, block_types):
    """\brief Gets full information about the given set of blocks.
    The blockinfo.py module must have been generated beforehand by the
    blockinfoparser.py script; a CODE_FAILURE ReturnValue is returned
    otherwise.
    \param block_types (\c list[string]) The block types (e.g., ["PFQSource"])
    \return (\c ReturnValue) Json-encoded; data is the list[BlockInfo] found
    """
    f = None
    # The file is opened purely as an existence check -- its contents are
    # loaded via the import below, not read from this handle.
    try:
        f = open(self.__bm_base_path + "/daemon/core/blockinfo.py")
    except IOError as e:
        msg = "No blockinfo.py file available, please run the " + \
              "blockinfoparser.py script to generate this file"
        r = ReturnValue(ReturnValue.CODE_FAILURE, msg, None)
        return jsonpickle.encode(r)
    f.close()
    from core.blockinfo import block_infos
    blocks = []      # BlockInfo objects for the requested types
    not_found = []   # requested types with no BlockInfo entry
    for block_type in block_types:
        if block_infos.has_key(block_type):  # dict.has_key: Python 2 code
            blocks.append(block_infos[block_type])
        else:
            not_found.append(block_type)
    msg = ""
    if len(not_found) > 0:
        msg = "not found:" + str(not_found)
    # Still a success when some types were not found; msg carries the misses.
    r = ReturnValue(ReturnValue.CODE_SUCCESS, msg, blocks)
    return jsonpickle.encode(r)
def log(self, log_statement, payload=None, level=None, span_guid=None):
    """Record a log statement with optional payload and importance level.

    :param str log_statement: log text
    :param payload: a string, int, object, etc. whose serialization will
        be sent to the server
    :param str span_guid: associate the log with a specific span
        operation by providing a span_guid
    :param char level: for internal use only importance level of log -
        'I' info, 'W' warning, 'E' error, 'F' fatal
    """
    if self._disabled_runtime:
        return
    timestamp = util._now_micros()
    guid = self._runtime.guid
    log_record = ttypes.LogRecord(timestamp, guid, message=log_statement,
                                  level=level, span_guid=span_guid)
    if payload is not None:
        try:
            log_record.payload_json = \
                jsonpickle.encode(payload,
                                  unpicklable=constants.JSON_UNPICKLABLE,
                                  max_depth=constants.JSON_MAX_DEPTH)
        except Exception:
            # Was a bare `except:` -- that also swallowed KeyboardInterrupt
            # and SystemExit. Fall back to a canned failure marker.
            log_record.payload_json = jsonpickle.encode(constants.JSON_FAIL)
    self._add_log(log_record)
def flatten(self, obj, data):
    """Flatten *obj* into a Dictionary of plain-JSON slot values.

    The "@types" entry records the object's type; every slot value is
    encoded with unpicklable=False. The dictionary is written out via
    writeFileExpression before being returned as a JSON string.
    """
    out = Dictionary()
    out.set_item("@types", jsonpickle.encode(obj.get_type(), unpicklable=False))
    for slot_name in obj.get_slot():
        frame_json = jsonpickle.encode(obj.get_frame(slot_name), unpicklable=False)
        out.set_item(slot_name, frame_json)
    self.writeFileExpression(out)
    return jsonpickle.encode(out, unpicklable=False)
def test_update_props_name_put(self):
    """PUT an asset rename, then PUT its properties under the new name,
    and verify the updated environment name via GET."""
    method = 'test_update_props_name_put'
    # Create the asset and rename it to Test2. Note upd_asset aliases
    # self.new_asset, so the later quote(...) sees the new name.
    rv = self.app.post('/api/assets', content_type='application/json',
                       data=jsonpickle.encode(self.new_asset_dict))
    url = '/api/assets/name/%s' % quote(self.new_asset.theName)
    upd_asset = self.new_asset
    upd_asset.theName = 'Test2'
    upd_asset_dict = self.new_asset_dict
    upd_asset_dict['object'] = upd_asset
    upd_asset_body = jsonpickle.encode(upd_asset_dict)
    rv = self.app.put(url, content_type='application/json', data=upd_asset_body)
    # Properties URL targets the renamed asset (theName is now 'Test2').
    url = '/api/assets/name/%s/properties' % quote(self.new_asset.theName)
    self.logger.info('[%s] Old asset property environment name: %s', method,
                     self.new_asset_props[0].theEnvironmentName)
    # Change the first property's environment and PUT the whole list.
    upd_asset_props = self.new_asset_props
    upd_asset_props[0].theEnvironmentName = 'Psychosis'
    upd_asset_props_dict = {
        'session_id': 'test',
        'object': upd_asset_props
    }
    upd_asset_props_body = jsonpickle.encode(upd_asset_props_dict)
    self.logger.info('[%s] JSON data: %s', method, upd_asset_props_body)
    rv = self.app.put(url, content_type='application/json',
                      data=upd_asset_props_body)
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    json_resp = json_deserialize(rv.data)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message returned')
    # Fetch the properties under the new name and log the environment.
    rv = self.app.get('/api/assets/name/Test2/properties?session_id=test')
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    asset_props = jsonpickle.decode(rv.data)
    self.logger.info('[%s] Asset property environment: %s\n', method,
                     asset_props[0]['theEnvironmentName'])
def jsonrpc_read_variables(self, comp_id, json_variables):
    """\brief Reads a set of block variables.
    Each VariableInfo object needs to have the name of the block and the
    variable name set. The access_type member should be set to "read".
    The function returns the same list, this time with the "value" member
    filled out.
    \param comp_id (\c string) The composition id
    \param json_variables (\c list[VariableInfo]) Json-encoded variables
    \return (\c ReturnValue) The values (list[VariableInfo])
    """
    if not self.__bm_processes.has_key(comp_id):  # dict.has_key: Python 2 code
        msg = "no composition with the given id exists, can't read variables"
        return jsonpickle.encode(ReturnValue(ReturnValue.CODE_FAILURE, msg, None))
    variables = []
    try:
        variables = jsonpickle.decode(json_variables)
    except:
        # Non-jsonpickle encoding, supports non-python clients (assumes
        # a list of lists)
        for v in json_variables:
            variables.append(core.block.VariableInfo(str(v[0]), str(v[1]),
                                                     str(v[2]), str(v[3])))
    # Forward each read to the composition's local XML-RPC server; the
    # VariableInfo is pickled across the wire in both directions.
    port = self.__bm_processes[comp_id].get_port()
    url = "http://localhost:" + str(port) + "/"
    proxy = xmlrpclib.ServerProxy(url)
    results = []
    for variable in variables:
        result = pickle.loads(proxy.read_variable(pickle.dumps(variable)))
        results.append(result)
    r = ReturnValue(ReturnValue.CODE_SUCCESS, "", results)
    return jsonpickle.encode(r)
def test_settings_put(self):
    """Round-trip the settings endpoint: change the project name via PUT,
    verify it took, then PUT the original settings back."""
    url = '/api/settings?session_id=test'
    method = 'test_settings_put'
    # Fetch the current settings (kept in json_dict for restoration below).
    rv = self.app.get(url)
    json_dict = jsonpickle.decode(rv.data)
    self.logger.info('[%s] Current project name: %s', method,
                     json_dict['projectName'])
    settings = self.convert_to_obj(json_dict)
    settings.projectName = 'A new project name'
    new_json_dict = {
        'session_id': 'test',
        'object': settings
    }
    json_body = jsonpickle.encode(new_json_dict)
    rv = self.app.put(url, data=json_body, content_type='application/json')
    self.assertIsNotNone(rv.data, 'No response')
    new_json_dict = jsonpickle.decode(rv.data)
    self.assertIsInstance(new_json_dict, dict,
                          'Response is not a valid JSON dictionary')
    message = new_json_dict.get('message', None)
    self.assertIsNotNone(message)
    self.logger.info('[%s] Message: %s', method, message)
    # Confirm a fresh GET reports the new name.
    rv = self.app.get(url)
    new_json_dict = jsonpickle.decode(rv.data)
    self.logger.info('[%s] New project name: %s\n', method,
                     new_json_dict['projectName'])
    # Restore the original settings so later tests see a clean state.
    new_json_dict = {
        'session_id': 'test',
        'object': json_dict
    }
    json_body = jsonpickle.encode(new_json_dict)
    rv = self.app.put(url, data=json_body, content_type='application/json')
def _wrapper(*args, **kw):
    """Invoke *func* and wrap its jsonpickled result in an HttpResponse.

    On failure a JSON body of the form {"error": <traceback text>} is
    returned instead of propagating the exception.
    """
    try:
        response_data = jsonpickle.encode(func(*args, **kw))
    except Exception:
        # Bug fix: the original encoded the function object
        # traceback.print_stack itself; format_exc() captures the actual
        # traceback text of the exception being handled. Also narrowed
        # from a bare `except:`.
        response_data = jsonpickle.encode(dict(error=traceback.format_exc()))
    response = HttpResponse(response_data, content_type="application/json")
    return response
def test_put_name(self):
    """Rename an asset via PUT to /api/assets/name/<name> and verify the
    new name through a follow-up GET."""
    method = 'test_put_name'
    # Create the asset, then rename it to Test2 (url captured pre-rename).
    rv = self.app.post('/api/assets', content_type='application/json',
                       data=jsonpickle.encode(self.new_asset_dict))
    url = '/api/assets/name/%s' % quote(self.new_asset.theName)
    upd_asset = self.new_asset
    upd_asset.theName = 'Test2'
    upd_asset_dict = self.new_asset_dict
    upd_asset_dict['object'] = upd_asset
    upd_asset_body = jsonpickle.encode(upd_asset_dict)
    self.logger.info('[%s] JSON data: %s', method, upd_asset_body)
    rv = self.app.put(url, content_type='application/json', data=upd_asset_body)
    # Response bodies are bytes on Python 3, str on Python 2.
    if (sys.version_info > (3,)):
        putResponse = rv.data.decode('utf-8')
    else:
        putResponse = rv.data
    self.logger.debug('[%s] Response data: %s', method, putResponse)
    json_resp = json_deserialize(putResponse)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message returned')
    # Fetch under the new name and log it.
    rv = self.app.get('/api/assets/name/Test2?session_id=test')
    if (sys.version_info > (3,)):
        asset = json_deserialize(rv.data.decode('utf-8'))
    else:
        asset = json_deserialize(rv.data)
    self.logger.info('[%s] Asset: %s\n', method, asset['theName'])
def _encode(self, logs, errors=None, profile=None, extension_data=None):
    """Build the payload dict and jsonpickle it, degrading gracefully when
    serialization itself fails.

    NOTE(review): the encoded string is bound to ``data`` but never
    returned in the visible code -- confirm the caller's contract.
    """
    data = {"logs": logs}
    # Optional sections are included only when truthy.
    if errors:
        data['errors'] = errors
    if profile:
        data['profile'] = profile
    if extension_data:
        data['extension_data'] = extension_data
    try:
        data = jsonpickle.encode(data, unpicklable=False,
                                 max_depth=CONST.JSONPICKLE_DEPTH)
    except Exception, e:  # Python 2 except syntax
        # this exception may be fired, because of buggy __repr__ or
        # __str__ implementations on various objects
        errors = [self._handle_internal_exception(e)]
        try:
            data = jsonpickle.encode({"errors": errors}, unpicklable=False,
                                     max_depth=CONST.JSONPICKLE_DEPTH)
        except Exception, e:
            # even unable to serialize error message
            data = jsonpickle.encode(
                {"errors": {"message": "FirePython has a really bad day :-("}},
                unpicklable=False,
                max_depth=CONST.JSONPICKLE_DEPTH
            )
def read_value(self, path):
    """ Reads the value of the hardware object at the given path.
    The hardware object must have the get_value method.

    :param path: Path to a hardware object.
    :type path: str
    :returns: The 'value' of the hardware object.
    :rtype: Return type of get_value of the hardware object.
    """
    # The two beamline defaults are served directly as jsonpickle strings.
    if path == '/beamline/default-acquisition-parameters/':
        return jsonpickle.encode(self.get_default_acquisition_parameters())
    if path == '/beamline/default-path-template/':
        return jsonpickle.encode(self.get_default_path_template())
    # Everything else resolves through the path -> hardware-object map.
    try:
        return self._object_by_path[path].get_value()
    except KeyError:
        raise KeyError('Invalid path')
def write_results_json(results, path):
    """Write a resultset to a JSON file

    Also provides a JSON file of the topology used in the experiment

    Parameters
    ----------
    results : ResultSet
        The set of results
    path : str
        The path of the file to which write
    """
    JSON_results = jp.encode(results)
    with open(path, 'wb') as f:
        f.write(JSON_results)
    # Derive "<basename>_topology_<i>.json" next to the results file.
    # NOTE(review): str slicing does not raise here, so the except branch
    # (and its differing "_topology.json" suffix) looks unreachable.
    try:
        topology_path = path[:path.rfind('.')] + "_topology_"
    except:
        topology_path = path + "_topology.json"
    try:
        # Probe for topology data; raises when the key/index is absent.
        e = results[0][1]['TOPOLOGY']
        for i in range(0, len(results)):
            exp = results[i][1]['TOPOLOGY']['TOPOLOGY']
            topology_out = {'nodes': exp.stacks(), 'edges': exp.edges()}
            topology_out = jp.encode(topology_out)
            with open(topology_path + str(i) + ".json", 'wb') as f:
                f.write(topology_out)
    except:
        print "Topology missing from results, add 'TOPOLOGY' to config DATA_COLLECTORS for this data"
def parse(ean): resp = '' dvdToRemove = None if request.method == 'GET' or request.method == 'DELETE': for dvd in MovieCollection: if dvd.ean == ean: if request.method == 'GET': if DEBUG == True: print 'GET: ' + ean resp = jsonpickle.encode(dvd) break elif request.method == 'DELETE': if DEBUG == True: print 'DELTE: ' + ean MovieCollection.remove(dvd) break elif request.method == 'POST': if DEBUG == True: print 'POST: ' + ean dvd = ParsingManager.Parse(ean) MovieCollection.append(dvd) resp = jsonpickle.encode(dvd) return resp
def index():
    """Main planner view: validate the form, run the chosen strategy and
    render the resulting places (or the form again on failure)."""
    session['cache'] = {}
    # Inject a random food suggestion into the submitted form data.
    request_form = dict(request.form)
    request_form['planner-food'] = food_samples[random.randint(0, len(food_samples)-1)]
    form = MainForm(ImmutableMultiDict(request_form))
    if form.validate_on_submit() or not request.form.get('planner-location'):
        return render_template('index.html', form=form, mobile=_is_mobile(request))
    location = request.form['planner-location']
    number_places = strategy.number_places(request.form['planner-number_places'])
    strat = strategy.get(request.form['planner-type'])
    distance = request.form['planner-distance']
    time = request.form['planner-time']
    try:
        if isinstance(strat, strategy.ReverseFoodStrategy):
            # Reverse food search works from coordinates with 3 places fixed.
            places, swaps = strat.generate(
                tuple(request.form['planner-coords'].split(',')), 3,
                distance, request.form['planner-food'])
        else:
            places, swaps = strat.generate(location, number_places, distance, time)
        _process_ratings(places)
    except:
        # Any strategy failure falls back to the form with an apology.
        traceback.print_exc()
        return render_template('index.html', form=form, mobile=_is_mobile(request),
                               error="Frolic couldn't come up with anything exciting - sorry! Either try again or "
                                     "make some new choices.")
    # Cache the generated plan in the session for later share/swap calls.
    session['cache']['places'] = jsonpickle.encode(places)
    session['cache']['swaps'] = jsonpickle.encode(deque(swaps))
    # Build quoted "street city country" strings for the maps waypoints.
    waypoints = _stringify([unicode(u' '.join([place.street, place.city, place.country]).replace("'", "\\'"))
                            for place in places])
    session['cache']['waypoints'] = waypoints
    return render_template('places.html', places=places, swaps=swaps,
                           waypoints_list=waypoints, mobile=_is_mobile(request))
def test_dependency_name_put(self):
    """Create a dependency, move it to a second environment via PUT, then
    fetch and delete it under its new identity."""
    method = 'test_dependency_name_put'
    url = '/api/dependencies'
    new_dep = self.prepare_new_dependency()
    json_dict = {'session_id': 'test', 'object': new_dep}
    json_body = jsonpickle.encode(json_dict)
    self.app.post(url, data=json_body, content_type='application/json')
    # Capture the identifying tuple before mutating the dependency.
    new_name = (new_dep.theEnvironmentName, new_dep.theDepender,
                new_dep.theDependee, new_dep.theDependency)
    upd_dep = new_dep
    upd_dep.theEnvironmentName = self.existing_environment_2
    json_dict = {'session_id': 'test', 'object': upd_dep}
    json_body = jsonpickle.encode(json_dict)
    upd_url = '/api/dependencies/environment/%s/depender/%s/dependee/%s/dependency/%s?session_id=test' % new_name
    rv = self.app.put(upd_url, data=json_body, content_type='application/json')
    self.assertIsNotNone(rv.data, 'No response')
    json_dict = jsonpickle.decode(rv.data)
    self.assertIsInstance(json_dict, dict, 'Response is not a valid JSON dictionary')
    message = json_dict.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.assertNotIsInstance(message, dict, 'Message is an object')
    self.logger.info('[%s] Message: %s\n', method, message)
    # The dependency now lives under the updated environment name.
    delete_name = (upd_dep.theEnvironmentName, upd_dep.theDepender,
                   upd_dep.theDependee, upd_dep.theDependency)
    del_get_url = '/api/dependencies/environment/%s/depender/%s/dependee/%s/dependency/%s?session_id=test' % delete_name
    rv = self.app.get(del_get_url)
    self.logger.debug('[%s] Updated dependency:\n%s\n', method, rv.data)
    self.app.delete(del_get_url)
def insert(self, report):
    """
    insert NessusReport in the backend
    :param report: the NessusReport to index
    :return: str the ident of the object in the backend for future usage
        or None
    """
    # Plain-JSON view of the report for querying.
    j = jsonpickle.encode(report, unpicklable=False)
    # Full (picklable) encoding is stored base64'd; the bytes/str dance
    # differs between Python 2 and 3.
    if python_version == 3:
        # python3
        j2 = jsonpickle.encode(report).encode("utf-8")
        b64 = base64.b64encode(j2).decode(encoding="UTF-8")
    else:
        # python 2
        j2 = jsonpickle.encode(report)
        b64 = base64.b64encode(j2)
    # Document id derives from the report name only, so re-inserting a
    # same-named report overwrites the previous document.
    docid = hash(report.name)
    docu = {
        "hash": docid,
        "json": j,
        "json_base64": b64,
        "date": datetime.datetime.utcnow(),
        "name": report.name,
        "endtime": report.endtime,
        "ipaddress": [host.address for host in report.hosts],
    }
    try:
        self.es.index(index=self.index, doc_type=self.store, body=docu, id=docid)
        return docid
    except:
        raise
def onOpen(self):
    """WebSocket open handler: admins get the current client roster,
    ordinary clients get a random word command."""
    payload = {}
    command = {}
    if self.admin:
        payload = get_payload(self.factory.clients)
        payload['cmd'] = 'updateclients'
        encoded_payload = jsonpickle.encode(payload)
        self.sendMessage(encoded_payload)
    else:
        try:
            # {"cmd": {"chcolor": "red", "stn": "1"}}
            command['cmd'] = 'ft'
            command['val'] = choice(words.words)
            payload['cmd'] = command
            encoded_word = jsonpickle.encode(payload)
            print encoded_word
        except (RuntimeError, TypeError, NameError) as e:
            print e
        try:
            # NOTE(review): if the block above failed before assigning,
            # encoded_word is unbound here; the resulting NameError is
            # swallowed by the handler below.
            self.sendMessage(encoded_word)
            print "\nAttempted to send message to client.\n"
        except (RuntimeError, TypeError, NameError) as e:
            print e
            print "\nMessage could not be sent.\n"
def insert(self, report):
    """
    insert NessusReport in the backend
    :param report: the NessusReport to index
    :return: str the ident of the object in the backend for future usage
    """
    # Plain-JSON view for querying, full encoding stored base64'd.
    j = jsonpickle.encode(report, unpicklable=False)
    j2 = jsonpickle.encode(report)
    docid = hash(report)
    docu = {"hash": docid,
            "json": j,
            "json_base64": base64.b64encode(j2),
            "date": datetime.datetime.utcnow(),
            "name": report.name,
            "endtime": report.endtime,
            "ipaddress": [host.address for host in report.hosts]}
    # The original wrapped this in `try: ... except: raise`, which is a
    # no-op -- removed; exceptions still propagate to the caller.
    self.es.index(
        index=self.index,
        doc_type=self.store,
        body=docu,
        id=docid
    )
    return docid
def write_connections_to_file(self):
    """Write the data connections to disk

    The 'connections' entry inside the zip archive is replaced by copying
    every other member into a temp archive, appending a fresh jsonpickle
    payload, then swapping the temp file into place. When the archive
    does not exist yet (IOError) a new one is created with only the
    connections entry.
    """
    try:
        zin = zipfile.ZipFile (self._fileName, 'r', zipfile.ZIP_DEFLATED)
        zout = zipfile.ZipFile (self._fileName+"tmp", 'w', zipfile.ZIP_DEFLATED)
        # Copy everything except the old connections entry.
        for item in zin.infolist():
            buffer = zin.read(item.filename)
            if (item.filename != 'connections'):
                zout.writestr(item, buffer)
        pickled = jsonpickle.encode(self.connections)
        info = zipfile.ZipInfo("connections")
        info.compress_type = zipfile.ZIP_DEFLATED
        zout.writestr(info, pickled)
        zout.close()
        zin.close()
        # Replace the original archive with the rebuilt one.
        shutil.copyfile(self._fileName+"tmp", self._fileName)
        os.remove(self._fileName+"tmp")
        self.connState = "saved"
    except IOError:
        # No existing archive: create one containing only connections.
        zout = zipfile.ZipFile (self._fileName, 'w', zipfile.ZIP_DEFLATED)
        pickled = jsonpickle.encode(self.connections)
        info = zipfile.ZipInfo("connections")
        info.compress_type = zipfile.ZIP_DEFLATED
        zout.writestr(info, pickled)
        zout.close()
        self.connState = "saved"
def set_scores(self, team_scores):
    """ Function to import and process the json object exported by
    get_scores(). Raises on any missing section ('teamscore', 'round',
    'hosts' or a host entry); per-host scores are delegated to each
    host's own set_scores(). """
    if "teamscore" in team_scores:
        self.scores = team_scores["teamscore"]
        if globalvars.debug:
            print "Set team score for %s to %s" % (self.teamname, self.scores)
            json_obj = jsonpickle.encode(self.scores)
            self.pp.pprint(json_obj)
    else:
        json_obj = jsonpickle.encode(team_scores)
        raise Exception ("Invalid team_scores hash, missing team score! \n%s\n" % json_obj)
    if "round" in team_scores:
        self.this_round = team_scores["round"]
        if globalvars.debug:
            print "Set round for %s to %s" % (self.teamname, self.this_round)
    else:
        json_obj = jsonpickle.encode(team_scores)
        raise Exception ("Invalid team_scores hash, missing round! \n%s\n" % json_obj)
    if "hosts" in team_scores:
        # Every known host must appear in the imported hash.
        for host in self.hosts:
            if host in team_scores["hosts"]:
                self.hosts[host].set_scores(team_scores["hosts"][host])
            else:
                json_obj = jsonpickle.encode(team_scores)
                raise Exception ("Invalid team_scores hash! \n%s\n" % json_obj)
    else:
        json_obj = jsonpickle.encode(team_scores)
        raise Exception ("Invalid team_scores hash, missing hosts! \n%s\n" % json_obj)
def join_chat(identifier): conn = sqlite3.connect(FROLIC_DB) c = conn.cursor() c.execute("SELECT chat FROM chats WHERE identifier=?", [identifier]) if c.rowcount <= 0: c.execute("INSERT INTO chats (identifier, chat) VALUES (?, ?)", [identifier, jsonpickle.encode({'members': [], 'messages': []})]) conn.commit() c.execute("SELECT chat FROM chats WHERE identifier=?", [identifier]) chat = c.fetchone() chat = jsonpickle.decode(chat[0]) token = str(uuid.uuid4()) print request.data chatname = json.loads(request.data)['name'] member_entry = { token: chatname } session['chatname'] = chatname chat['members'].append(member_entry) response = {'token': token, 'members': chat['members'], 'messages': chat['messages'] } c.execute("UPDATE chats SET chat = ? where identifier = ?", [identifier, jsonpickle.encode(chat)]) conn.commit() conn.close() return jsonify(response)
def __eq__(self, other):
    """Two UserUttered events are equal when text, intent name, entities
    (compared via their jsonpickle encoding) and parse data all match."""
    if not isinstance(other, UserUttered):
        return False
    mine = (self.text, self.intent.get("name"),
            jsonpickle.encode(self.entities), self.parse_data)
    theirs = (other.text, other.intent.get("name"),
              jsonpickle.encode(other.entities), other.parse_data)
    return mine == theirs
def sign_in_user():
    """Authenticate by provider id taken from the JSON request body.

    Returns the matching User as plain JSON, or a 401 error payload when
    no user with that provider id exists.
    """
    json = request.json  # NOTE: shadows any module-level json import
    users = User.objects(provider_id=json['providerId'])
    if(len(users) > 0):
        return jsonpickle.encode(users.get(), unpicklable=False)
    else:
        # NOTE(review): 'response.status = 401' is Bottle-style -- confirm
        # the framework; Flask would need a (body, 401) tuple instead.
        response.status = 401
        return jsonpickle.encode({'errors': ['Unauthorized user']})
def on_status(self, status):
    """Stream callback: emit each received tweet as jsonpickled JSON.

    NOTE(review): date and time are computed but unused in the visible
    code -- presumably leftovers; confirm before removing.
    """
    # simplified and readable date for the tweets
    # ("20%y" hard-codes a 20xx century prefix)
    date = status.created_at.date().strftime("20%y/%m/%d")
    time = status.created_at.time().strftime("%H:%M:%S")  # GMT time
    # send data to file for analysis (stdout presumably redirected)
    print jsonpickle.encode(status)
def get_partial_uploads(request):
    """Return the current user's incomplete chunked uploads as JSON.

    Incomplete uploads are those with no completed_on timestamp, ordered
    oldest first; an encoded empty string is returned when there are none.
    """
    pending = ChunkedUpload.objects.filter(
        completed_on__isnull=True,
        user_id=request.user.id).order_by('created_on')
    if not pending:
        return HttpResponse(jsonpickle.encode(''))
    serialized = serializers.serialize('json', pending)
    return HttpResponse(jsonpickle.encode(serialized))
def post(self, request, *args, **kwargs):
    """Handle a new-item POST from the mobile app.

    Validates the required form attributes, mails the post to the reuse
    list, records the email thread and item (geocoding the location via
    ItemPostLocator), notifies users and reports {"success": bool}.
    """
    try:
        client = self.authenticate_user(request, *args, **kwargs)
        if client is not None:
            # All four attributes must be present in the form data.
            attributes = ['name', 'description', 'location', 'tags']
            for attribute in attributes:
                if not attribute in request.POST:
                    return HttpResponseBadRequest("Cannot find '%s' attribute" % attribute)
            subject = request.POST['name']
            sender = client.email
            shameless_plug = "SENT USING REUSE MOBILE APP. GET IT AT armadillo.xvm.mit.edu."
            description = request.POST['description']
            text = description + "\n\n\n\n_______________________________________________\n"+shameless_plug
            name = request.POST['name']
            # Timestamp-based id ties follow-up mails into one thread.
            thread_id = str(time.time())+"@"+MAIN_URL
            headers = [('REUSE-MOBILE', 'true'), ('Message-ID', thread_id)]
            reuse_list = [REUSE_EMAIL_ADDRESS]  # testing
            status = send_mail(sender, reuse_list, subject, text, headers)
            if status == 'success':
                location = request.POST['location']
                tags = request.POST['tags']
                new_thread = EmailThread.objects.create(subject=subject)
                new_email = NewPostEmail.objects.create(sender=sender, subject=subject,
                                                        text=text, thread=new_thread)
                ipl = ItemPostLocator()
                data = ipl.get_location(location.upper())
                # NOTE(review): this tests ipl (never None here) rather than
                # data -- presumably 'data is not None' was intended; as
                # written, a failed geocode raises and hits the 500 below.
                if ipl is not None:
                    lon = str(data['lon'])
                    lat = str(data['lat'])
                else:
                    lon = ''
                    lat = ''
                new_item = Item.objects.create(name=name, description=description,
                                               location=location, tags=tags,
                                               post_email=new_email, lat=lat, lon=lon,
                                               is_email=False, thread=new_thread)
                notify_all_users()
                response = jsonpickle.encode({"success": True})
                return HttpResponse(response)
            else:
                logger.error("POST: "+status + '\n\n')
                response = jsonpickle.encode({"success": False})
                return HttpResponse(response)
        else:
            return HttpResponseForbidden("Invalid Request.")
    except Exception as e:
        logger.exception(str(e))
        return HttpResponseServerError(e if DEBUG else "An error has occured.")
def getCommunityCards():
    """Return the jsonpickled community cards of the game's current round.

    Expects a JSON body containing "gameID"; each card is individually
    encoded, then the resulting list is encoded as the response."""
    payload = request.json
    game = gm.getByID(payload["gameID"])
    current_round = game.getCurrentRound()
    encoded_cards = [jsonpickle.encode(c) for c in current_round.community_cards]
    return jsonpickle.encode(encoded_cards)
def joinGame():
    """Join the player to the game; respond with a status keyed by the
    player id ("joined" or "not found")."""
    payload = request.json
    game_id = payload["gameID"]
    player_id = payload["playerID"]
    joined = gm.joinGame(player_id, game_id)
    status = "joined" if joined is not None else "not found"
    return jsonpickle.encode({player_id: status})
def newRound():
    """Start a new round; reports whether the previous one had finished."""
    payload = request.json
    game_id = payload["gameID"]
    started = gm.getByID(game_id).newRound()
    message = "round created" if started else "still in previous round"
    return jsonpickle.encode({game_id: message})
def startGame():
    """Start the identified game, or report that it does not exist."""
    payload = request.json
    game_id = payload["gameID"]
    game = gm.getByID(game_id)
    if game is None:
        return jsonpickle.encode({game_id: "not found"})
    game.start()
    return jsonpickle.encode({game_id: "started"})
def main():
    """Robot-side main loop: read settings, bring up hardware and
    networking, then process packets until an emergency stop."""
    # start the logger
    logger = logging.getLogger(__name__)
    handler = RotatingFileHandler('robot_log.log', "a", maxBytes=960000, backupCount=5)
    logger.addHandler(handler)
    # check for a default config file
    if os.path.isfile("settings.default.json") and not os.path.isfile("settings.json"):
        open("settings.json", "a").close()
        copyfile("settings.default.json", "settings.json")
    # read the file
    with open("settings.json", "r") as f:
        values = jsonpickle.loads(f.read())
    # get the results and save them
    global robot_type, m_settings, d_settings, state
    robot_type = values["type"]
    m_settings = values[robot_type]
    d_settings = values["drive"]
    # setup the state object
    if is_gripper():
        state = GripperState(m_settings["lift_min"], m_settings["grip_min"])
    # initalize i2c and piconzero
    piconzero.init()
    # Open the socket and start the listener thread
    netwk_mgr = NetworkManager(logger)
    netwk_mgr.start()
    # Make robot stuff
    robot_disabled = True
    watchdog = Watchdog(logger)
    if is_elevator():
        piconzero.set_output_config(m_settings["motor_channel"], 1)  # set channel 0 to PWM mode
    if is_gripper():
        piconzero.set_output_config(m_settings["lift_servo"], 2)
        piconzero.set_output_config(m_settings["grip_servo"], 2)  # set channel 0 and 1 to Servo mode
    # Initialization should be done now, start accepting packets
    while True:
        try:
            raw_pack = netwk_mgr.get_next_packet()
            if raw_pack is not None:
                try:
                    pack = jsonpickle.decode(raw_pack)  # recieve packets, decode them, then de-json them
                except JSONDecodeError as e:
                    print(e)
                    logger.warning(str(e))
                    continue
                watchdog.reset()
                # Type-check the data
                if type(pack) is not Packet:
                    print("pack is not a Packet", file=sys.stderr)
                    continue
                # Process the packet
                if pack.type == PacketType.STATUS:
                    # Check the contents of the packet
                    if type(pack.data) is RobotStateData:
                        if pack.data == RobotStateData.ENABLE:
                            robot_disabled = False
                            # Reinitialize the picon zero
                            piconzero.init()
                            if is_elevator():
                                piconzero.set_output_config(m_settings["motor_channel"], 1)  # set channel 0 to PWM mode
                            if is_gripper():
                                piconzero.set_output_config(m_settings["lift_servo"], 2)
                                piconzero.set_output_config(m_settings["grip_servo"], 2)  # set channel 0 and 1 to Servo mode
                            continue
                        elif pack.data == RobotStateData.DISABLE:
                            robot_disabled = True
                            piconzero.cleanup()
                            continue
                        elif pack.data == RobotStateData.E_STOP:
                            # Break out into the emergency-stopped loop below.
                            piconzero.cleanup()
                            break
                elif pack.type == PacketType.REQUEST:
                    # Check for the request type
                    if pack.data == RequestData.STATUS:
                        # Send a response
                        packet = Packet(
                            PacketType.RESPONSE,
                            # generate a packet saying if the robot is enabled or disabled
                            RobotStateData.DISABLE if robot_disabled else RobotStateData.ENABLE)
                        netwk_mgr.send_packet(jsonpickle.encode(packet))
                elif pack.type == PacketType.RESPONSE:
                    # do more stuff
                    continue
                elif pack.type == PacketType.DATA:
                    # See if the robot is disabled
                    if robot_disabled:
                        continue
                    # Check and see if a list of packets was sent
                    if type(pack.data) is list:
                        for item in pack.data:
                            process_data(item)
                    else:
                        process_data(pack)
        except Exception as e:
            # Keep the main loop alive on any processing error.
            logger.error(e, exc_info=True)
            pass
    # Emergency Stopped loop
    while True:
        # Disable all outputs
        piconzero.cleanup()
        # Accept a packet
        raw_pack = netwk_mgr.get_next_packet()
        if raw_pack is not None:
            pack = jsonpickle.decode(raw_pack)  # receive packets, decode them, then de-json them
            # Check for a request
            if pack.type == PacketType.REQUEST:
                # Send a response, no matter the request type
                packet = Packet(
                    PacketType.RESPONSE,
                    RobotStateData.E_STOP)  # generate a packet saying that this robot is e-stopped
                # NOTE(review): sent un-encoded here, unlike the encode()
                # call in the main loop above -- confirm intent.
                netwk_mgr.send_packet(packet)
        time.sleep(.250)  # delay for 250ms, don't want to spam the picon zero with cleanup requests
        pass
borderColor: [ 'rgba(255, 99, 132, 1)', 'rgba(54, 162, 235, 1)', 'rgba(255, 206, 86, 1)', 'rgba(75, 192, 192, 1)', 'rgba(153, 102, 255, 1)', 'rgba(255, 159, 64, 1)' ], borderWidth: 1 }] } ''' new_data = { 'labels': list(partials.keys()), 'datasets': [{ 'label': 'Inner links', 'data': list(partials.values()) }] } return new_data if __name__ == '__main__': CRAWL_ID = 1 data = inner_links_data(CRAWL_ID) print("Data: %r" % data) print("Data: %s" % jsonpickle.encode(data))
def __hash__(self) -> int:
    """Hash on the key plus the jsonpickled value, so non-hashable
    values still contribute deterministically."""
    encoded_value = jsonpickle.encode(self.value)
    return hash((self.key, encoded_value))
def pretty(obj):
    """Pretty-print *obj* as 2-space-indented jsonpickle JSON."""
    encoded = jsonpickle.encode(obj, indent=2)
    print(encoded)
def test_no_error(self):
    """With a fail_safe that maps errors to None, the failing element
    decodes to None and the good one survives."""
    roundtripped = jsonpickle.decode(
        jsonpickle.encode(self.to_pickle, fail_safe=lambda e: None))
    self.assertEqual(roundtripped[0], None)
    self.assertEqual(roundtripped[1], 'good')
data.type = "long" sample_list.append(data) data = SentinelElement() data.key = "cpu_usage_total" if (container.id + "_" + "cpu_stats-cpu_usage-total_usage") in container_cache and \ container_cache[container.id + "_" + "cpu_stats-cpu_usage-total_usage"] > 0: data.value = stat["cpu_stats"]["cpu_usage"]["total_usage"] - \ container_cache[container.id + "_" + "cpu_stats-cpu_usage-total_usage"] container_cache[container.id + "_" + "cpu_stats-cpu_usage-total_usage"] = \ stat["cpu_stats"]["cpu_usage"]["total_usage"] if data.value < 0: data.value = 0 else: container_cache[container.id + "_" + "cpu_stats-cpu_usage-total_usage"] = \ stat["cpu_stats"]["cpu_usage"]["total_usage"] data.value = 0 data.type = "long" sample_list.append(data) container_data["metrics"] = sample_list element_list.append(container_data) msg_dict = {} msg_dict["host"] = hostname msg_dict["unixtime"] = str(time.time()) # unix time in seconds msg_dict["agent"] = "sentinel-docker-agent" msg_dict["values"] = element_list msg_to_send = jsonpickle.encode(msg_dict) # print(msg_to_send) send_msg(msg_to_send) time.sleep(int(get_element_value("agent", "period")))
def test_newstyleslots_string_slot(self):
    """A __slots__ declared as a plain string round-trips."""
    restored = jsonpickle.decode(
        jsonpickle.encode(ThingWithStringSlots('a', 'b')))
    self.assertEqual(restored.ab, 'ab')
def test_newstyleslots_iterable(self):
    """__slots__ given as an iterable round-trips both attributes."""
    source = ThingWithIterableSlots('alpha', 'bravo')
    restored = jsonpickle.decode(jsonpickle.encode(source))
    self.assertEqual(restored.a, 'alpha')
    self.assertEqual(restored.b, 'bravo')
def get_pubmed_texts(self):
    '''get PubMed texts from a list of pubmed ids and extract entities'''
    # we use a cache for minimizing the number of geonames queries
    cache_dict = {}
    # retrieve the records using the API or get it from cache
    if USE_REDIS:
        # Two redis DBs: one holds fully processed records, the other
        # caches the raw downloaded JSON.
        proc_red = redis.Redis(host=REDIS_HOST, port=REDIS_PORT,
                               db=REDIS_PMC_PROCESSED_DB)
        unpr_red = redis.Redis(host=REDIS_HOST, port=REDIS_PORT,
                               db=REDIS_PMC_CACHE_DB)
        for pmid in self.pubmedids:
            # first check if the records exist in the processed pubmed cache
            if proc_red.exists(pmid):
                pmrec_json = proc_red.get(pmid)
                logging.debug("%s found in PMC processed cache DB", pmid)
                pubmed_record = jsonpickle.decode(pmrec_json)
                self.pubmed_records[pmid] = pubmed_record
            # if not check if it exists in our unprocessed pubmed cache
            else:
                if unpr_red.exists(pmid):
                    # if exists get from cache
                    raw_json = unpr_red.get(pmid)
                    logging.debug("%s found in PMC cache DB", pmid)
                else:
                    # download json
                    raw_json = download_pubmed_record(pmid)
                    if not raw_json:
                        continue
                    try:
                        # add to unprocessed cache (validate it parses first)
                        json.loads(raw_json)
                        unpr_red.set(pmid, raw_json)
                    except Exception as e2:
                        logging.error("Invalid JSON: %s for %s", e2, pmid)
                        continue
                # if files have been downloaded
                raw_text = ""
                if exists(SUPPLEMENTAL_DATA_DIR + pmid):
                    raw_text = extract_text_from_files(
                        SUPPLEMENTAL_DATA_DIR + pmid)
                # create record object and process
                pubmed_record = PubMedRecord(pmid, raw_json, raw_text)
                pubmed_record.extract_entities()
                cache_dict = pubmed_record.normalize_entities(cache_dict)
                self.pubmed_records[pmid] = pubmed_record
                # add to cache if using redis
                pmrec_json = jsonpickle.encode(pubmed_record)
                proc_red.set(pmid, pmrec_json)
    else:
        # if not using redis cache, just download the articles and process
        for pmid in self.pubmedids:
            # if files have been downloaded
            if exists(SUPPLEMENTAL_DATA_DIR + pmid):
                raw_text = extract_text_from_files(SUPPLEMENTAL_DATA_DIR + pmid)
                raw_json = "{}"
            else:
                raw_text = ""
                # download json
                raw_json = download_pubmed_record(pmid)
            if raw_json or raw_text:
                pubmed_record = PubMedRecord(pmid, raw_json, raw_text)
                self.pubmed_records[pmid] = pubmed_record
                pubmed_record.extract_entities()
                cache_dict = pubmed_record.normalize_entities(cache_dict)
def roundtrip(self, ob):
    """Encode/decode *ob*, assert equality with the original, and
    return the decoded copy for further checks."""
    restored = jsonpickle.decode(jsonpickle.encode(ob))
    self.assertEqual(restored, ob)
    return restored
def serialize_distribution(self, dist):
    """Serialize *dist* to a jsonpickle JSON string."""
    encoded = jsonpickle.encode(dist)
    return encoded
def test_counter_roundtrip_with_keys(self):
    """keys=True preserves the int key of a Counter through a round trip."""
    original = collections.Counter({1: 2})
    restored = jsonpickle.decode(jsonpickle.encode(original, keys=True),
                                 keys=True)
    self.assertTrue(type(restored) is collections.Counter)
    self.assertEqual(restored.get(1), 2)
def __hash__(self) -> int:
    """Hash on the text plus the jsonpickled data payload."""
    data_json = jsonpickle.encode(self.data)
    return hash((self.text, data_json))
def to_json_str(self):
    """Serialize this object to a jsonpickle JSON string."""
    encoded = jsonpickle.encode(self)
    return encoded
def calculate_checksum(obj):
    """Return the SHA-256 digest (bytes) of *obj*'s jsonpickled form."""
    payload = jsonpickle.encode(obj).encode("utf-8")
    return hashlib.sha256(payload).digest()
def test_ordered_dict_unpicklable(self):
    """An OrderedDict survives an unpicklable=False round trip as an
    equal mapping."""
    source = collections.OrderedDict([('c', 3), ('a', 1), ('b', 2)])
    restored = jsonpickle.decode(jsonpickle.encode(source, unpicklable=False))
    self.assertEqual(source, restored)
def writeConfigJson(config):
    """Persist *config* to CONFIG_FILE_NAME as jsonpickle JSON."""
    encoded = jsonpickle.encode(config)
    with open(CONFIG_FILE_NAME, 'w') as out:
        out.write(encoded)
def to_json(self) -> str:
    """Return this object serialized as a jsonpickle JSON string."""
    encoded = jsonpickle.encode(self)
    return encoded
def test_int_keys_in_object_with_getstate_only(self):
    """Int dict keys survive a keys=True round trip for an object
    defining only __getstate__."""
    source = IntKeysObject()
    restored = jsonpickle.decode(jsonpickle.encode(source, keys=True),
                                 keys=True)
    self.assertEqual(source.data, restored.data)
def __repr__(self):
    """Debug representation: the jsonpickled form of the instance."""
    encoded = jsonpickle.encode(self)
    return encoded
def test_enum_unpicklable(self):
    """unpicklable=False flattens a Message into a plain dict that keeps
    both enum-valued fields."""
    flat = jsonpickle.decode(jsonpickle.encode(
        Message(MessageTypes.STATUS, MessageCommands.STATUS_ALL),
        unpicklable=False))
    self.assertTrue('message_type' in flat)
    self.assertTrue('command' in flat)
def random_string(prefix, maxlen):
    # Random alphanumeric string of length < maxlen appended to *prefix*.
    # The padded spaces are stripped per character, which skews the result
    # toward shorter strings.
    symbols = string.ascii_letters + string.digits + " " * 10
    return prefix + "".join([
        random.choice(symbols).strip()
        for i in range(random.randrange(maxlen))
    ])


def random_status():
    # One of the fixed lifecycle states.
    symbols = ["development", "release", "stable", "obsolete"]
    return "".join([random.choice(symbols).strip()])


def random_view_status():
    # Either visibility value.
    symbols = ["public", "private"]
    return "".join([random.choice(symbols).strip()])


# Build n random Project fixtures and dump them (pretty-printed) to the
# output file f, resolved relative to this script's parent directory.
# NOTE(review): n, f and Project are defined elsewhere in this script.
testdata = [
    Project(name=random_string("name", 10), status=random_status(),
            description=random_string("description", 40),
            view_status=random_view_status())
    for i in range(n)
]

file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)

with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
def to_json(self):
    """Serialize to plain JSON (unpicklable=False) via the simplejson
    backend.

    NOTE(review): set_encoder_options is called with no options, which is
    a global no-op -- confirm whether indent/sort flags were intended.
    """
    jsonpickle.set_encoder_options('simplejson')
    encoded = jsonpickle.encode(self, unpicklable=False)
    return encoded
def asJson(self):
    """Return this object as a jsonpickle JSON string."""
    return jsonpickle.encode(self)
def test_thing_with_lamda(self):
    """A lambda-valued attribute is dropped during the round trip."""
    restored = jsonpickle.decode(jsonpickle.encode(Thing(lambda: True)))
    self.assertFalse(hasattr(restored, 'name'))
def __hash__(self) -> int:
    """Hash on text, intent name and the jsonpickled entities."""
    entity_json = jsonpickle.encode(self.entities)
    return hash((self.text, self.intent_name, entity_json))
def test_newstyleslots(self):
    """Both __slots__ attributes survive a round trip."""
    restored = jsonpickle.decode(jsonpickle.encode(ThingWithSlots(True, False)))
    self.assertTrue(restored.a)
    self.assertFalse(restored.b)
def test_newstyleslots_with_children(self):
    """__slots__ holding nested objects round-trips the children too."""
    source = ThingWithSlots(Thing('a'), Thing('b'))
    restored = jsonpickle.decode(jsonpickle.encode(source))
    self.assertEqual(restored.a.name, 'a')
    self.assertEqual(restored.b.name, 'b')
def test_custom_err_msg(self):
    """A fail_safe callback can substitute a custom message for the
    failing element."""
    CUSTOM_ERR_MSG = 'custom err msg'
    restored = jsonpickle.decode(jsonpickle.encode(
        self.to_pickle, fail_safe=lambda e: CUSTOM_ERR_MSG))
    self.assertEqual(restored[0], CUSTOM_ERR_MSG)
def write_fast_growing_keyword_to_json_file(self):
    """Dump the fast-growing keyword list to
    export/fast_growing_keyword.json (UTF-8)."""
    target = get_independent_os_path(["export", "fast_growing_keyword.json"])
    with open_utf8_file_to_write(target) as stream:
        stream.write(jsonpickle.encode(self._fast_growing_list))