def shareload(identifier):
    chatname = None
    if 'chatname' in session:
        chatname = session['chatname']
    conn = sqlite3.connect(FROLIC_DB)
    c = conn.cursor()
    c.execute("SELECT places, swaps, waypoints FROM routes WHERE identifier=?",
              [identifier])
    try:
        places, swaps, waypoints = c.fetchone()
    except TypeError:
        # fetchone() returned None: no route with this identifier
        conn.close()
        return redirect('/', 302)
    places = jsonpickle.decode(places)
    swaps = jsonpickle.decode(swaps)
    if 'cache' not in session:
        session['cache'] = {}
    session['cache']['places'] = jsonpickle.encode(places)
    session['cache']['swaps'] = jsonpickle.encode(deque(swaps))
    session['cache']['waypoints'] = waypoints
    conn.commit()
    conn.close()
    return render_template('places.html', places=places, swaps=swaps,
                           waypoints_list=waypoints, mobile=_is_mobile(request),
                           chatname=chatname, is_share=True)
def test_republish(self):
    # create a build request
    build_string = (
        '{"py/object": "pybit.models.BuildRequest", "timestamp": null, '
        '"job": {"py/object": "pybit.models.Job", '
        '"packageinstance": {"py/object": "pybit.models.PackageInstance", '
        '"format": {"py/object": "pybit.models.Format", "id": 5, '
        '"name": "completezip"}, '
        '"package": {"py/object": "pybit.models.Package", "version": "1.2", '
        '"id": 2, "name": "col10642"}, "id": 1, "master": true, '
        '"suite": {"py/object": "pybit.models.Suite", "id": 3, "name": "latex"}, '
        '"distribution": {"py/object": "pybit.models.Dist", "id": 2, '
        '"name": "cnx"}, '
        '"arch": {"py/object": "pybit.models.Arch", "id": 9, "name": "desktop"}, '
        '"build_env": null}, "id": 36, "buildclient": null}, '
        '"transport": {"py/object": "pybit.models.Transport", '
        '"uri": "http://cnx.org/", "id": null, "vcs_id": "", "method": ""}, '
        '"web_host": "localhost:8080"}')
    build_request = jsonpickle.decode(build_string)

    # republish
    queue = 'cnx_desktop_latex_completezip'
    self.channel.queue_declare(queue=queue)
    coyote.republish(build_request, queue, self.channel)

    # get it off the queue and test it
    tag = self.channel.basic_get(queue=queue, no_ack=True)
    build_request = jsonpickle.decode(tag[-1])
    job_id = build_request.get_job_id()
    self.assertEqual(job_id, 36)
    package = build_request.get_package()
    self.assertEqual(package, 'col10642')
    version = build_request.get_version()
    self.assertEqual(version, '1.2')
def fromFile(filename, no_practice=False):
    """
    Read experiment log from file.

    :param filename: path to the experiment log file
    :param no_practice: if True, skip the practice-phase lines
    :return: (responses, test_results, responses_practice,
              test_results_practice, images)
    """
    from os import path
    if path.split(filename)[-1].startswith("P2P"):
        return fromFile_p2p(filename)
    lines = open(filename).readlines()
    lines = [refactor_old_references(line) for line in lines]
    images = jsonpickle.decode(lines[0])
    responses = recursive_decode(lines[1])
    test_results = jsonpickle.decode(lines[2])
    test_results = _expandTestResults(test_results, images)
    responses = _expandResponsesNew(responses, images)
    if not no_practice:
        responses_practice = recursive_decode(lines[3])
        test_results_practice = jsonpickle.decode(lines[4])
        test_results_practice = _expandTestResults(test_results_practice, images)
        responses_practice = _expandResponsesNew(responses_practice, images)
        return (responses, test_results, responses_practice,
                test_results_practice, images)
    return responses, test_results, None, None, images
def test_new_mitigate_post(self):
    method = 'test_new_mitigate_post'
    url = '/api/responses'
    self.logger.info('[%s] URL: %s', method, url)
    new_response_body = self.prepare_json()
    self.app.delete('/api/responses/name/%s?session_id=test'
                    % quote(self.prepare_new_response().theName))
    rv = self.app.post(url, content_type='application/json',
                       data=new_response_body)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    msg = json_resp.get('message', None)
    self.assertIsNotNone(msg, 'No message returned')
    self.logger.info('[%s] Message: %s\n', method, msg)

    rv = self.app.post('/api/responses/name/%s/generate_goal?session_id=test'
                       % quote(self.prepare_new_response().theName))
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    ackMsg = json_resp.get('message', None)
    self.assertEqual(ackMsg, 'Goal successfully generated')

    rv = self.app.delete('/api/responses/name/%s?session_id=test'
                         % quote(self.prepare_new_response().theName))
def test_get_existing_status_with_error_in_process(self, http_client, http_server,
                                                   base_url, monkeypatch, stub_isdir):
    message_from_process = {"msg": "My message",
                            "stdout": "Stdout stuff",
                            "stderr": "This was some error from stderr"}

    def my_get_with_error(self, pid, wrapper_type):
        return ProcessInfo(runfolder="foo", host="bar", state=State.ERROR,
                           proc=None, msg=message_from_process, pid=pid)

    monkeypatch.setattr("siswrap.wrapper_services.ProcessService.get_status",
                        my_get_with_error)

    resp = yield http_client.fetch(base_url + API_URL + "/report/status/123")
    assert resp.code == 200
    payload = jsonpickle.decode(resp.body)
    print(payload)
    assert payload["pid"] == 123
    assert payload["state"] == State.ERROR
    assert payload["msg"] == message_from_process

    resp = yield http_client.fetch(base_url + API_URL + "/qc/status/321")
    assert resp.code == 200
    payload = jsonpickle.decode(resp.body)
    assert payload["pid"] == 321
def test_show_databases(self):
    method = 'test_show_databases'
    url = '/api/settings/database/testshowdb/create?session_id=test'
    rv = self.app.post(url)
    self.assertIsNotNone(rv.data, 'No response')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    json_dict = jsonpickle.decode(responseData)
    message = str(json_dict['message'])
    self.assertGreater(message.find('successfully'), -1,
                       'Failed to create testshowdb')

    url = '/api/settings/database/default/open?session_id=test'
    rv = self.app.post(url)
    self.assertIsNotNone(rv.data, 'No response')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    json_dict = jsonpickle.decode(responseData)
    message = str(json_dict['message'])
    self.assertGreater(message.find('successfully'), -1,
                       'Failed to open default databases')

    rv = self.app.get('/api/settings/databases?session_id=test')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    dbs = jsonpickle.decode(responseData)
    self.assertIsNotNone(dbs, 'No results after deserialization')
    self.assertIsInstance(dbs, list, 'The result is not a list as expected')
    self.assertGreater(len(dbs), 0, 'No databases in the list')
    self.assertIn('testshowdb', dbs)
def deserialize(self):
    cache_loaded = False
    if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
        try:
            self.memcache = {"server": {}, "users": {}}
            with open(self.server_fname()) as backupfile:
                print("Attempting to reload cache")
                self.memcache['server'] = jsonpickle.decode(backupfile.read())
                print("Server cache loaded", json.dumps(self.memcache, indent=4))
            for user in self.memcache['server']['user_list']:
                # Try to load as much user data as possible
                if os.path.exists(self.user_fname(user)):
                    print("found path for user", user)
                    with open(self.user_fname(user)) as userfile:
                        user_data = jsonpickle.decode(userfile.read())
                        self.memcache['users'][user] = user_data
            cache_loaded = True
        except Exception as e:
            print("Cache file corrupted...")
            raise e
    if not cache_loaded:
        print("Cache could not be loaded")
    else:
        print("CACHE LOADED SUCCESSFULLY!")
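# A minimal counterpart sketch for writing the cache back out, assuming the
# same server/user file layout that deserialize() above reads. The method
# name `serialize` is an assumption; server_fname(), user_fname(), and the
# memcache structure come from the snippet above.
def serialize(self):
    with open(self.server_fname(), 'w') as backupfile:
        backupfile.write(jsonpickle.encode(self.memcache['server']))
    for user, user_data in self.memcache['users'].items():
        with open(self.user_fname(user), 'w') as userfile:
            userfile.write(jsonpickle.encode(user_data))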
def load_server_settings(root_dir, config):
    assert hasattr(config, 'registry')
    assert hasattr(config.registry, 'settings')
    log = logging.getLogger(__name__)
    for root, dirs, files in os.walk(root_dir):
        root = os.path.abspath(root)
        if '.settings.json' in files:
            lastfolder = os.path.abspath(root + '/..')
            last_settings = dict()
            if lastfolder in config.registry.settings['directory_settings']:
                last_settings = config.registry.settings['directory_settings'][lastfolder]
            config.registry.settings['directory_settings'][root] = last_settings
            filename = root + '/.settings.json'
            with open(os.path.join(filename), "r") as myfile:
                data = myfile.read()
            settings_struct = jsonpickle.decode(data)
            # handle doubly-encoded settings files
            if not isinstance(settings_struct, dict):
                settings_struct = jsonpickle.decode(settings_struct)
            try:
                settings_struct.update(config.registry.settings['directory_settings'][root])
                config.registry.settings['directory_settings'][root] = settings_struct
                config.registry.settings['directory_settings'][root]['reload'] = \
                    config.registry.settings['reload_templates']
                config.registry.settings['directory_settings'][root]['path'] = filename
            except Exception as e:
                log.error(e.message)
        else:
            path = os.path.abspath(root + '/..')
            if path in config.registry.settings['directory_settings']:
                config.registry.settings['directory_settings'][root] = \
                    config.registry.settings['directory_settings'][path]
def test_x_put(self):
    method = 'test_x_put'
    url = '/api/requirements'
    rv = self.app.get('/api/requirements?session_id=test')
    reqs = jsonpickle.decode(rv.data)
    requirement = reqs.get(self.new_requirement.theDescription)

    upd_requirement = self.new_requirement
    upd_requirement.theName = 'Test2'
    upd_requirement.theId = requirement['theId']
    upd_requirement_dict = self.new_requirement_dict
    upd_requirement_dict['object'] = upd_requirement
    upd_requirement_body = jsonpickle.encode(upd_requirement_dict)
    self.logger.info('[%s] JSON data: %s', method, upd_requirement_body)

    rv = self.app.put(url, content_type='application/json',
                      data=upd_requirement_body)
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message returned')
    self.logger.info('Message: %s', message)

    rv = self.app.get('/api/requirements?session_id=test')
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    requirements = jsonpickle.decode(rv.data)
    requirement = requirements.get(upd_requirement.theDescription, None)
    self.assertIsNotNone(requirement, 'Requirement not updated as expected')
    self.logger.info('[%s] Requirement: %s [%d]\n', method,
                     requirement['theName'], requirement['theId'])
def test_get_trace_dimensions(self):
    method = 'test_get_trace_dimensions'
    url = '/api/traces/dimensions/requirement/is_from/1?session_id=test'
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.get(url)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    fromDims = jsonpickle.decode(responseData)
    self.assertIsNotNone(fromDims, 'No results after deserialization')
    self.logger.info('[%s] Traces found: %d', method, len(fromDims))
    self.assertEqual(len(fromDims), 6)

    url = '/api/traces/dimensions/requirement/is_from/0?session_id=test'
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.get(url)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    toDims = jsonpickle.decode(responseData)
    self.assertIsNotNone(toDims, 'No results after deserialization')
    self.logger.info('[%s] Traces found: %d', method, len(toDims))
    self.assertEqual(len(toDims), 2)
def test_settings_put(self):
    url = '/api/settings?session_id=test'
    method = 'test_settings_put'
    rv = self.app.get(url)
    json_dict = jsonpickle.decode(rv.data)
    self.logger.info('[%s] Current project name: %s', method,
                     json_dict['projectName'])

    settings = self.convert_to_obj(json_dict)
    settings.projectName = 'A new project name'
    new_json_dict = {'session_id': 'test', 'object': settings}
    json_body = jsonpickle.encode(new_json_dict)
    rv = self.app.put(url, data=json_body, content_type='application/json')
    self.assertIsNotNone(rv.data, 'No response')
    new_json_dict = jsonpickle.decode(rv.data)
    self.assertIsInstance(new_json_dict, dict,
                          'Response is not a valid JSON dictionary')
    message = new_json_dict.get('message', None)
    self.assertIsNotNone(message)
    self.logger.info('[%s] Message: %s', method, message)

    rv = self.app.get(url)
    new_json_dict = jsonpickle.decode(rv.data)
    self.logger.info('[%s] New project name: %s\n', method,
                     new_json_dict['projectName'])

    # restore the original settings
    new_json_dict = {'session_id': 'test', 'object': json_dict}
    json_body = jsonpickle.encode(new_json_dict)
    rv = self.app.put(url, data=json_body, content_type='application/json')
def test_create_new_project(self):
    url = '/api/settings/create?session_id=test'
    import_url = '/api/import/file/type/all'
    method = 'test_create_new_project'
    rv = self.app.post(url)
    self.assertIsNotNone(rv.data, 'No response')
    json_dict = jsonpickle.decode(rv.data)
    self.assertIsInstance(json_dict, dict,
                          'Response is not a valid JSON dictionary')
    self.assertTrue(json_dict.has_key('message'), 'No message in response')
    message = str(json_dict['message'])
    self.logger.info('[%s] Message: %s', method, message)
    self.assertGreater(message.find('successfully'), -1,
                       'Failed to create new project')

    fs_xmlfile = open(self.xmlfile, 'rb')
    file_contents = fs_xmlfile.read()
    data = {
        'session_id': 'test',
        'file': (StringIO(file_contents), 'import.xml')
    }
    rv = self.app.post(import_url, data=data,
                       content_type='multipart/form-data')
    self.assertIsNotNone(rv.data, 'No response after reimporting model')
    json_dict = jsonpickle.decode(rv.data)
    self.assertIsInstance(json_dict, dict,
                          'Response is not a valid JSON dictionary')
    message = json_dict.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.assertGreater(message.find('0'), -1, 'Failed to import any data')
    self.logger.info('[%s] Successfully created new project and restored '
                     'the example project\n', method)
def job_archive():
    """
    Returns the job archive from the database

    :return: list of job dicts, each with decoded job info and its outputs
    """
    jobs_stmt = DBSession.query(Job).order_by(Job.updatetime.desc()).all()
    jobs_dict = []
    for jobs in jobs_stmt:
        job_outputs = DBSession.query(JobOutput).filter(
            JobOutput.jobid == jobs.id).all()
        jobs = jobs.__dict__
        if 'jobinfo' in jobs and jobs['jobinfo'] is not None:
            obj = jsonpickle.decode(jobs['jobinfo'])
            try:
                jobs.update(obj)
            except BaseException as e:
                print(str(e))
        if 'jobdetails' in jobs and jobs['jobdetails'] is not None:
            obj = jsonpickle.decode(jobs['jobdetails'])
            try:
                jobs.update(obj)
            except BaseException as e:
                print(str(e))
        jobs['number_of_joboutputs'] = len(job_outputs)
        jobs['joboutputs'] = []
        for outputs in job_outputs:
            jobs['joboutputs'].append(outputs.__dict__)
        jobs_dict.append(jobs)
    return jobs_dict
def test_get_risk_threat_level_by_environment(self):
    method = 'test_get_risk_threat_level_by_environment'
    url = '/api/risk_level/asset/threat_type/ICT%20Application/Enumeration/environment/Day?session_id=test'
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.get(url)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    level = jsonpickle.decode(responseData)
    self.assertIsNotNone(level, 'No results after deserialization')
    self.assertIsInstance(level, int, 'The result is not an integer as expected')
    self.assertEqual(level, 9)

    url = '/api/risk_level/asset/threat_type/ICT%20Application/Enumeration/environment/Night?session_id=test'
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.get(url)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    level = jsonpickle.decode(responseData)
    self.assertIsNotNone(level, 'No results after deserialization')
    self.assertIsInstance(level, int, 'The result is not an integer as expected')
    self.assertEqual(level, 0)
def new_recursive_decode(string, verbose=False):
    def print_(stuff):
        if verbose:
            print stuff

    obj = string
    if (isinstance(string, str) or isinstance(string, unicode)) \
            and "py/object" in string:
        print_("Decoding string: {}".format(string))
        obj = jsonpickle.decode(string)
    if isinstance(obj, dict):
        if "py/object" in obj:
            print_("Detected a dict with py/object as key: {}".format(obj))
        new_obj = {}
        for k, v in obj.items():
            try:
                new_obj[k] = jsonpickle.decode(v)
            except:
                new_obj[k] = new_recursive_decode(v)
        # replace the dict with its decoded values (the original code built
        # new_obj but never used it)
        obj = new_obj
    if isinstance(obj, list):
        obj = [new_recursive_decode(o) for o in obj]
    print_("Returning object: {}".format(obj))
    return obj
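# A hypothetical usage sketch for new_recursive_decode() above: a dict whose
# values are themselves jsonpickle strings gets decoded value by value, while
# plain values fall through the recursive branch unchanged. The payload below
# is illustrative, not from the original project.
import jsonpickle

nested = {
    "pickled": jsonpickle.encode({"a": 1}),  # decodes to {"a": 1}
    "plain": "just text",                    # comes back unchanged
}
decoded = new_recursive_decode(nested, verbose=True)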
def test_x_put(self):
    method = 'test_x_put'
    url = '/api/requirements?asset=%s' % quote(self.existing_asset_name)
    rv = self.app.post(url, content_type='application/json',
                       data=self.new_requirement_body)

    upd_requirement = self.new_requirement
    upd_requirement['theName'] = 'Test2'
    upd_requirement_dict = self.new_requirement_dict
    upd_requirement_dict['object'] = upd_requirement
    upd_requirement_body = json.dumps(upd_requirement_dict)
    self.logger.info('[%s] JSON data: %s', method, upd_requirement_body)

    rv = self.app.put(url, content_type='application/json',
                      data=upd_requirement_body)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message returned')
    self.logger.info('Message: %s', message)

    rv = self.app.get('/api/requirements?session_id=test')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    self.logger.debug('[%s] Response data: %s', method, responseData)
    requirements = jsonpickle.decode(responseData)
    requirement = requirements[0]
    self.assertIsNotNone(requirement, 'Requirement not updated as expected')
    self.logger.info('[%s] Requirement: %s\n', method, requirement['theName'])
def save_experiment(request):
    # Save ENA experiment object(s). One experiment object is created for
    # each panel on the front-end (although, as far as users are concerned,
    # multiple panels can belong to the same experiment).

    # certain attributes are shared between the different experiments
    # generated by the front end
    common = jsonpickle.decode(request.POST.get('common'))
    # others are particular to the individual object
    per_panel = jsonpickle.decode(request.POST.get('per_panel'))

    if per_panel['experiment_id'] == '':
        # if we are dealing with a new experiment (i.e. no id has been
        # supplied), create a new object
        experiment_id = EnaCollection().add_experiment_to_study(
            per_panel, common, request.session["study_id"])
    else:
        # else retrieve the existing object
        experiment_id = EnaCollection().update_experiment_in_study(
            per_panel, common, request.session["study_id"])

    # loop through per_panel['files'], adding each file to the
    # experiment's files list
    for k in range(0, len(per_panel['files'])):
        c = ChunkedUpload.objects.get(id=int(per_panel['files'][k]))
        if len(per_panel['hashes']) > k:
            hash = per_panel['hashes'][k]
        else:
            hash = ''
        EnaCollection().add_file_to_study(request.session['study_id'],
                                          experiment_id, c.id, hash)

    out = {'experiment_id': experiment_id}
    return HttpResponse(jsonpickle.encode(experiment_id),
                        content_type='text/plain')
def broker_owner(gameid, placeid):
    """
    GET: get the owner
    PUT: trade the place by changing the owner
    POST: buy the place in question; it will fail if it is not for sale
    """
    if request.method == 'GET':
        estate = get_place(gameid, placeid)
        if estate:
            return make_response(estate.owner, 200)
    elif request.method == 'PUT':
        player = jsonpickle.decode(request.data)
        estate = get_place(gameid, placeid)
        estate.owner = player['id']
        return make_response('', 200)
    elif request.method == 'POST':
        player = jsonpickle.decode(request.data)
        estate = get_place(gameid, placeid)
        visit = Broker.get(gameid).visits.get(player['id'], None)
        if (estate and estate.owner == '' and visit
                and visit == estate.place):
            if buy_estate(gameid, player, estate):
                return make_response('', 200)
    return make_response('The place is not for sale - either not buyable '
                         'or already sold (Conflict)', 409)
def process_task(body):
    new_stdin = sys.stdin
    try:
        fileno = sys.stdin.fileno()
        if fileno is not None:
            try:
                new_stdin = os.fdopen(os.dup(fileno))
            except OSError, e:
                # couldn't dupe stdin, most likely because it's
                # not a valid file descriptor, so we just rely on
                # using the one that was passed in
                pass
    except ValueError:
        # couldn't get stdin's fileno, so we just carry on
        pass

    shared_loader_obj = SharedPluginLoaderObj()
    loader = DataLoader(vault_password=None)

    task_json = json.loads(body)
    loader.set_basedir(task_json['base_dir'])
    host = jsonpickle.decode(task_json['host'])
    task = jsonpickle.decode(task_json['task'])
    job_vars = jsonpickle.decode(task_json['task_vars'])
    connection_info = jsonpickle.decode(task_json['conn_info'])
    task.set_loader(loader)

    new_connection_info = connection_info.set_task_and_host_override(task=task,
                                                                     host=host)
    executor_result = TaskExecutor(host, task, job_vars, new_connection_info,
                                   new_stdin, loader, shared_loader_obj).run()
    task_result = TaskResult(host, task, executor_result)
    task_pickled = jsonpickle.encode(task_result)
    return json.dumps(task_pickled)
def arrange(index):
    try:
        places = jsonpickle.decode(session['cache']['places'])
        swaps = deque(jsonpickle.decode(session['cache']['swaps']))
    except KeyError:
        return redirect('/swap/{}'.format(index), 302)
    # default so the error path below always has a value to render
    waypoints = session['cache'].get('waypoints', '')
    try:
        index = int(index)
        places_copy = []
        places_copy.append(places[index])
        if not places[index] == places[-1]:
            for place in places[index+1:]:
                places_copy.append(place)
        for place in places[:index]:
            places_copy.append(place)
        places = places_copy
        _process_ratings(places)
        session['cache']['places'] = jsonpickle.encode(places)
        waypoints = _stringify([unicode(u' '.join([place.street, place.city,
                                                   place.country]).replace("'", "\\'"))
                                for place in places])
        session['cache']['waypoints'] = waypoints
    except Exception as e:
        traceback.print_exc()
        return render_template('places.html', places=places, swaps=swaps,
                               waypoints_list=waypoints,
                               mobile=_is_mobile(request))
    return render_template('places.html', places=places, swaps=swaps,
                           waypoints_list=waypoints, mobile=_is_mobile(request))
def test_put(self):
    method = 'test_put'
    url = '/api/domainproperties'
    self.logger.info('[%s] URL: %s', method, url)
    new_domainproperty_body = self.prepare_json()
    rv = self.app.post(url, content_type='application/json',
                       data=new_domainproperty_body)
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp, 'No results after deserialization')

    domainproperty_to_update = self.prepare_new_domainproperty()
    domainproperty_to_update.theName = 'Edited test domainproperty'
    upd_env_body = self.prepare_json(domainproperty=domainproperty_to_update)
    rv = self.app.put('/api/domainproperties/name/%s?session_id=test'
                      % quote(self.prepare_new_domainproperty().name()),
                      data=upd_env_body, content_type='application/json')
    self.assertIsNotNone(rv.data, 'No response')
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp)
    self.assertIsInstance(json_resp, dict)
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.logger.info('[%s] Message: %s', method, message)
    self.assertGreater(message.find('successfully updated'), -1,
                       'The domainproperty was not successfully updated')

    rv = self.app.get('/api/domainproperties/name/%s?session_id=test'
                      % quote(domainproperty_to_update.name()))
    upd_domainproperty = jsonpickle.decode(rv.data)
    self.assertIsNotNone(upd_domainproperty, 'Unable to decode JSON data')
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    self.logger.info('[%s] Domain Property: %s [%d]\n', method,
                     upd_domainproperty['theName'],
                     upd_domainproperty['theId'])

    rv = self.app.delete('/api/domainproperties/name/%s?session_id=test'
                         % quote(domainproperty_to_update.theName))
def test_muffin_redis_cache(loop, app, client):
    assert app.ps.redis_cache
    assert app.ps.redis_cache.conn

    response = client.get('/cached')
    assert response.status_code == 200
    assert 'key' in response.json

    response = client.get('/cached_keyprefix')
    assert response.status_code == 200
    assert 'firstname' in response.json

    @asyncio.coroutine
    def exist_key_in_redis():
        return (yield from app.ps.redis_cache.get('view/cached'))

    result = loop.run_until_complete(exist_key_in_redis())
    assert jsonpickle.decode(result) == {'key': 'value'}

    @asyncio.coroutine
    def exist_key_in_redis():
        return (yield from app.ps.redis_cache.get('custom_keyprefix'))

    result = loop.run_until_complete(exist_key_in_redis())
    assert jsonpickle.decode(result) == {'firstname': 'Mike', 'gender': 'male'}
def test_new_mitigate_post(self):
    method = 'test_new_mitigate_post'
    url = '/api/responses'
    self.logger.info('[%s] URL: %s', method, url)
    new_response_body = self.prepare_json()
    self.app.delete('/api/responses/name/%s?session_id=test'
                    % quote(self.prepare_new_response().theName))
    rv = self.app.post(url, content_type='application/json',
                       data=new_response_body)
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    env_id = json_resp.get('response_id', None)
    self.assertIsNotNone(env_id, 'No response ID returned')
    self.assertGreater(env_id, 0, 'Invalid response ID returned [%d]' % env_id)
    self.logger.info('[%s] Response ID: %d\n', method, env_id)

    rv = self.app.post('/api/responses/name/%s/generate_goal?session_id=test'
                       % quote(self.prepare_new_response().theName))
    self.logger.debug('[%s] Response data: %s', method, rv.data)
    json_resp = jsonpickle.decode(rv.data)
    self.assertIsNotNone(json_resp, 'No results after deserialization')
    ackMsg = json_resp.get('message', None)
    self.assertEqual(ackMsg, 'Goal successfully generated')

    rv = self.app.delete('/api/responses/name/%s?session_id=test'
                         % quote(self.prepare_new_response().theName))
def test_put(self):
    method = 'test_put'
    rv = self.app.post('/api/trust_boundaries?session_id=test',
                       content_type='application/json',
                       data=self.prepare_json())
    url = '/api/trust_boundaries/name/Shibboleth'
    self.logger.info('[%s] URL: %s', method, url)
    upd_body = self.prepare_json(trust_boundary=self.prepare_updated_trust_boundary())
    rv = self.app.put('/api/trust_boundaries/name/Shibboleth?session_id=test',
                      data=upd_body, content_type='application/json')
    self.assertIsNotNone(rv.data, 'No response')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp)
    self.assertEqual(json_resp['message'], 'TrustBoundary successfully updated')

    rv = self.app.get('/api/trust_boundaries/name/Shibboleth?session_id=test')
    self.assertIsNotNone(rv.data, 'No response')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    self.logger.debug('[%s] Response data: %s', method, responseData)
    upd_tb = jsonpickle.decode(responseData)
    self.assertIsNotNone(upd_tb, 'No results after deserialization')
    self.assertEqual(upd_tb['theName'], 'Shibboleth')
    self.assertEqual(upd_tb['theDescription'], 'Identity provider')
    self.assertEqual(upd_tb['theEnvironmentProperties'][0]['theComponents'][0]['theName'],
                     'Authenticate Researcher')
    self.assertEqual(upd_tb['theEnvironmentProperties'][0]['theComponents'][0]['theType'],
                     'process')

    rv = self.app.delete('/api/trust_boundaries/name/Shibboleth?session_id=test')
def test_generate_obstacle_from_exception(self):
    method = 'test_generate_obstacle_from_exception'
    url = '/api/usecases/name/%s?session_id=test' % quote(self.existing_usecase_name)
    rv = self.app.get(url)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    uc = jsonpickle.decode(responseData)

    url = ('/api/usecases/environment/Psychosis/step/'
           + quote('Researcher does something')
           + '/exception/anException/generate_obstacle?session_id=test')
    existing_uc_dict = {'session_id': 'test', 'object': uc}
    rv = self.app.post(url, content_type='application/json',
                       data=jsonpickle.encode(existing_uc_dict))
    self.assertIsNotNone(rv.data, 'No response')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp)
    self.assertIsInstance(json_resp, dict)
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.logger.info('[%s] Message: %s', method, message)
    self.assertGreater(message.find('generated from exception'), -1,
                       'The obstacle was not generated')
def test_delete(self):
    method = 'test_delete'
    rv = self.app.get('/api/persona_characteristics/name/Managers%20delegate%20security%20decisions?session_id=test')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    pc = jsonpickle.decode(responseData)
    pc['theCharacteristicSynopsis'] = {"theActor": "Claire",
                                       "theActorType": "persona",
                                       "theSynopsis": "Security delegated",
                                       "theDimension": "goal"}
    pcDict = {'session_id': 'test', 'object': pc}
    rv = self.app.put('/api/persona_characteristics/name/Managers%20delegate%20security%20decisions?session_id=test',
                      content_type='application/json',
                      data=jsonpickle.encode(pcDict))

    url = '/api/usecases/name/%s?session_id=test' % quote(self.prepare_new_usecase().name())
    new_usecase_body = self.prepare_json()
    self.app.delete(url)
    self.logger.info('[%s] Object to delete: %s', method, new_usecase_body)
    self.app.post('/api/usecases', content_type='application/json',
                  data=new_usecase_body)
    self.logger.info('[%s] URL: %s', method, url)
    rv = self.app.delete(url)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    self.logger.info('[%s] Response data: %s', method, responseData)
    self.assertIsNotNone(responseData, 'No response')
    json_resp = jsonpickle.decode(responseData)
    self.assertIsInstance(json_resp, dict,
                          'The response cannot be converted to a dictionary')
    message = json_resp.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.logger.info('[%s] Message: %s\n', method, message)
def get_context_data(self, **kwargs):
    user = self.request.session['user']
    context = super(LoginRequired, self).get_context_data(**kwargs)
    decoded_user = jsonpickle.decode(user)  # decode once instead of twice
    context['user'] = decoded_user
    context['baiguullaga'] = decoded_user.user_id.position_id.dep_id.baiguullaga
    context['tasag'] = Tasag.objects.filter(baiguullaga=context['baiguullaga'])
    context['albantushaal'] = AlbanTushaal.objects.filter(dep_id=context['tasag'])
    context['ajiltan'] = Ajiltan.objects.filter(position_id=context['albantushaal'])
    return context
def _add_booking(my_request):
    logger.info("Attempt to make a booking")
    # TODO: fail early
    required_kit = jsonpickle.decode(my_request.form["required_kit"])
    user = jsonpickle.decode(my_request.form["user"])
    duration = jsonpickle.decode(my_request.form["duration"])
    logger.info("'%s' requested %s for %ss", user, required_kit, duration)
    bookings = manager.add_booking(required_kit, user, duration)
    return bookings
def test_json(self):
    expect = self.obj
    pickle = jsonpickle.encode(self.obj)
    actual = jsonpickle.decode(pickle)
    self.assertEqual(actual.name, expect.name)
    self.assertEqual(actual.child, expect.child)

    actual = jsonpickle.decode(self.expected_json)
    self.assertEqual(self.obj.name, actual.name)
    self.assertEqual(type(self.obj), type(actual))
def assertEncodeDecode(self, json_input):
    expect = SAMPLE_DATA
    actual = jsonpickle.decode(json_input)
    self.assertEqual(expect['things'][0].name, actual['things'][0].name)
    self.assertEqual(expect['things'][0].child, actual['things'][0].child)

    pickled = jsonpickle.encode(SAMPLE_DATA)
    actual = jsonpickle.decode(pickled)
    self.assertEqual(expect['things'][0].name, actual['things'][0].name)
    self.assertEqual(expect['things'][0].child, actual['things'][0].child)
import json
import jsonpickle

# A well-formed payload; note that every key carries a trailing space, so
# lookups must use the same spelling (e.g. b1["answer "]).
a1 = """{
    "score ": -0.9719291925430298,
    "answer ": "可以关注微信公众号‘’京东家电小秘书‘’点击页面底部菜单选项;一点无忧‘自助办理、或点击召唤小秘书联系家电专属京东客服进行人工咨询哦",
    "question ": "京东客服在哪里 ",
    "sourceList ": ["cluster "],
    "optional ": {
        "qwords ": "京东 客服 在 哪里 ",
        "awords ": "亲 可以 关注 微信 公众 号 ‘ ’ 京东 家电 小 秘书 ‘ ’ 点击 页面 底部 菜单 选项 ; 一点 无忧 ‘ 自助 办理 、 或 点击 召唤 小 秘书 联系 家电 专属 京东 客服 进行 人工 咨询 哦 ` , "
    }
}"""
b1 = jsonpickle.decode(a1)
print(b1["answer "])

# The same payload with a stray '",' after the answer value, which makes it
# malformed JSON: decoding raises a ValueError.
a2 = """{
    "score ": -0.9719291925430298,
    "answer ": "可以关注微信公众号‘’京东家电小秘书‘’点击页面底部菜单选项;一点无忧‘自助办理、或点击召唤小秘书联系家电专属京东客服进行人工咨询哦", ",
    "question ": "京东客服在哪里 ",
    "sourceList ": ["cluster "],
    "optional ": {
        "qwords ": "京东 客服 在 哪里 ",
        "awords ": "亲 可以 关注 微信 公众 号 ‘ ’ 京东 家电 小 秘书 ‘ ’ 点击 页面 底部 菜单 选项 ; 一点 无忧 ‘ 自助 办理 、 或 点击 召唤 小 秘书 联系 家电 专属 京东 客服 进行 人工 咨询 哦 ` , "
    }
}"""
try:
    b1 = jsonpickle.decode(a2)
except ValueError as e:
    print("malformed payload:", e)

# json.dumps() on the raw string only wraps it as a JSON string literal, so
# decoding returns the original str, not a dict: b2["answer "] would fail.
a2 = json.dumps(a2)
b2 = jsonpickle.decode(a2)
print(type(b2))
def scanSingleHost(user, last_host, last_open_ports, last_secured_ports,
                   last_settings):
    ScanStatus().save()
    user = jp.decode(user)
    last_host = jp.decode(last_host)
    last_open_ports = jp.decode(last_open_ports)
    last_secured_ports = jp.decode(last_secured_ports)
    last_settings = jp.decode(last_settings)

    secure_proxy = last_host.secure_proxy_ip.split(":")[0]
    secure_port = int(last_host.secure_proxy_ip.split(":")[1])
    unsecure_proxy = last_host.unsecure_proxy_ip.split(":")[0]
    unsecure_port = int(last_host.unsecure_proxy_ip.split(":")[1])

    if len(last_open_ports.unsecured_ports) > 0:
        open_ports = [int(x.strip())
                      for x in last_open_ports.unsecured_ports.split(",")]
    else:
        open_ports = []

    mulScan_unsecuredPorts = MultiScan(targets=[last_host.ip],
                                       ports=open_ports,
                                       threads=last_settings.threads,
                                       timeout=last_settings.timeout,
                                       proxy_ip=[secure_proxy, unsecure_proxy],
                                       proxy_port=[secure_port, unsecure_port])
    stat = ScanStatus.objects.filter().last()
    open_port_res = dict(mulScan_unsecuredPorts.run_proxy_scan(False))
    open_res_write = OpenPortResult(
        added_by=User.objects.get(username=user.username),
        scanned_on=datetime.now(),
        host=Host.objects.get(host_id=last_host.host_id),
        open_ports=", ".join([
            str(x) for x in open_port_res[unsecure_proxy + "::" +
                                          last_host.ip]["Opened Ports"]
        ]),
        closed_ports=", ".join([
            str(x) for x in open_port_res[unsecure_proxy + "::" +
                                          last_host.ip]["Closed Ports"]
        ]),
        runtime=open_port_res[unsecure_proxy + "::" + last_host.ip]["Runtime"])
    open_res_write.save()
    ScanStatus.objects.update_or_create(status_id=stat.status_id,
                                        defaults={'open_scan_status': True})

    if len(last_secured_ports.secured_ports) > 0:
        secured_ports = [int(x.strip())
                         for x in last_secured_ports.secured_ports.split(",")]
    else:
        # was `secure_port = []`, which clobbered the proxy port parsed above
        secured_ports = []

    mulScan_securedPorts = MultiScan(targets=[last_host.ip],
                                     ports=secured_ports,
                                     threads=last_settings.threads,
                                     timeout=last_settings.timeout,
                                     proxy_ip=[secure_proxy, unsecure_proxy],
                                     proxy_port=[secure_port, unsecure_port])
    secure_port_res = dict(mulScan_securedPorts.run_proxy_scan(True))
    unsecure_port_res = dict(mulScan_securedPorts.run_proxy_scan(False))
    secure_res_write = SecurePortResult(
        added_by=User.objects.get(username=user.username),
        scanned_on=datetime.now(),
        host=Host.objects.get(host_id=last_host.host_id),
        secure_open_ports=", ".join([
            str(x) for x in secure_port_res[secure_proxy + "::" +
                                            last_host.ip]["Opened Ports"]
        ]),
        secure_closed_ports=", ".join([
            str(x) for x in secure_port_res[secure_proxy + "::" +
                                            last_host.ip]["Closed Ports"]
        ]),
        secure_scan_runtime=secure_port_res[secure_proxy + "::" +
                                            last_host.ip]["Runtime"],
        unsecure_open_ports=", ".join([
            str(x) for x in unsecure_port_res[unsecure_proxy + "::" +
                                              last_host.ip]["Opened Ports"]
        ]),
        unsecure_closed_ports=", ".join([
            str(x) for x in unsecure_port_res[unsecure_proxy + "::" +
                                              last_host.ip]["Closed Ports"]
        ]),
        unsecure_scan_runtime=unsecure_port_res[unsecure_proxy + "::" +
                                                last_host.ip]["Runtime"])
    secure_res_write.save()
    ScanStatus.objects.update_or_create(status_id=stat.status_id,
                                        defaults={'secure_scan_status': True})
def _read_json(environment, call):
    """Decode the json data."""
    fixture = load_fixture(f"plugwise/{environment}/{call}.json")
    return jsonpickle.decode(fixture)
def test_reduce_listitems_append(self):
    'Test reduce with listitems set (as a generator), yielding single items'
    instance = PickleProtocol2ReduceListitemsAppend()
    encoded = jsonpickle.encode(instance)
    decoded = jsonpickle.decode(encoded)
    self.assertEqual(decoded.inner, ['foo', 'bar'])
def test_reduce_complex_zero(self):
    instance = 0j
    encoded = jsonpickle.encode(instance)
    decoded = jsonpickle.decode(encoded)
    self.assertEqual(decoded, instance)
def test_string_key_not_requiring_escape_dict_keys_with_keys_enabled(self):
    """test that string keys that do not require escaping are not escaped"""
    str_dict = {'name': [1, 2]}
    pickled = jsonpickle.encode(str_dict, keys=True)
    unpickled = jsonpickle.decode(pickled)
    self.assertTrue('name' in unpickled)
def test_list_roundtrip(self):
    data = [1, 2, 3]
    newdata = jsonpickle.decode(jsonpickle.encode(data))
    self.assertEqual(data, newdata)
def load(self):
    with open("settings.json", 'r') as setting_file:
        obj = jsonpickle.decode(setting_file.read())
    self.__dict__.update(obj)
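# A hypothetical counterpart to load() above, persisting the settings object
# back to the same file with jsonpickle. The method name `save` is an
# assumption, not from the original snippet.
def save(self):
    with open("settings.json", 'w') as setting_file:
        setting_file.write(jsonpickle.encode(self.__dict__))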
def test_put(self):
    method = 'test_put'
    url = '/api/dataflows'
    self.logger.info('[%s] URL: %s', method, url)
    new_dataflow_body = self.prepare_json()
    rv = self.app.post(url, content_type='application/json',
                       data=new_dataflow_body)
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    self.logger.debug('[%s] Response data: %s', method, responseData)
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp, 'No results after deserialization')

    dataflow_to_update = self.prepare_new_dataflow()
    dataflow_to_update.theName = 'Edited test dataflow'
    upd_env_body = self.prepare_json(dataflow=dataflow_to_update)
    rv = self.app.put(
        '/api/dataflows/name/acknowledge/environment/Psychosis?session_id=test',
        data=upd_env_body, content_type='application/json')
    self.assertIsNotNone(rv.data, 'No response')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp)
    self.assertEqual(json_resp['message'], 'Edited test dataflow updated')

    rv = self.app.get(
        '/api/dataflows/name/Edited%20test%20dataflow/environment/Psychosis?session_id=test')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    upd_dataflow = jsonpickle.decode(responseData)
    self.assertIsNotNone(upd_dataflow, 'Unable to decode JSON data')
    self.assertEqual(upd_dataflow['theName'], dataflow_to_update.name())
    self.assertEqual(upd_dataflow['theEnvironmentName'],
                     dataflow_to_update.environment())
    self.assertEqual(upd_dataflow['theFromName'], dataflow_to_update.fromName())
    self.assertEqual(upd_dataflow['theFromType'], dataflow_to_update.fromType())
    self.assertEqual(upd_dataflow['theToName'], dataflow_to_update.toName())
    self.assertEqual(upd_dataflow['theToType'], dataflow_to_update.toType())

    rv = self.app.delete(
        '/api/dataflows/name/Edited%20test%20dataflow/environment/Psychosis?session_id=test')
    self.assertIsNotNone(rv.data, 'No response')
    if sys.version_info > (3,):
        responseData = rv.data.decode('utf-8')
    else:
        responseData = rv.data
    json_resp = jsonpickle.decode(responseData)
    self.assertIsNotNone(json_resp)
    self.assertEqual(json_resp['message'], 'Edited test dataflow deleted')
def find_repeat_detections(inputFilename, outputFilename, options=None):

    ##%% Input handling

    if options is None:
        options = RepeatDetectionOptions()

    toReturn = RepeatDetectionResults()

    ##%% Load file

    detectionResults, otherFields = load_api_results(
        inputFilename, normalize_paths=True,
        filename_replacements=options.filenameReplacements)
    toReturn.detectionResults = detectionResults
    toReturn.otherFields = otherFields

    ##%% Separate files into directories

    # This will be a map from a directory name to smaller data frames
    rowsByDirectory = {}

    # This is a mapping back into the rows of the original table
    filenameToRow = {}

    # TODO: in the case where we're loading an existing set of FPs after
    # manual filtering, we should load these data frames too, rather than
    # re-building them from the input.

    print('Separating files into directories...')

    # iRow = 0; row = detectionResults.iloc[0]
    for iRow, row in detectionResults.iterrows():
        relativePath = row['file']
        dirName = os.path.dirname(relativePath)

        if options.nDirLevelsFromLeaf > 0:
            iLevel = 0
            while iLevel < options.nDirLevelsFromLeaf:
                iLevel += 1
                dirName = os.path.dirname(dirName)
        assert len(dirName) > 0

        if dirName not in rowsByDirectory:
            # Create a new DataFrame with just this row
            # rowsByDirectory[dirName] = pd.DataFrame(row)
            rowsByDirectory[dirName] = []

        rowsByDirectory[dirName].append(row)

        assert relativePath not in filenameToRow
        filenameToRow[relativePath] = iRow

    # Convert lists of rows to proper DataFrames
    dirs = list(rowsByDirectory.keys())
    for d in dirs:
        rowsByDirectory[d] = pd.DataFrame(rowsByDirectory[d])

    toReturn.rowsByDirectory = rowsByDirectory
    toReturn.filenameToRow = filenameToRow

    print('Finished separating {} files into {} directories'.format(
        len(detectionResults), len(rowsByDirectory)))

    ##%% Look for matches (or load them from file)

    dirsToSearch = list(rowsByDirectory.keys())[0:options.debugMaxDir]

    # length-nDirs list of lists of DetectionLocation objects
    suspiciousDetections = [None] * len(dirsToSearch)

    # Are we actually looking for matches, or just loading from a file?
    if len(options.filterFileToLoad) == 0:

        # We're actually looking for matches...
        print('Finding similar detections...')

        allCandidateDetections = [None] * len(dirsToSearch)
        if not options.bParallelizeComparisons:
            options.pbar = None
            # iDir = 0; dirName = dirsToSearch[iDir]
            for iDir, dirName in enumerate(tqdm(dirsToSearch)):
                allCandidateDetections[iDir] = find_matches_in_directory(
                    dirName, options, rowsByDirectory)
        else:
            options.pbar = tqdm(total=len(dirsToSearch))
            allCandidateDetections = Parallel(
                n_jobs=options.nWorkers,
                prefer='threads')(delayed(find_matches_in_directory)(
                    dirName, options, rowsByDirectory)
                                  for dirName in tqdm(dirsToSearch))

        print('\nFinished looking for similar bounding boxes')

        ##%% Find suspicious locations based on match results

        print('Filtering out repeat detections...')

        nImagesWithSuspiciousDetections = 0
        nSuspiciousDetections = 0

        # For each directory
        #
        # iDir = 51
        for iDir in range(len(dirsToSearch)):

            # A list of DetectionLocation objects
            suspiciousDetectionsThisDir = []

            # A list of DetectionLocation objects
            candidateDetectionsThisDir = allCandidateDetections[iDir]

            for iLocation, candidateLocation in enumerate(
                    candidateDetectionsThisDir):

                # occurrenceList is a list of file/detection pairs
                nOccurrences = len(candidateLocation.instances)

                if nOccurrences < options.occurrenceThreshold:
                    continue

                nImagesWithSuspiciousDetections += nOccurrences
                nSuspiciousDetections += 1

                suspiciousDetectionsThisDir.append(candidateLocation)
                # Find the images corresponding to this bounding box, render boxes

            suspiciousDetections[iDir] = suspiciousDetectionsThisDir

        print('Finished searching for repeat detections\n'
              'Found {} unique detections on {} images that are suspicious'.format(
                  nSuspiciousDetections, nImagesWithSuspiciousDetections))

    else:

        print('Bypassing detection-finding, loading from {}'.format(
            options.filterFileToLoad))

        # Load the filtering file
        detectionIndexFileName = options.filterFileToLoad
        sIn = open(detectionIndexFileName, 'r').read()
        suspiciousDetections = jsonpickle.decode(sIn)
        filteringBaseDir = os.path.dirname(options.filterFileToLoad)
        assert len(suspiciousDetections) == len(dirsToSearch)

        nDetectionsRemoved = 0
        nDetectionsLoaded = 0

        # We're skipping detection-finding, but to see which images are
        # actually legit false positives, we may be looking for physical
        # files or loading from a text file.
        fileList = None
        if options.filteredFileListToLoad is not None:
            with open(options.filteredFileListToLoad) as f:
                fileList = f.readlines()
                fileList = [x.strip() for x in fileList]
            nSuspiciousDetections = sum([len(x) for x in suspiciousDetections])
            print('Loaded false positive list from file, '
                  'will remove {} of {} suspicious detections'.format(
                      len(fileList), nSuspiciousDetections))

        # For each directory
        # iDir = 0; detections = suspiciousDetections[0]
        for iDir, detections in enumerate(suspiciousDetections):

            bValidDetection = [True] * len(detections)
            nDetectionsLoaded += len(detections)

            # For each detection that was present before filtering
            # iDetection = 0; detection = detections[iDetection]
            for iDetection, detection in enumerate(detections):

                # Are we checking the directory to see whether detections
                # were actually false positives, or reading from a list?
                if fileList is None:

                    # Is the image still there?
                    imageFullPath = os.path.join(
                        filteringBaseDir,
                        detection.sampleImageRelativeFileName)

                    # If not, remove this from the list of suspicious detections
                    if not os.path.isfile(imageFullPath):
                        nDetectionsRemoved += 1
                        bValidDetection[iDetection] = False

                else:
                    if detection.sampleImageRelativeFileName not in fileList:
                        nDetectionsRemoved += 1
                        bValidDetection[iDetection] = False

            # ...for each detection

            nRemovedThisDir = len(bValidDetection) - sum(bValidDetection)
            if nRemovedThisDir > 0:
                print('Removed {} of {} detections from directory {}'.format(
                    nRemovedThisDir, len(detections), iDir))

            detectionsFiltered = list(compress(detections, bValidDetection))
            suspiciousDetections[iDir] = detectionsFiltered

        # ...for each directory

        print('Removed {} of {} total detections via manual filtering'.format(
            nDetectionsRemoved, nDetectionsLoaded))

    # ...if we are/aren't finding detections (vs. loading from file)

    toReturn.suspiciousDetections = suspiciousDetections

    if options.bRenderHtml:

        # Render problematic locations with html (loop)

        print('Rendering html')

        nDirs = len(dirsToSearch)
        directoryHtmlFiles = [None] * nDirs

        if options.bParallelizeRendering:

            # options.pbar = tqdm(total=nDirs)
            options.pbar = None

            directoryHtmlFiles = Parallel(
                n_jobs=options.nWorkers,
                prefer='threads')(delayed(render_images_for_directory)(
                    iDir, directoryHtmlFiles, suspiciousDetections, options)
                                  for iDir in tqdm(range(nDirs)))

        else:

            options.pbar = None

            # For each directory
            # iDir = 51
            for iDir in range(nDirs):
                # Add this directory to the master list of html files
                directoryHtmlFiles[iDir] = render_images_for_directory(
                    iDir, directoryHtmlFiles, suspiciousDetections, options)

            # ...for each directory

        # Write master html file

        masterHtmlFile = os.path.join(options.outputBase, 'index.html')
        os.makedirs(options.outputBase, exist_ok=True)
        toReturn.masterHtmlFile = masterHtmlFile

        with open(masterHtmlFile, 'w') as fHtml:

            fHtml.write('<html><body>\n')
            fHtml.write('<h2><b>Repeat detections by directory</b></h2></br>\n')

            for iDir, dirHtmlFile in enumerate(directoryHtmlFiles):

                if dirHtmlFile is None:
                    continue

                relPath = os.path.relpath(dirHtmlFile, options.outputBase)
                dirName = dirsToSearch[iDir]

                # Remove unicode characters before formatting
                relPath = relPath.encode('ascii', 'ignore').decode('ascii')
                dirName = dirName.encode('ascii', 'ignore').decode('ascii')

                fHtml.write('<a href={}>{}</a><br/>\n'.format(relPath, dirName))

            fHtml.write('</body></html>\n')

    # ...if we're rendering html

    toReturn.allRowsFiltered = update_detection_table(toReturn, options,
                                                      outputFilename)

    # Create filtering directory
    if options.bWriteFilteringFolder:

        print('Creating filtering folder...')

        dateString = datetime.now().strftime('%Y.%m.%d.%H.%M.%S')
        filteringDir = os.path.join(options.outputBase,
                                    'filtering_' + dateString)
        os.makedirs(filteringDir, exist_ok=True)

        # iDir = 0; suspiciousDetectionsThisDir = suspiciousDetections[iDir]
        for iDir, suspiciousDetectionsThisDir in enumerate(
                tqdm(suspiciousDetections)):

            # suspiciousDetectionsThisDir is a list of DetectionLocation objects
            # iDetection = 0; detection = suspiciousDetectionsThisDir[0]
            for iDetection, detection in enumerate(
                    suspiciousDetectionsThisDir):

                instance = detection.instances[0]
                relativePath = instance.filename
                inputFullPath = os.path.join(options.imageBase, relativePath)
                assert os.path.isfile(inputFullPath), \
                    'Not a file: {}'.format(inputFullPath)
                outputRelativePath = 'dir{:0>4d}_det{:0>4d}.jpg'.format(
                    iDir, iDetection)
                outputFullPath = os.path.join(filteringDir, outputRelativePath)
                render_bounding_box(detection, inputFullPath,
                                    outputFullPath, 15)
                detection.sampleImageRelativeFileName = outputRelativePath

        # Write out the detection index
        detectionIndexFileName = os.path.join(filteringDir,
                                              'detectionIndex.json')
        jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
        s = jsonpickle.encode(suspiciousDetections)
        with open(detectionIndexFileName, 'w') as f:
            f.write(s)
        toReturn.filterFile = detectionIndexFileName

        print('Done')

    # ...if we're writing filtering info

    return toReturn
def load_from_file(cls, filepath):
    with open(filepath, 'r') as f:
        return jsonpickle.decode(f.read())
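# A hypothetical counterpart to load_from_file() above; the name save_to_file
# and its signature are illustrative, not from the original source.
def save_to_file(self, filepath):
    with open(filepath, 'w') as f:
        f.write(jsonpickle.encode(self))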
def load_from_json(file):
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)),
                           "data/%s.json" % file)) as f:
        return jsonpickle.decode(f.read())
try:
    from_experiment = Experiment.objects.get(name=from_experiment_name)
except Experiment.DoesNotExist:
    print "From experiment not found"
    sys.exit(0)
try:
    to_experiment = Experiment.objects.get(name=to_experiment_name)
except Experiment.DoesNotExist:
    print "To experiment not found"
    sys.exit(0)
print "Found experiments"

from_documents = Document.objects.filter(experiment=from_experiment)
print "Extracted {} documents from from_experiment".format(len(from_documents))

n_found = 0
for document in from_documents:
    to_document = Document.objects.filter(experiment=to_experiment,
                                          name=document.name)
    if len(to_document) == 1:
        print "Found match for {}".format(document.name)
        n_found += 1
        to_document = to_document[0]
        to_metadata = {}
        from_metadata = jsonpickle.decode(document.metadata)
        for key, value in from_metadata.items():
            to_metadata[key] = value
        to_document.metadata = jsonpickle.encode(to_metadata)
        to_document.save()

print "Found {} matches".format(n_found)
def test_string_key_requiring_escape_dict_keys_with_keys_enabled(self):
    json_key_dict = {tags.JSON_KEY + '6': [1, 2]}
    pickled = jsonpickle.encode(json_key_dict, keys=True)
    unpickled = jsonpickle.decode(pickled, keys=True)
    self.assertEqual(unpickled[tags.JSON_KEY + '6'], [1, 2])
def test_builtin_error(self):
    expect = AssertionError
    json = jsonpickle.encode(expect)
    actual = jsonpickle.decode(json)
    self.assertEqual(expect, actual)
    self.assertTrue(expect is actual)
def test_int_dict_keys_with_keys_enabled(self):
    int_dict = {1000: [1, 2]}
    pickle = jsonpickle.encode(int_dict, keys=True)
    actual = jsonpickle.decode(pickle, keys=True)
    self.assertEqual(actual[1000], [1, 2])
def test_tuple_roundtrip(self):
    data = (1, 2, 3)
    newdata = jsonpickle.decode(jsonpickle.encode(data))
    self.assertEqual(data, newdata)
def test_int_dict_keys_defaults(self):
    int_dict = {1000: [1, 2]}
    pickle = jsonpickle.encode(int_dict)
    actual = jsonpickle.decode(pickle)
    self.assertEqual(actual['1000'], [1, 2])
def test_reduce_dictitems(self):
    'Test reduce with dictitems set (as a generator)'
    instance = PickleProtocol2ReduceDictitems()
    encoded = jsonpickle.encode(instance)
    decoded = jsonpickle.decode(encoded)
    self.assertEqual(decoded.inner, {'foo': 'foo', 'bar': 'bar'})
def test_None_dict_key_with_keys_enabled(self):
    expect = {None: None}
    obj = {None: None}
    pickle = jsonpickle.encode(obj, keys=True)
    actual = jsonpickle.decode(pickle, keys=True)
    self.assertEqual(expect, actual)
def load_config_from_file(file):
    config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), file)
    with open(config_file) as cf:
        configuration = jsonpickle.decode(cf.read())
    return configuration
def test_unicode_dict_keys(self):
    uni = unichr(0x1234)
    pickle = jsonpickle.encode({uni: uni})
    actual = jsonpickle.decode(pickle)
    self.assertTrue(uni in actual)
    self.assertEqual(actual[uni], uni)
def from_json_str(self, s):
    return jsonpickle.decode(s)
def test_decode(self):
    actual = jsonpickle.decode(self.expected_json)
    self.assertEqual(self.obj.name, actual.name)
    self.assertEqual(type(self.obj), type(actual))
def value(self) -> Any:
    return decode(self._value)
def test_encode_notunpicklable(self):
    expect = {'name': 'A name', 'child': None}
    pickle = jsonpickle.encode(self.obj, unpicklable=False)
    actual = jsonpickle.decode(pickle)
    self.assertEqual(expect['name'], actual['name'])
jsonpickle_numpy.register_handlers()

stopwords = utils_.GitMineUtils.STOPWORDS \
    + list(set(java_reserved + c_reserved + cpp_reserved
               + javascript_reserved + python_reserved))
min_tok_len = 3
net_size_in_days = 14
multiplicity = 1000

repo_locations = ['../data/dev_set/%s.json' % s for s in [
    'PhilJay_MPAndroidChart',
    'ReactiveX_RxJava',
    'palantir_plottable',
    'tensorflow_tensorflow',
]]

for repo_loc in repo_locations:
    with open(repo_loc) as f:
        repo = jsonpickle.decode(f.read())
    with open(repo_loc[:-5] + '_truth.json') as f:
        truth = jsonpickle.decode(f.read())

    model, dictionary, cache = generate_tfidf(repo, stopwords, min_tok_len)
    similarity_config = {
        'dict': dictionary,
        'model': model,
        'min_len': min_tok_len,
        'stopwords': stopwords,
    }
    fingerprint = generate_dev_fingerprint(repo)
    temporal_config = {
        'fingerprint': fingerprint,
        'net_size_in_days': net_size_in_days,
    }
def test_encode(self):
    expect = self.obj
    pickle = jsonpickle.encode(self.obj)
    actual = jsonpickle.decode(pickle)
    self.assertEqual(expect.name, actual.name)
    self.assertEqual(expect.child, actual.child)
def from_json(cls, json):
    return jp.decode(json)
from src.broadcast import Broadcast
from src.config import *
from argparse import ArgumentParser
import requests
import asyncio
import jsonpickle

parser = ArgumentParser()
parser.add_argument('-n', '--nodes', default=5, type=int,
                    help='number of nodes running')
args = parser.parse_args()
n = args.nodes

url = f'http://{BOOTSTRAP}/get_nodes'
headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
# pass headers as a keyword argument: the second positional argument of
# requests.get() is params, not headers
response = requests.get(url, headers=headers)
hosts = jsonpickle.decode(response.json())['data']

b = Broadcast('test')
for host in hosts:
    b.add_peer(host)

print("Starting the stress test...")
responses = asyncio.run(b.broadcast('stress_test', n, 'POST'))