def test_execute(self):
    """Run the configured HTTP request and verify both the JSON response
    and the resulting database row against the configured expectations.

    Reads request parameters from ``self.config.getInPut()`` and the
    expected response/SQL from ``self.config.getExpect()``.
    """
    inPut = self.config.getInPut()
    expect = self.config.getExpect()
    d = JSONDecoder()
    data = d.decode(inPut.get("parameters"))
    # Dispatch on the configured HTTP method; anything other than POST
    # falls back to GET.
    if inPut.get("method") == "POST":
        r = requests.post(inPut.get("url"), data)
    else:
        # fix: requests.get() takes 'params', not 'parameters' (the
        # unknown kwarg raised TypeError at call time).
        r = requests.get(inPut.get("url"), params=data)
    response = d.decode(str(r.text))
    expectResponse = d.decode(expect.get("response"))
    expectSql = d.decode(expect.get("sql"))
    expectSqlQuery = expectSql.get("query")
    # NOTE(review): encode().split(",") assumes Python 2 byte-string
    # semantics to match MySQLdb's row values — confirm on upgrade.
    expectSqlResult = tuple(
        expectSql.get("result").encode(encoding="UTF-8", errors="strict").split(","))
    # NOTE(review): hard-coded DB credentials — move into configuration.
    conn = MySQLdb.connect(host="172.18.33.37", user="******", passwd="123456",
                           db="meshare", port=3306)
    try:
        cur = conn.cursor()
        try:
            cur.execute(expectSqlQuery)
            result = cur.fetchone()
        finally:
            cur.close()
    finally:
        # fix: connection and cursor were previously leaked; debug
        # 'print' statements were removed as well.
        conn.close()
    self.assertTupleEqual(result, expectSqlResult)
    self.assertDictContainsSubset(expectResponse, response)
def change_logs(self):
    """Build the list of change-log view models for this task history.

    Decodes the stored change log when present; otherwise falls back to
    the recorded code version. Any failure is logged and whatever was
    collected so far is returned.
    """
    entries = []
    try:
        raw_log = CITaskHistoryService.get_change_log(
            self.ci_task_history.ChangeLog)
        decoder = JSONDecoder()
        if raw_log:
            position = 1
            for repo_changes in decoder.decode(raw_log['change_log']):
                repo_name = repo_changes['repo']
                for change in repo_changes['changes']:
                    entries.append(
                        VM_CITaskChangeLog(change, position, repo_name))
                    position += 1
        elif self.ci_task_history.CodeVersion:
            versions = decoder.decode(self.ci_task_history.CodeVersion)
            entries.append(VM_CITaskChangeLog(versions[0], 0, ""))
    except Exception as ex:
        SimpleLogger.exception(ex)
    return entries
def _geolocate(networks):
    """Geolocate via Google's browserlocation API from observed wifi
    networks (dicts with 'mac', 'ssid' and 'ss' keys).

    Returns a dict with 'latitude', 'longitude' and 'accuracy' on
    success; raises GeoLocateError otherwise.
    """
    if networks:
        p = '/maps/api/browserlocation/json?browser=sploitego&sensor=true'
        for n in networks:
            p += '&%s' % urlencode({
                'wifi': 'mac:%s|ssid:%s|ss:%s' % (_fullmac(n['mac']), n['ssid'], n['ss'])
            })
        # fix: removed leftover debug 'print p' statement.
        c = HTTPSConnection('maps.googleapis.com')
        try:
            c.request('GET', p)
            r = c.getresponse()
            if r.status == 200 and r.getheader('Content-Type').startswith(
                    'application/json'):
                d = JSONDecoder().decode(r.read())
                if d['status'] == 'OK':
                    l = d['location']
                    return {
                        'latitude': l['lat'],
                        'longitude': l['lng'],
                        'accuracy': d['accuracy']
                    }
        finally:
            c.close()  # fix: the HTTPS connection was never closed
    raise GeoLocateError('Unable to geolocate.')
def __init__(self, json_record):
    """Populate this measurement record from a JSON-encoded row.

    Every field is copied verbatim from the decoded dict; the four
    model-output fields are coerced to float, as before.
    """
    fields = JSONDecoder().decode(json_record)
    # attribute name -> JSON key, copied without conversion
    verbatim = (
        ('state_code', 'stateCode'), ('country_code', 'countryCode'),
        ('site_num', 'siteNum'), ('parameter_code', 'parameterCode'),
        ('poc', 'poc'), ('lat', 'latitude'), ('lon', 'longitude'),
        ('datum', 'datum'), ('parameter_name', 'parameterName'),
        ('date_local', 'dateLocal'), ('time_local', 'timeLocal'),
        ('date_gmt', 'dateGMT'), ('time_gmt', 'timeGMT'),
        ('sample_measurement', 'sampleMeasurement'),
        ('units_of_measure', 'unitsOfMeasure'), ('mdl', 'mdl'),
        ('uncertainty', 'uncertainty'), ('qualifier', 'qualifier'),
        ('method_type', 'methodType'), ('method_code', 'methodCode'),
        ('method_name', 'methodName'), ('state_name', 'stateName'),
        ('county_name', 'countyName'),
        ('date_of_last_change', 'dateOfLastChange'),
    )
    for attr, key in verbatim:
        setattr(self, attr, fields[key])
    self.prediction = float(fields['prediction'])
    self.error = float(fields['error'])
    self.anomaly = float(fields['anomaly'])
    self.prediction_next = float(fields['predictionNext'])
def loads(s):
    """Decode the JSON document *s* with the module's ``_object_hook``.

    The parse_* hooks are explicitly left at their defaults (None).
    """
    return JSONDecoder(object_hook=_object_hook,
                       parse_float=None,
                       parse_int=None,
                       parse_constant=None,
                       object_pairs_hook=None).decode(s)
def getjobinfo(servername, joburl):
    """Fetch and decode the Jenkins JSON API document for one job.

    *joburl* must end with a slash; '/api/json' is appended relative
    to it.
    """
    connection = httplib2.HTTPConnectionWithTimeout(servername)
    connection.request("GET", joburl + "api/json")
    payload = connection.getresponse().read()
    connection.close()
    return JSONDecoder().decode(payload.decode())
def getjenkinsjobs(jenkinsserver):
    """Fetch and decode the top-level Jenkins job listing for a server."""
    connection = httplib2.HTTPConnectionWithTimeout(jenkinsserver)
    connection.request("GET", "/jenkins/api/json")
    payload = connection.getresponse().read()
    connection.close()
    return JSONDecoder().decode(payload.decode())
def parseCredentials(pwd, file="secret_credentials.json"):
    """Decrypt stored credentials and return (server, port, user, password).

    Missing keys fall back to placeholder defaults.

    NOTE(review): the *file* argument is currently unused — the
    decryption call reads its own source; confirm before relying on it.
    """
    text = encryption.decrypt(password=pwd) or "{}"
    data = JSONDecoder().decode(text)
    print("Opening credential file...")
    return (data.get("SERVER", "example.com"),
            data.get("PORT", "0"),
            data.get("USER", "*****@*****.**"),
            data.get("PASSWORD", "admin"))
def perform_destroy(self, instance):
    """Delete a task dependency identified either by the 'models' form
    payload (JSON with an 'id' field) or by the 'id' URL kwarg."""
    form_data = self.request.POST.get('models', None)
    if form_data is not None:  # fix: identity comparison with None
        validate_data = JSONDecoder().decode(form_data)
        task_dependency_id = int(validate_data.get('id', 0))
    else:
        task_dependency_id = int(self.kwargs['id'])
    TaskService.delete_task_dependency(int(task_dependency_id))
def get_object(self):
    """Resolve the Task targeted by this request: from the 'models' form
    payload when present, otherwise from the 'task_id' URL kwarg."""
    form_data = self.request.POST.get('models', None)
    if form_data is not None:  # fix: identity comparison with None
        validate_data = JSONDecoder().decode(form_data)
        task_id = int(validate_data.get('id', 0))
    else:
        task_id = int(self.kwargs['task_id'])
    # fix: Manager.get() requires a lookup keyword — a bare positional
    # integer is not a valid Q object and raised at runtime.
    task = models.Task.objects.get(pk=task_id)
    return task
def getCategoriesForDocument(document):
    """Return the classifier categories recorded for *document*.

    Returns None when the lookup fails; the failure is logged rather
    than propagated.
    """
    logger = logging.getLogger("DocumentCategory.getCategoriesForDocument")
    categories = None
    try:
        documentCount = DocumentCategoryCounts.objects.get(document=document)
        jsonDecoder = JSONDecoder()
        categories = jsonDecoder.decode(documentCount.countData)
        categories = ClassifierCategory.getCategoriesByIds(categories.keys())
    except Exception as ex:  # 'as' form works on both Python 2.6+ and 3
        logger.exception("Failed to retrieve the categories for the document" + str(ex))
    return categories  # fix: the computed result was never returned
def decode(self, s):
    """Decode *s* as JSON; when the result carries the JSONCodec object
    marker, unpickle and return the embedded Python object instead.

    SECURITY: pickle.loads() can execute arbitrary code — only decode
    data from trusted sources.
    """
    o = JSONDecoder.decode(self, s)
    pickle_str = o.get(JSONCodec._obj, None)
    if pickle_str:
        # file_ver = o[JSONCodec._ver_key]
        # if file_ver != JSONCodec._ver:
        #     msg = 'Unsopported json-encoded version(%s != %s)!'
        #     raise ValueError(msg % (file_ver, JSONCodec._ver))
        # Quoted-printable text -> raw bytes, then unpickle the object.
        pickle_bytes = binascii.a2b_qp(pickle_str.encode(encoding="utf8"))
        o = pickle.loads(pickle_bytes)
    return o
def decode(self, s,):
    """Decode *s* as JSON; when the result carries the JSONCodec object
    marker, unpickle and return the embedded Python object instead.

    SECURITY: pickle.loads() can execute arbitrary code — only decode
    data from trusted sources.
    """
    o = JSONDecoder.decode(self, s)
    pickle_str = o.get(JSONCodec._obj, None)
    if pickle_str:
        #file_ver = o[JSONCodec._ver_key]
        # if file_ver != JSONCodec._ver:
        #     msg = 'Unsopported json-encoded version(%s != %s)!'
        #     raise ValueError(msg % (file_ver, JSONCodec._ver))
        # Quoted-printable text -> raw bytes, then unpickle the object.
        pickle_bytes = binascii.a2b_qp(pickle_str.encode(
            encoding='utf8'))
        o = pickle.loads(pickle_bytes)
    return o
def __checkShutdown(self, job):
    """Handle a beanstalk job taken from the 'shutdown' tube.

    If the job's timestamp is newer than this worker's reference time
    the process exits immediately; the job is NOT deleted in that path
    (presumably so other workers also see it — TODO confirm). Stale
    shutdown jobs are deleted. Returns True when the job came from the
    shutdown tube.
    """
    shutdown = False
    decoder = JSONDecoder()
    if job.stats()['tube'] == "shutdown" :
        shutdown = True
        args = decoder.decode(job.body)
        time = args['timestamp']
        # NOTE(review): assumes self.__cTime is this worker's start
        # timestamp — verify against the constructor.
        if time > self.__cTime :
            sys.exit(0)
        job.delete()
    return shutdown
def post(self, request, *args, **kwargs):
    """Create a task from either the 'models' form payload or the raw
    request body, and return the serialized task with HTTP 201."""
    form_data = request.POST.get('models', None)
    if form_data is None:  # fix: identity comparison with None
        validate_data = request.data
    else:
        validate_data = JSONDecoder().decode(form_data)
    task = TaskService.create_task(validate_data, request.user)
    serializer = project_serializer.ProjectTaskSerializer(instance=task)
    headers = self.get_success_headers(serializer.data)
    return response.Response(serializer.data,
                             status=status.HTTP_201_CREATED,
                             headers=headers)
def update(self, request, *args, **kwargs):
    """Apply an edit to the targeted task and return its serialized form."""
    partial = kwargs.pop('partial', False)  # popped so **kwargs stays clean
    form_data = request.POST.get('models', None)
    if form_data is None:  # fix: identity comparison with None
        validate_data = request.POST or request.data
    else:
        validate_data = JSONDecoder().decode(form_data)
    task = self.get_object()
    # fix: removed leftover debug print(validate_data)
    TaskService.edit_task(task, validate_data, request.user)
    serializer = project_serializer.ProjectTaskSerializer(instance=task)
    return response.Response(serializer.data)
class JSONSerializer(CustomizableSerializer):
    """
    Serializes objects using JSON (JavaScript Object Notation).

    See the :mod:`json` module documentation in the standard library for more information on
    available options.

    Certain options can resolve references to objects:

    * ``encoder_options['default']``
    * ``decoder_options['object_hook']``
    * ``decoder_options['object_pairs_hook']``

    :param encoder_options: keyword arguments passed to :class:`~json.JSONEncoder`
    :param decoder_options: keyword arguments passed to :class:`~json.JSONDecoder`
    :param encoding: the text encoding to use for converting to and from bytes
    :param custom_type_codec: wrapper to use to wrap custom types after marshalling
    """

    __slots__ = ('encoder_options', 'decoder_options', 'encoding', 'custom_type_codec',
                 '_encoder', '_decoder', '_marshallers', '_unmarshallers')

    # fix: annotate __init__ with '-> None' for consistency with the
    # sibling JSONSerializer implementation in this codebase.
    def __init__(self, encoder_options: Dict[str, Any] = None,
                 decoder_options: Dict[str, Any] = None, encoding: str = 'utf-8',
                 custom_type_codec: Union[JSONTypeCodec, str] = None) -> None:
        assert check_argument_types()
        super().__init__(resolve_reference(custom_type_codec) or JSONTypeCodec())
        self.encoding = encoding
        # Resolve any reference strings in the encoder/decoder hooks so
        # the stdlib JSON classes receive real callables (or None).
        self.encoder_options = encoder_options or {}
        self.encoder_options['default'] = resolve_reference(self.encoder_options.get('default'))
        self._encoder = JSONEncoder(**self.encoder_options)
        self.decoder_options = decoder_options or {}
        self.decoder_options['object_hook'] = resolve_reference(
            self.decoder_options.get('object_hook'))
        self.decoder_options['object_pairs_hook'] = resolve_reference(
            self.decoder_options.get('object_pairs_hook'))
        self._decoder = JSONDecoder(**self.decoder_options)

    def serialize(self, obj) -> bytes:
        """Encode *obj* as JSON and return the bytes in self.encoding."""
        return self._encoder.encode(obj).encode(self.encoding)

    def deserialize(self, payload: bytes):
        """Decode a JSON bytes payload back into Python objects."""
        payload = payload.decode(self.encoding)
        return self._decoder.decode(payload)

    @property
    def mimetype(self):
        """MIME type of the serialized form."""
        return 'application/json'
class JSONSerializer(CustomizableSerializer):
    """
    Serializes objects using JSON (JavaScript Object Notation).

    See the :mod:`json` module documentation in the standard library for more information on
    available options.

    Certain options can resolve references to objects:

    * ``encoder_options['default']``
    * ``decoder_options['object_hook']``
    * ``decoder_options['object_pairs_hook']``

    :param encoder_options: keyword arguments passed to :class:`~json.JSONEncoder`
    :param decoder_options: keyword arguments passed to :class:`~json.JSONDecoder`
    :param encoding: the text encoding to use for converting to and from bytes
    :param custom_type_codec: wrapper to use to wrap custom types after marshalling
    """

    __slots__ = ('encoder_options', 'decoder_options', 'encoding', 'custom_type_codec',
                 '_encoder', '_decoder', '_marshallers', '_unmarshallers')

    def __init__(self, encoder_options: Dict[str, Any] = None,
                 decoder_options: Dict[str, Any] = None, encoding: str = 'utf-8',
                 custom_type_codec: Union[JSONTypeCodec, str] = None) -> None:
        assert check_argument_types()
        super().__init__(resolve_reference(custom_type_codec) or JSONTypeCodec())
        self.encoding = encoding
        # Turn any reference strings in the options into real callables
        # before handing them to the stdlib encoder/decoder.
        self.encoder_options = encoder_options or {}
        self.encoder_options['default'] = resolve_reference(
            self.encoder_options.get('default'))
        self._encoder = JSONEncoder(**self.encoder_options)
        self.decoder_options = decoder_options or {}
        for hook_name in ('object_hook', 'object_pairs_hook'):
            self.decoder_options[hook_name] = resolve_reference(
                self.decoder_options.get(hook_name))
        self._decoder = JSONDecoder(**self.decoder_options)

    def serialize(self, obj) -> bytes:
        """Encode *obj* as JSON and return the bytes in self.encoding."""
        encoded = self._encoder.encode(obj)
        return encoded.encode(self.encoding)

    def deserialize(self, payload: bytes):
        """Decode a JSON bytes payload back into Python objects."""
        text_payload = payload.decode(self.encoding)
        return self._decoder.decode(text_payload)

    @property
    def mimetype(self):
        """MIME type of the serialized form."""
        return 'application/json'
def main():
    """Decode every course dictionary in ./course_data and write its
    parsed prerequisites as indented JSON under ./prerequisites/."""
    decoder = JSONDecoder()
    encoder = JSONEncoder(indent=4)
    # Each file in course_data holds the data for a single course.
    for course_file in os.listdir('./course_data'):
        with open(os.path.join('./course_data', course_file)) as handle:
            data = decoder.decode(handle.read())
        if data is None:
            continue
        code = data['course_code']
        print('Parsing', code)
        parsed = parse_prereq(data['prerequisites'], code)
        with open('./prerequisites/' + code + '.json', 'w') as out:
            out.write(encoder.encode(parsed))
def post(self, request, *args, **kwargs):
    """Create a task dependency and return its serialized form (201)."""
    form_data = request.POST.get('models', None)
    if form_data is None:  # fix: identity comparison with None
        validate_data = request.data
    else:
        validate_data = JSONDecoder().decode(form_data)
    task_dependency = TaskService.create_task_dependency(validate_data)
    serializer = project_serializer.ProjectTaskDependencySerializer(
        instance=task_dependency, data=validate_data)
    serializer.is_valid(raise_exception=True)
    headers = self.get_success_headers(serializer.data)
    return response.Response(serializer.data,
                             status=status.HTTP_201_CREATED,
                             headers=headers)
def task_changelog_detail(self, history_id, select_version):
    """Render the change-log detail web part for the change whose
    version matches *select_version* within the given task history.

    Returns the web part with a 'changefile' binding (None when no
    matching change is found).
    """
    result = None
    # NOTE(review): Manager.get() with a bare positional int is not a
    # valid Django lookup — this likely needs pk=int(history_id); confirm.
    history = CITaskHistory.objects.get(int(history_id), is_active=0)
    change_logs = CITaskHistoryService.get_change_log(history.ChangeLog)
    json_decoder = JSONDecoder()
    if change_logs:
        all_resp_changes = json_decoder.decode(change_logs['change_log'])
        # Scan every repo's change list until the selected version is found.
        for resp_changes in all_resp_changes:
            repo = resp_changes['repo']
            for changes in resp_changes['changes']:
                temp_changelog = VM_CITaskChangeLog(changes, 0, repo)
                if temp_changelog.version == select_version:
                    result = temp_changelog
                    break
            if result:
                break
    pagefileds = {"changefile": result}
    return self.get_webpart(pagefileds, CITaskPath.task_changelog_detail)
class DialogResource(Resource):
    """Twisted web resource that forwards posted dialog responses to
    their owning session and renders the result as JSON."""

    IsLeaf = True

    def __init__(self, sm):
        # sm: session manager used to resolve session ids.
        self.sm = sm
        self.decoder = JSONDecoder()

    def render_POST(self, request):
        """Route the posted dialog response to its session.

        On any error the traceback is rendered back to the client in red.
        """
        try:
            session_id = request.args[ARG_SESSION_ID][0]
            session = self.sm.get_session(session_id)
            if not session:
                return LinkError(ERROR_SESSION_NOT_FOUND).json
            response = request.args[ARG_DIALOG_RESPONSE][0]
            return session.dialog_response(self.decoder.decode(response)).json
        except Exception:
            # fix: was a bare 'except:', which also swallowed
            # SystemExit/KeyboardInterrupt.
            display = Display()
            display.append(DisplayLine(traceback.format_exc(), 0xff0000))
            return display.json
def update(self, request, *args, **kwargs):
    """Edit a task, validate through the view's serializer, and return
    the refreshed representation."""
    partial = kwargs.pop('partial', False)
    form_data = request.POST.get('models', None)
    if form_data is None:  # fix: identity comparison with None
        validate_data = request.POST or request.data
    else:
        validate_data = JSONDecoder().decode(form_data)
    task = self.get_object()
    TaskService.edit_task(task, validate_data, request.user)
    serializer = self.get_serializer(task, data=validate_data, partial=partial)
    serializer.is_valid(raise_exception=True)
    self.perform_update(serializer)
    if getattr(task, '_prefetched_objects_cache', None):
        # Drop stale prefetches so the response reflects the edit.
        task._prefetched_objects_cache = {}
    return response.Response(serializer.data)
def post(self, request, *args, **kwargs):
    """Create a task-owner binding and return its serialized form (201)."""
    form_data = request.POST.get('models', None)
    if form_data is None:  # fix: identity comparison with None
        validate_data = request.data
    else:
        validate_data = JSONDecoder().decode(form_data)
    task_id = validate_data.get('Task')
    # fix: Manager.get() needs a lookup keyword, not a positional int.
    task = models.Task.objects.get(pk=int(task_id))
    owner = validate_data.get('Owner')
    unit = validate_data.get('Unit')
    task_owner = TaskService.create_task_owner(task, owner, unit)
    serializer = project_serializer.ProjectTaskOwnerSerializer(
        instance=task_owner, data=validate_data)
    serializer.is_valid(raise_exception=True)
    headers = self.get_success_headers(serializer.data)
    return response.Response(serializer.data,
                             status=status.HTTP_201_CREATED,
                             headers=headers)
def _geolocate(networks):
    """Geolocate via Google's browserlocation API from observed wifi
    networks (dicts with 'mac', 'ssid' and 'ss' keys).

    Returns a dict with 'latitude', 'longitude' and 'accuracy' on
    success; raises GeoLocateError otherwise.
    """
    if networks:
        p = '/maps/api/browserlocation/json?browser=sploitego&sensor=true'
        for n in networks:
            p += '&%s' % urlencode({'wifi':'mac:%s|ssid:%s|ss:%s' % (_fullmac(n['mac']), n['ssid'], n['ss'])})
        # fix: removed leftover debug 'print p' statement.
        c = HTTPSConnection('maps.googleapis.com')
        try:
            c.request('GET', p)
            r = c.getresponse()
            if r.status == 200 and r.getheader('Content-Type').startswith('application/json'):
                d = JSONDecoder().decode(r.read())
                if d['status'] == 'OK':
                    l = d['location']
                    return {'latitude':l['lat'],'longitude':l['lng'],'accuracy':d['accuracy']}
        finally:
            c.close()  # fix: the HTTPS connection was never closed
    raise GeoLocateError('Unable to geolocate.')
def search_movie_deeper(self, date):
    """Query CGV's timetable endpoint and return ScreenTime objects
    parsed from the XML document embedded in the JSON reply.

    NOTE: the request payload fields are opaque pre-encrypted tokens.
    """
    url = "http://ticket.cgv.co.kr/CGV2011/RIA/CJ000.aspx/CJ_HP_TIME_TABLE"
    headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/100.0.4692.56 Safari/537.36",
               "Accept":"application/json, text/javascript, */*; q=0.01",
               "Content-Type":"application/json",
               "X-Requested-With":"XMLHttpRequest"}
    json_body = {"REQSITE":"x02PG4EcdFrHKluSEQQh4A==","MovieGroupCd":"4h5e0F6BOQ8nzPmVqjuy+g==","TheaterCd":"LMP+XuzWskJLFG41YQ7HGA==",
                 "PlayYMD":"QuBGrFzyucrUVVyggOh6Ig==","MovieType_Cd":"nG6tVgEQPGU2GvOIdnwTjg==","Subtitle_CD":"nG6tVgEQPGU2GvOIdnwTjg==",
                 "SOUNDX_YN":"nG6tVgEQPGU2GvOIdnwTjg==","Third_Attr_CD":"nG6tVgEQPGU2GvOIdnwTjg==","IS_NORMAL":"nG6tVgEQPGU2GvOIdnwTjg==","Language":"zqWM417GS6dxQ7CIf65+iA=="}
    reply = requests.post(url, headers=headers, json=json_body)
    decoded = JSONDecoder().decode(reply.content.decode('utf-8'))
    xml_text = decoded['d']['data']['DATA']
    screenings = []
    for element in ET.fromstring(xml_text):
        screening = ScreenTime(tag=None)
        screening.init_with_element(element)
        screenings.append(screening)
    return screenings
def main():
    """Resolve download links for a file page across several user agents
    and print each resolved URL (tab separated) to the output stream."""
    args = parse_cmdline()
    if args.output:
        output_stream = open(args.output, 'w')
    else:
        output_stream = sys.stdout
    results = []
    decoder = JSONDecoder()
    content = read_webpage(args.url)
    userid, file_id, folder_id, file_chk, mb, app, verifycode = parse_params(
        content)
    # Query get_file_url api for file link
    get_file_api = "/get_file_url.php?uid=%(userid)s&fid=%(file_id)s"\
        "&folder_id=%(folder_id)s&fid=%(file_id)s&file_chk=%(file_chk)s&mb=%(mb)s"\
        "&app=%(app)s&verifycode=%(verifycode)s&rd=%(rd)f" % {
            "userid": userid,
            "file_id": file_id,
            "folder_id": folder_id,
            "file_chk": file_chk,
            "mb": mb,
            "app": 0,
            "verifycode": verifycode,
            "rd": random.random(),
        }
    try:
        for ua in USER_AGENT_LIST[:args.numUserAgents]:
            request = Request(urljoin(args.url, get_file_api), headers={
                'User-Agent': ua,
            })
            response_text = read_webpage(request)
            response_obj = decoder.decode(response_text)
            downurl = response_obj.get('downurl')
            if downurl:
                results.append(downurl)
                print(downurl, end='\t', file=output_stream)
    finally:
        # fix: only close streams we opened — the original closed
        # sys.stdout when no --output file was given.
        if output_stream is not sys.stdout:
            output_stream.close()
def update(self, request, *args, **kwargs):
    """Edit a task owner's unit and return the serialized owner."""
    partial = kwargs.pop('partial', False)
    form_data = request.POST.get('models', None)
    if form_data is None:  # fix: identity comparison with None
        validate_data = request.POST or request.data
    else:
        validate_data = JSONDecoder().decode(form_data)
    task_owner_id = validate_data.get('id')
    unit = validate_data.get('Unit')
    task_owner = TaskService.edit_task_owner(task_owner_id, unit)
    serializer = self.get_serializer(instance=task_owner,
                                     data=validate_data, partial=partial)
    serializer.is_valid(raise_exception=True)
    self.perform_update(serializer)
    if getattr(task_owner, '_prefetched_objects_cache', None):
        # Drop stale prefetches so the response reflects the edit.
        task_owner._prefetched_objects_cache = {}
    # fix: removed dead commented-out code.
    return response.Response(serializer.data)
class Connector:
    """Thin HTTPS client for the Project Oxford Face API."""

    def __init__(self, api_key):
        self.key = api_key
        self.host = "api.projectoxford.ai"
        self.base_url = "/face/v1.0/{}"
        self.encoder = JSONEncoder()
        self.decoder = JSONDecoder()

    def encode_json(self, dictionary):
        """ encodes dictionaries to json to send to API """
        return self.encoder.encode(dictionary)

    def decode_json(self, json):
        """ decodes json to a dictionary """
        return self.decoder.decode(json)

    def send_request(self, method, url, qs_args=None, headers=None, body=None):
        """Send one request to the API; returns (decoded_body, response)."""
        # Because having a dictionary as default value is dangerous
        qs_args = {} if qs_args is None else qs_args
        headers = {} if headers is None else headers
        # Binary/file-like bodies go up as octet-stream; everything else
        # is JSON-encoded first.
        if hasattr(body, "read") or isinstance(body, bytes):
            headers["Content-Type"] = "application/octet-stream"
        else:
            body = self.encode_json(body)
            headers["Content-Type"] = "application/json"
        connection = HTTPSConnection(self.host)
        # Build the final request path, including any query string.
        target = self.base_url.format(url)
        if len(qs_args) > 0:
            target += "?{}".format(urlencode(qs_args))
        # Add api-key to the headers
        headers["Ocp-Apim-Subscription-Key"] = self.key
        connection.request(method, target, headers=headers, body=body)
        # Read the response and try to decode JSON
        response = connection.getresponse()
        data = response.read().decode()
        # TODO: Except data that is not JSON
        if len(data) > 0:
            data = self.decode_json(data)
        return data, response
class JsonParser:
    """Utility for converting JSON/CSV/XML inputs into Python dicts and
    back to strings (Python 2 era: handles both str and unicode)."""

    def __init__(self):
        # Stdlib JSON decoder plus a project-specific encoder.
        self.jsDecoder = JSONDecoder()
        self.jsEncoder = __ObjStrEncoder__()
        self.encoding = "utf-8"

    def toDict(self, targetStr, isDeepClone=False):
        """Decode a JSON string to a dict; a non-string argument is
        returned as-is (deep-copied when isDeepClone is True)."""
        if isinstance(targetStr, str) or isinstance(targetStr, unicode):
            try:
                return self.jsDecoder.decode(targetStr)
            except:
                pass
            raise ValueError('Bad JSON "%s"' % targetStr)
        else:
            import copy
            return copy.deepcopy(targetStr) if isDeepClone else targetStr

    def csvToDict(self, filepath, encoding="utf-8", isToDict=True):
        """Read a CSV file; when isToDict, the first row provides keys
        and each later row becomes a dict, else rows stay as lists."""
        csvData = []
        if isToDict:
            heads = None
        import codecs
        with codecs.open(filepath, 'r', encoding=encoding) as dataFile:
            import csv
            csvReader = csv.reader(dataFile, quoting=csv.QUOTE_MINIMAL)
            for row in csvReader:
                if isToDict:
                    if heads == None:
                        heads = row  # first row = column names
                    else:
                        rj = {}
                        for i in range(0, len(heads)):
                            try:
                                rj[heads[i]] = row[i]
                            except:
                                # Short rows simply omit trailing keys.
                                pass
                        csvData.append(rj)
                else:
                    csvData.append(row)
        return csvData

    def xmlToDict(self, filepath, propMark="Property", nameMark="name",
                  valueMark="value", typeMark="type", hasList=False):
        """Collect <Property name=... value=... type=...> elements into a
        dict; repeated names become lists when hasList is True."""
        from xml.dom.minidom import parse
        doml = parse(filepath)
        jsonMap = {}
        for node in doml.getElementsByTagName(propMark):
            tvgN = node.getAttribute(nameMark)
            tvgT = node.getAttribute(typeMark)
            tvgV = node.getAttribute(valueMark)
            if tvgT != "":
                # Typed values are wrapped as {"v": value, "t": type}.
                tvgV = {"v": tvgV, "t": tvgT}
            if tvgN != "":
                if hasList and jsonMap.__contains__(tvgN):
                    listVal = jsonMap[tvgN]
                    if type(listVal) == list:
                        listVal.append(tvgV)
                    else:
                        jsonMap[tvgN] = [listVal, tvgV]
                else:
                    jsonMap[tvgN] = tvgV
        return jsonMap

    def toStr(self, obj):
        """Encode *obj* back to a JSON string via the project encoder."""
        return self.jsEncoder.encode(obj)
def loads(s):
    """Decode the JSON document *s* with the module's ``_object_hook``.

    The parse_* hooks are explicitly left at their defaults (None).
    """
    # fix: dropped the 'encoding' keyword — JSONDecoder rejects it on
    # Python 3 (it was ignored for str input and later removed), and the
    # sibling loads() in this codebase already omits it.
    decoder = JSONDecoder(object_hook=_object_hook,
                          parse_float=None,
                          parse_int=None,
                          parse_constant=None,
                          object_pairs_hook=None)
    return decoder.decode(s)
class RedisStore():
    """Redis-backed persistence layer: DBO objects are serialized to
    JSON under their dbo_key, with an in-memory identity cache."""

    def __init__(self, dispatcher, db_host, db_port, db_num, db_pw):
        self.dispatcher = dispatcher
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host,
                              port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.encoder = JSONEncoder()
        self.decoder = JSONDecoder()
        self.class_map = {}   # class path string -> class object
        self.object_map = {}  # dbo_key -> loaded object (identity cache)

    def create_object(self, dbo, update_rev=False):
        """Persist a new object and fire its on_loaded hook."""
        self.save_object(dbo)
        dbo.on_loaded()

    def save_object(self, dbo, update_rev=False, autosave=False):
        """Serialize *dbo* and write it (and its set membership) to redis."""
        if update_rev:
            dbo.dbo_rev = getattr(dbo, "dbo_rev", 0) + 1
        json_obj = self.build_json(dbo)
        key = dbo.dbo_key
        self.redis.set(key, self.encoder.encode(json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatcher.dispatch("db_log{0}".format("_auto" if autosave else ""),
                                 "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo  # fix: dropped stray semicolon

    def build_json(self, dbo):
        """Build the JSON-serializable dict for *dbo*: plain fields,
        nested/keyed collections, and references stored by dbo_id."""
        dbo.before_save()
        json_obj = {}
        # Only record the concrete class when it differs from the base.
        if dbo.__class__ != dbo.dbo_base_class:
            json_obj["class_name"] = dbo.__module__ + "." + dbo.__class__.__name__
        for field_name in dbo.dbo_fields:
            json_obj[field_name] = getattr(dbo, field_name, None)
        for dbo_col in dbo.dbo_collections:
            coll_list = list()
            for child_dbo in getattr(dbo, dbo_col.field_name):
                if dbo_col.key_type:
                    coll_list.append(child_dbo.dbo_id)  # stored by key
                else:
                    coll_list.append(self.build_json(child_dbo))  # inline
            json_obj[dbo_col.field_name] = coll_list
        for dbo_ref in dbo.dbo_refs:
            ref = getattr(dbo, dbo_ref.field_name, None)
            if ref:
                json_obj[dbo_ref.field_name] = ref.dbo_id
        return json_obj

    def cache_object(self, dbo):
        """Add *dbo* to the identity cache."""
        # fix: the original merely *read* the map entry (a no-op), so
        # nothing was ever cached here.
        self.object_map[dbo.dbo_key] = dbo

    def load_cached(self, key):
        """Return the cached object for *key*, or None."""
        return self.object_map.get(key)

    def evict(self, dbo):
        """Remove *dbo* from the identity cache, logging on failure."""
        try:
            del self.object_map[dbo.dbo_key]
        except KeyError:  # fix: was a bare 'except:'
            self.dispatcher.dispatch(
                "db_log", "Failed to evict " + dbo.dbo_key + " from db cache")

    def load_by_key(self, key_type, key, base_class=None):
        """Load an object by type+key, via the cache when possible."""
        dbo_key = key_type + ":" + key
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.decoder.decode(json_str)
        dbo = self.load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo

    def load_class(self, json_obj, base_class):
        """Resolve (and cache) the class recorded in *json_obj*, falling
        back to *base_class* when none was recorded."""
        class_path = json_obj.get("class_name")
        if not class_path:
            return base_class
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        self.class_map[class_path] = clazz
        return clazz

    def load_object(self, dbo_class, key):
        """Load an object of *dbo_class* by its key."""
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)

    def load_json(self, dbo, json_obj):
        """Populate *dbo* (fields, collections, refs) from *json_obj*;
        missing pieces are logged rather than fatal."""
        for field_name in dbo.dbo_fields:
            try:
                setattr(dbo, field_name, json_obj[field_name])
            except KeyError:
                self.dispatcher.dispatch(
                    "db_log", "db: Object " + dbo.dbo_key +
                    " json missing field " + field_name)
        for dbo_col in dbo.dbo_collections:
            coll = getattr(dbo, dbo_col.field_name, [])
            try:
                for child_json in json_obj[dbo_col.field_name]:
                    if dbo_col.key_type:
                        child_dbo = self.load_by_key(
                            dbo_col.key_type, child_json, dbo_col.base_class)
                    else:
                        child_dbo = self.load_class(
                            child_json, dbo_col.base_class)()
                        self.load_json(child_dbo, child_json)
                    coll.append(child_dbo)
            except AttributeError:
                self.dispatcher.dispatch(
                    "db_log", "{0} json failed to load for coll {1} in {2}".format(
                        child_json, dbo_col.field_name, dbo.dbo_id))
            except KeyError:
                self.dispatcher.dispatch(
                    "db_log", "db: Object " + dbo.dbo_key +
                    " json missing collection " + dbo_col.field_name)
        for dbo_ref in dbo.dbo_refs:
            try:
                ref_key = json_obj[dbo_ref.field_name]
                ref_obj = self.load_by_key(
                    dbo_ref.key_type, ref_key, dbo_ref.base_class)
                setattr(dbo, dbo_ref.field_name, ref_obj)
            except:
                # NOTE(review): deliberately broad — any failure in ref
                # resolution is treated as a missing reference.
                self.dispatcher.dispatch(
                    "db_log", "db: Object " + dbo.dbo_key +
                    " json missing ref " + dbo_ref.field_name)
        dbo.on_loaded()
        return True

    def delete_object(self, dbo):
        """Delete *dbo*, its set membership, keyed children, and cache entry."""
        key = dbo.dbo_key
        self.redis.delete(key)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, key)
        for dbo_col in dbo.dbo_collections:
            if dbo_col.key_type:
                coll = getattr(dbo, dbo_col.field_name, set())
                for child_dbo in coll:
                    self.delete_object(child_dbo)
        self.dispatcher.dispatch("db_log", "object deleted: " + key)
        if self.object_map.get(dbo.dbo_key):
            del self.object_map[dbo.dbo_key]
        return True

    def fetch_set_keys(self, set_key):
        """Return all members of the redis set *set_key*."""
        return self.redis.smembers(set_key)
def geoip(ip=''):
    """Look up GeoIP data for *ip* (the caller's own address when empty)
    via smart-ip.net; returns the decoded JSON dict or None."""
    reply = urlopen('http://smart-ip.net/geoip-json/%s' % ip)
    if reply.code != 200:
        return None
    return JSONDecoder().decode(reply.read())
from requests import request, Response
from json.decoder import JSONDecoder

method_base = "https://api.vk.com/method/"
decoder = JSONDecoder()

# Load the stored VK access token and API version once at import time.
with open('access.json', 'rt') as f:
    json = f.read()  # fix: read the whole file at once, not line-by-line
access = decoder.decode(json)
access_token = f'access_token={access["access_token"]}&v={access["APIv"]}'


def get_user(idf: str) -> dict:
    """Fetch a VK user record by id or screen name."""
    return request('GET', method_base + f'users.get?user_ids={idf}&{access_token}').json()


def get_friends(idf: int, count: int = -1) -> dict:
    """Fetch a user's friends; *count* limits the result when not -1."""
    count_arg = f'&count={count}' if count != -1 else ''
    return request(
        'GET', method_base + f'friends.get?user_id={idf}&{access_token}'
        f'&fields=nickname{count_arg}').json()
class Trainer(object):
    """Maintains the per-feature / per-category counts that back the
    classifier, training one corpus (document) at a time."""

    categories = {}

    def __init__(self):
        self._jsonDecoder = JSONDecoder()
        self._jsonEncoder = JSONEncoder()
        self.__featureExtractor = FeatureExtractor()

    def setFeatureExtractor(self, featureExtractor):
        """Replace the feature extractor used by train()."""
        # fix: the original assigned to self.__featuredExtractor (typo)
        # while __init__ set self.__featureExtractor, so train() read an
        # attribute that only existed after this setter was called.
        self.__featureExtractor = featureExtractor

    def __isNumeric(self, feature):
        """Return True when *feature* parses as a float."""
        isNumeric = False
        try:
            float(feature)
            isNumeric = True
        except ValueError:
            pass
        return isNumeric

    """
    Given a list of yes category names, retrieves the yes/no category hash that the trainer
    needs
    """
    def __getCategoriesFromNames(self, yesTagNames, noTagNames):
        finalCategories = []
        # create the categories if they don't already exist; both
        # polarities are created, but only the matching one is counted.
        for tagName in yesTagNames:
            if tagName:
                categoryYes, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=True)
                categoryNo, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=False)
                finalCategories.append(categoryYes)
        for tagName in noTagNames:
            if tagName:
                categoryYes, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=True)
                categoryNo, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=False)
                finalCategories.append(categoryNo)
        return finalCategories

    """
    Trains a corpus of data.
    """
    @transaction.commit_manually
    def train(self, corpus="", yesTagNames=None, noTagNames=None):
        logger = logging.getLogger("Trainer.train")
        success = False
        categories = []
        try:
            document = Document.getDocumentByCorpus(corpus)
            if not document:
                # fix: read the attribute set in __init__ (the original
                # used the misspelled __featuredExtractor name).
                features = self.__featureExtractor.getFeatures(corpus)
                categories = self.__getCategoriesFromNames(yesTagNames, noTagNames)
                document = Document(corpus=corpus)
                document.save()
                documentCounts = {}
                for category in categories:
                    self.__incrementCategoryCount(documentCounts, category)
                DocumentCategoryCounts(document=document,
                                       countData=self._jsonEncoder.encode(documentCounts)).save()
                # Update per-feature category counts.
                for feature in features:
                    featureCount, _ = FeatureCounts.objects.get_or_create(featureName=feature)
                    counts = self._jsonDecoder.decode(featureCount.countData) if featureCount.countData else {}
                    for category in categories:
                        self.__incrementCategoryCount(counts, category)
                    featureCount.countData = self._jsonEncoder.encode(counts)
                    featureCount.save()
                # We keep an index of category document counts for faster
                # classification later on.
                catDocCountIndex = CategoryDocumentCountIndex.getCountIndex()
                index = self._jsonDecoder.decode(catDocCountIndex.countData) if catDocCountIndex.countData else {}
                for category in categories:
                    self.__incrementCategoryCount(index, category)
                catDocCountIndex.countData = self._jsonEncoder.encode(index)
                catDocCountIndex.save()
                success = True
                transaction.commit()
            else:
                logger.info("Document already exists: " + str(document.id) + " - " + document.corpusHash)
                success = True
        except Exception as ex:  # 'as' form works on Python 2.6+ and 3
            logger.info("Bad data:%s" % corpus)
            logger.exception("Failed to save the trained data: " + str(ex))
            transaction.rollback()
        return success
class JSONSerializer(serialize_abcs.CustomizableSerializer):
    """
    Serializes objects using JSON (JavaScript Object Notation).

    See the :mod:`json` module documentation in the standard library for more information on
    available options.

    Certain options can resolve references to objects:

    * ``encoder_options['default']``
    * ``decoder_options['object_hook']``
    * ``decoder_options['object_pairs_hook']``

    :param encoder_options: keyword arguments passed to :class:`~json.JSONEncoder`
    :param decoder_options: keyword arguments passed to :class:`~json.JSONDecoder`
    :param encoding: the text encoding to use for converting to and from bytes
    :param custom_type_key: magic key that identifies custom types in a JSON object
    """

    __slots__ = ('encoder_options', 'decoder_options', 'encoding', 'custom_type_key',
                 '_encoder', '_decoder', '_marshallers', '_unmarshallers')

    def __init__(self, encoder_options: Dict[str, Any] = None,
                 decoder_options: Dict[str, Any] = None, encoding: str = 'utf-8',
                 custom_type_key: str = '__type__'):
        self.encoding = encoding
        self.custom_type_key = custom_type_key
        self._marshallers = OrderedDict()  # class -> (typename, marshaller function)
        self._unmarshallers = OrderedDict()  # typename -> (class, unmarshaller function)
        # Resolve any reference strings into real callables before they
        # reach the stdlib encoder/decoder.
        self.encoder_options = encoder_options or {}
        self.encoder_options['default'] = resolve_reference(self.encoder_options.get('default'))
        self._encoder = JSONEncoder(**self.encoder_options)
        self.decoder_options = decoder_options or {}
        self.decoder_options['object_hook'] = resolve_reference(
            self.decoder_options.get('object_hook'))
        self.decoder_options['object_pairs_hook'] = resolve_reference(
            self.decoder_options.get('object_pairs_hook'))
        self._decoder = JSONDecoder(**self.decoder_options)

    def serialize(self, obj) -> bytes:
        """Encode *obj* as JSON and return the bytes in self.encoding."""
        return self._encoder.encode(obj).encode(self.encoding)

    def deserialize(self, payload: bytes):
        """Decode a JSON bytes payload back into Python objects."""
        payload = payload.decode(self.encoding)
        return self._decoder.decode(payload)

    def register_custom_type(
            self, cls: type, marshaller: Optional[Callable[[Any], Any]] = default_marshaller,
            unmarshaller: Optional[Callable[[Any, Any], Any]] = default_unmarshaller, *,
            typename: str = None) -> None:
        """Register (un)marshalling hooks for *cls* under *typename*,
        rebuilding the encoder/decoder to route through them."""
        typename = typename or qualified_name(cls)
        if marshaller:
            self._marshallers[cls] = typename, marshaller
            self.encoder_options['default'] = self._default_encoder
            self._encoder = JSONEncoder(**self.encoder_options)
        if unmarshaller:
            self._unmarshallers[typename] = cls, unmarshaller
            self.decoder_options['object_hook'] = self._custom_object_hook
            self._decoder = JSONDecoder(**self.decoder_options)

    def _default_encoder(self, obj):
        """json 'default' hook: wrap registered custom types in a
        {custom_type_key: typename, 'state': ...} envelope."""
        obj_type = obj.__class__
        try:
            typename, marshaller = self._marshallers[obj_type]
        except KeyError:
            # fix: obj_type is already a class, so obj_type.__class__.__name__
            # always said "type" — report the actual class name instead.
            raise LookupError('no marshaller found for type "{}"'
                              .format(obj_type.__name__)) from None
        state = marshaller(obj)
        return {self.custom_type_key: typename, 'state': state}

    def _custom_object_hook(self, obj: Dict[str, Any]):
        """json object_hook: unwrap the custom-type envelope back into an
        instance via the registered unmarshaller."""
        if len(obj) == 2 and self.custom_type_key in obj:
            typename = obj[self.custom_type_key]
            try:
                cls, unmarshaller = self._unmarshallers[typename]
            except KeyError:
                raise LookupError('no unmarshaller found for type "{}"'.format(typename)) from None
            # __new__ bypasses __init__; the unmarshaller restores state.
            instance = cls.__new__(cls)
            unmarshaller(instance, obj['state'])
            return instance
        else:
            return obj

    @property
    def mimetype(self):
        """MIME type of the serialized form."""
        return 'application/json'