def testGetBasicRecForbResFromBZ(bzList):
    """Print the recommended/forbidden food lists for the given symptoms.

    bzList must be the keys of the dict ``confBZRes`` (cached on disk as a
    single-line JSON file).  Per-symptom lists are unioned, de-duplicated
    and printed along with the lookup time.
    """
    rfCacheFilePath = ROOT_PATH.auto_config_root() + u'model_cache/find_cache/bzRecForbDics.json'
    # time.clock() was removed in Python 3.8; perf_counter is the modern timer.
    recommend_start = time.perf_counter()
    # 'with' guarantees the cache file is closed even if decoding raises.
    with open(rfCacheFilePath, 'r') as rfCacheFile:
        line = rfCacheFile.readline()
    bzRecForbDics = JSONDecoder().decode(line)
    recSCList = []
    forbSCList = []
    for bz in bzList:
        # membership test on the dict itself, not .keys()
        if bz in bzRecForbDics:
            recSCList.extend(bzRecForbDics[bz]['0'])  # '0' -> recommended
            forbSCList.extend(bzRecForbDics[bz]['1'])  # '1' -> forbidden
    recSCList = list(set(recSCList))  # de-duplication, same below
    forbSCList = list(set(forbSCList))
    recommend_end = time.perf_counter()
    print('recommend foods run time: %f s' % (recommend_end - recommend_start))
    print('推荐食材(基础):')
    for sc in recSCList:
        # (removed a dead trailing comma left over from a 2to3 conversion)
        print(sc + ', ')
    print('\n----------------------------------------------------------')
    print('禁忌食材(基础):')
    for sc in forbSCList:
        print(sc + ', ')
    print('\n----------------------------------------------------------')
def new_decoder():
    """Build a JSONDecoder whose object parsing is handled by NBDecoder.

    The pure-python scanner must be rebuilt after swapping parse_object,
    because the scanner captures the parse hooks when it is created.
    """
    from json.decoder import JSONDecoder
    from json.scanner import py_make_scanner

    custom = JSONDecoder()
    custom.parse_object = NBDecoder
    # Recreate scan_once so it picks up the replaced object parser.
    custom.scan_once = py_make_scanner(custom)
    return custom
def _geolocate(networks):
    # Geolocate via Google's (legacy) browserlocation API using nearby wifi
    # access points; 'networks' is an iterable of dicts with keys 'mac',
    # 'ssid' and 'ss' (signal strength).  Raises GeoLocateError on any
    # non-OK response.
    if networks:
        p = '/maps/api/browserlocation/json?browser=sploitego&sensor=true'
        for n in networks:
            # One 'wifi' query parameter per access point.
            p += '&%s' % urlencode({
                'wifi': 'mac:%s|ssid:%s|ss:%s' % (_fullmac(n['mac']), n['ssid'], n['ss'])
            })
        print p  # NOTE(review): debug output of the request path
        c = HTTPSConnection('maps.googleapis.com')
        c.request('GET', p)
        r = c.getresponse()
        if r.status == 200 and r.getheader('Content-Type').startswith(
                'application/json'):
            j = JSONDecoder()
            d = j.decode(r.read())
            if d['status'] == 'OK':
                l = d['location']
                # Flatten the API response into a single dict.
                return {
                    'latitude': l['lat'],
                    'longitude': l['lng'],
                    'accuracy': d['accuracy']
                }
    # Anything other than a clean 200/JSON/'OK' response falls through here.
    raise GeoLocateError('Unable to geolocate.')
def testGetBasicRecForbResFromBZ(bzList):
    """Print the recommended/forbidden food lists for the given symptoms.

    bzList must be the keys of the dict ``confBZRes`` (cached on disk as a
    single-line JSON file).  Per-symptom lists are unioned, de-duplicated
    and printed along with the lookup time.
    """
    rfCacheFilePath = ROOT_PATH.auto_config_root(
    ) + u'model_cache/find_cache/bzRecForbDics.json'
    # time.clock() was removed in Python 3.8; perf_counter is the modern timer.
    recommend_start = time.perf_counter()
    # 'with' guarantees the cache file is closed even if decoding raises.
    with open(rfCacheFilePath, 'r') as rfCacheFile:
        line = rfCacheFile.readline()
    bzRecForbDics = JSONDecoder().decode(line)
    recSCList = []
    forbSCList = []
    for bz in bzList:
        # membership test on the dict itself, not .keys()
        if bz in bzRecForbDics:
            recSCList.extend(bzRecForbDics[bz]['0'])  # '0' -> recommended
            forbSCList.extend(bzRecForbDics[bz]['1'])  # '1' -> forbidden
    recSCList = list(set(recSCList))  # de-duplication, same below
    forbSCList = list(set(forbSCList))
    recommend_end = time.perf_counter()
    print('recommend foods run time: %f s' % (recommend_end - recommend_start))
    print('推荐食材(基础):')
    for sc in recSCList:
        # (removed a dead trailing comma left over from a 2to3 conversion)
        print(sc + ', ')
    print('\n----------------------------------------------------------')
    print('禁忌食材(基础):')
    for sc in forbSCList:
        print(sc + ', ')
    print('\n----------------------------------------------------------')
def change_logs(self):
    """Return this task history's change log entries as VM_CITaskChangeLog
    view models; on any failure the error is logged and whatever was
    collected so far is returned.
    """
    entries = []
    try:
        decoder = JSONDecoder()
        raw_logs = CITaskHistoryService.get_change_log(
            self.ci_task_history.ChangeLog)
        if raw_logs:
            # One entry per change, numbered 1..n across all repos.
            counter = 1
            for repo_changes in decoder.decode(raw_logs['change_log']):
                repo_name = repo_changes['repo']
                for change in repo_changes['changes']:
                    entries.append(VM_CITaskChangeLog(change, counter, repo_name))
                    counter += 1
        elif self.ci_task_history.CodeVersion:
            # Fall back to the stored code version when no change log exists.
            versions = decoder.decode(self.ci_task_history.CodeVersion)
            entries.append(VM_CITaskChangeLog(versions[0], 0, ""))
    except Exception as ex:
        SimpleLogger.exception(ex)
    return entries
def test_execute(self):
    # Data-driven API test: read input/expectation from the config, issue
    # the HTTP request, then verify both the JSON response and the
    # resulting database row.
    inPut = self.config.getInPut()
    expect = self.config.getExpect()
    print expect
    d = JSONDecoder()
    # Request parameters are stored as a JSON string in the config.
    data = d.decode(inPut.get("parameters"))
    if inPut.get("method") == "POST":
        r = requests.post(inPut.get("url"), data)
    else:
        # NOTE(review): requests.get has no 'parameters' kwarg -- 'params'
        # was probably intended; confirm this GET branch is ever exercised.
        r = requests.get(inPut.get("url"), parameters=data)
    response = d.decode(str(r.text))
    expectResponse = d.decode(expect.get("response"))
    print expect.get("sql")
    expectSql = d.decode(expect.get("sql"))
    print expectSql
    expectSqlQuery = expectSql.get("query")
    # The expected DB row is stored as a comma-separated string.
    expectSqlResult = tuple(expectSql.get("result").encode(encoding="UTF-8", errors="strict").split(","))
    conn = MySQLdb.connect(host="172.18.33.37", user="******", passwd="123456", db="meshare", port=3306)
    cur = conn.cursor()
    cur.execute(expectSqlQuery)
    result = cur.fetchone()
    self.assertTupleEqual(result, expectSqlResult)
    # The API may return extra fields; only the expected subset is checked.
    self.assertDictContainsSubset(expectResponse, response)
def __init__(self, json_record): decoder = JSONDecoder() values = decoder.decode(json_record) self.state_code = values['stateCode'] self.country_code = values['countryCode'] self.site_num = values['siteNum'] self.parameter_code = values['parameterCode'] self.poc = values['poc'] self.lat = values['latitude'] self.lon = values['longitude'] self.datum = values['datum'] self.parameter_name = values['parameterName'] self.date_local = values['dateLocal'] self.time_local = values['timeLocal'] self.date_gmt = values['dateGMT'] self.time_gmt = values['timeGMT'] self.sample_measurement = values['sampleMeasurement'] self.units_of_measure = values['unitsOfMeasure'] self.mdl = values['mdl'] self.uncertainty = values['uncertainty'] self.qualifier = values['qualifier'] self.method_type = values['methodType'] self.method_code = values['methodCode'] self.method_name = values['methodName'] self.state_name = values['stateName'] self.county_name = values['countyName'] self.date_of_last_change = values['dateOfLastChange'] self.prediction = float(values['prediction']) self.error = float(values['error']) self.anomaly = float(values['anomaly']) self.prediction_next = float(values['predictionNext'])
def loads(s):
    """Decode the JSON document *s*, building objects via _object_hook.

    The remaining hooks are passed explicitly as None, i.e. the decoder's
    defaults are used for floats, ints, constants and pairs.
    """
    return JSONDecoder(object_hook=_object_hook,
                       parse_float=None,
                       parse_int=None,
                       parse_constant=None,
                       object_pairs_hook=None).decode(s)
def refreshToken(self):
    """Fetch a fresh WeChat access token and return it (None when absent).

    SECURITY(review): the appid/secret pair is hard-coded here; it should
    be moved to configuration rather than committed in source.
    """
    params = {
        "appid": "wxeb4777f5ed7eca05",
        "secret": "ea6f3299547010a8e743bef04a5075a6"
    }
    # renamed from 're' -- the original local shadowed the 're' module
    resp = self.doPost(self.Constant.get('ACCESSTOKEN'), params)
    resp = JSONDecoder().decode(resp)
    return resp.get("access_token")
def getjobinfo(servername, joburl):
    """Fetch a Jenkins job's JSON API description and return it decoded."""
    connection = httplib2.HTTPConnectionWithTimeout(servername)
    connection.request("GET", joburl + "api/json")
    payload = connection.getresponse().read()
    connection.close()
    # The raw response is bytes; decode to text before JSON-parsing.
    return JSONDecoder().decode(payload.decode())
def getjenkinsjobs(jenkinsserver):
    """Return the decoded JSON job listing from the server's Jenkins root API."""
    connection = httplib2.HTTPConnectionWithTimeout(jenkinsserver)
    connection.request("GET", "/jenkins/api/json")
    payload = connection.getresponse().read()
    connection.close()
    # The raw response is bytes; decode to text before JSON-parsing.
    return JSONDecoder().decode(payload.decode())
def reversegeo(lat, lng):
    """Reverse-geocode a lat/lng pair via the Google Maps geocode API.

    Returns the 'results' list on success; raises GeoLocateError on any
    HTTP or API-status failure.
    """
    resp = urlopen('https://maps.googleapis.com/maps/api/geocode/json?latlng=%f,%f&sensor=true' % (lat, lng))
    is_json_ok = resp.code == 200 and resp.headers['Content-Type'].startswith('application/json')
    if is_json_ok:
        payload = JSONDecoder().decode(resp.read())
        if payload['status'] == 'OK':
            return payload['results']
        raise GeoLocateError('Unable to reverse geo code lat long: %s.' % payload['status'])
    raise GeoLocateError('Unable to reverse geo code lat long.')
def parseCredentials(pwd, file="secret_credentials.json"):
    """Decrypt the stored credentials and return (server, port, user, password).

    Missing entries fall back to placeholder defaults.  The 'file' argument
    is accepted for interface compatibility but is not used in this body.
    """
    plaintext = encryption.decrypt(password=pwd) or "{}"
    creds = JSONDecoder().decode(plaintext)
    print("Opening credential file...")
    server = creds.get("SERVER", "example.com")
    port = creds.get("PORT", "0")
    user = creds.get("USER", "*****@*****.**")
    password = creds.get("PASSWORD", "admin")
    return server, port, user, password
def perform_destroy(self, instance):
    """Delete a task dependency.

    Its id comes from the posted 'models' JSON blob when present,
    otherwise from the URL kwargs.
    """
    raw = self.request.POST.get('models', None)
    if raw is None:
        dependency_id = int(self.kwargs['id'])
    else:
        payload = JSONDecoder().decode(raw)
        dependency_id = int(payload.get('id', 0))
    TaskService.delete_task_dependency(int(dependency_id))
def get_object(self):
    """Resolve the Task this view operates on.

    The task id is taken from the posted 'models' JSON blob when present,
    otherwise from the URL kwargs.
    """
    form_data = self.request.POST.get('models', None)
    if form_data != None:
        json_decoder = JSONDecoder()
        validate_data = json_decoder.decode(form_data)
        task_id = int(validate_data.get('id', 0))
    else:
        task_id = int(self.kwargs['task_id'])
    # BUG FIX: Django's Manager.get() takes no bare positional id -- the
    # lookup must be a keyword argument (pk=...), otherwise it raises.
    task = models.Task.objects.get(pk=task_id)
    return task
def getCategoriesForDocument(document):
    # Look up the cached per-category counts for 'document' and resolve
    # them to ClassifierCategory objects.
    # NOTE(review): 'categories' is never returned -- callers currently
    # always receive None; confirm whether a final 'return categories'
    # is missing here.
    logger = logging.getLogger("DocumentCategory.getCategoriesForDocument")
    categories = None
    try :
        documentCount = DocumentCategoryCounts.objects.get(document=document)
        jsonDecoder = JSONDecoder()
        # countData is a JSON object keyed by category id.
        categories = jsonDecoder.decode(documentCount.countData)
        categories = ClassifierCategory.getCategoriesByIds(categories.keys())
    except Exception, ex :
        logger.exception("Failed to retrieve the categories for the document" + str(ex))
def __checkShutdown(self, job):
    """Return True when *job* came from the 'shutdown' tube.

    A shutdown job stamped after this worker's start time terminates the
    process; a stale shutdown job is simply deleted from the queue.
    """
    if job.stats()['tube'] != "shutdown":
        return False
    payload = JSONDecoder().decode(job.body)
    if payload['timestamp'] > self.__cTime:
        sys.exit(0)
    job.delete()
    return True
def update(self, request, *args, **kwargs):
    """Update a Task from either the 'models' form blob or the request
    payload and return the re-serialized task.
    """
    # accepted for DRF compatibility; not used by this variant
    partial = kwargs.pop('partial', False)
    form_data = request.POST.get('models', None)
    if form_data == None:
        validate_data = request.POST or request.data
    else:
        json_decoder = JSONDecoder()
        validate_data = json_decoder.decode(form_data)
    task = self.get_object()
    # (removed a leftover debug print of the validated payload)
    TaskService.edit_task(task, validate_data, request.user)
    serializer = project_serializer.ProjectTaskSerializer(instance=task)
    return response.Response(serializer.data)
def reversegeo(lat, lng):
    # Reverse-geocode (lat, lng) through the Google Maps geocode endpoint.
    # Returns the list of result objects, or raises GeoLocateError when
    # the HTTP response or the API status is not OK.
    r = urlopen(
        'https://maps.googleapis.com/maps/api/geocode/json?latlng=%f,%f&sensor=true'
        % (lat, lng))
    if r.code == 200 and r.headers['Content-Type'].startswith(
            'application/json'):
        # NOTE(review): under Python 3, r.read() returns bytes, which
        # JSONDecoder.decode() rejects -- confirm this runs on Python 2
        # or decode the payload to text first.
        r = JSONDecoder().decode(r.read())
        if r['status'] == 'OK':
            return r['results']
        else:
            raise GeoLocateError('Unable to reverse geo code lat long: %s.' %
                                 r['status'])
    raise GeoLocateError('Unable to reverse geo code lat long.')
def post(self, request, *args, **kwargs):
    """Create a Task from the posted payload and return it serialized
    with HTTP 201.
    """
    raw = request.POST.get('models', None)
    if raw is None:
        payload = request.data
    else:
        payload = JSONDecoder().decode(raw)
    task = TaskService.create_task(payload, request.user)
    serializer = project_serializer.ProjectTaskSerializer(instance=task)
    headers = self.get_success_headers(serializer.data)
    return response.Response(serializer.data,
                             status=status.HTTP_201_CREATED,
                             headers=headers)
def register_custom_type(
        self, cls: type, marshaller: Optional[Callable[[Any], Any]] = default_marshaller,
        unmarshaller: Optional[Callable[[Any, Any], Any]] = default_unmarshaller, *,
        typename: Optional[str] = None) -> None:
    """Register marshall/unmarshall callbacks for *cls* and rebuild the
    JSON encoder/decoder so the hooks take effect.

    :param cls: the custom class to (de)serialize
    :param marshaller: converts an instance of ``cls`` to a JSON-compatible value
    :param unmarshaller: reconstructs an instance from the marshalled value
    :param typename: registry key for the type; defaults to its qualified name
    """
    typename = typename or qualified_name(cls)
    if marshaller:
        self._marshallers[cls] = typename, marshaller
        # The encoder captures 'default' at construction, so recreate it.
        self.encoder_options['default'] = self._default_encoder
        self._encoder = JSONEncoder(**self.encoder_options)
    if unmarshaller:
        self._unmarshallers[typename] = cls, unmarshaller
        # Likewise the decoder captures object_hook at construction time.
        self.decoder_options['object_hook'] = self._custom_object_hook
        self._decoder = JSONDecoder(**self.decoder_options)
def post(self, request, *args, **kwargs):
    """Create a task dependency from the posted payload and respond 201
    with the serialized dependency.
    """
    raw = request.POST.get('models', None)
    if raw is None:
        payload = request.data
    else:
        payload = JSONDecoder().decode(raw)
    task_dependency = TaskService.create_task_dependency(payload)
    serializer = project_serializer.ProjectTaskDependencySerializer(
        instance=task_dependency, data=payload)
    serializer.is_valid(raise_exception=True)
    headers = self.get_success_headers(serializer.data)
    return response.Response(serializer.data,
                             status=status.HTTP_201_CREATED,
                             headers=headers)
def main():
    """Convert every file in ./course_data into a prerequisite JSON file.

    Each input file holds one course's data dictionary; the parsed
    prerequisites are written to ./prerequisites/<course_code>.json.
    """
    decoder = JSONDecoder()
    encoder = JSONEncoder(indent=4)
    # Loop over every file in the course_data folder.
    for course_file in os.listdir('./course_data'):
        with open(os.path.join('./course_data', course_file)) as src:
            data = decoder.decode(src.read())
        # A literal JSON 'null' document decodes to None -- skip it.
        if data is None:
            continue
        print('Parsing', data['course_code'])
        out_path = './prerequisites/' + data['course_code'] + '.json'
        with open(out_path, 'w') as out:
            out.write(encoder.encode(
                parse_prereq(data['prerequisites'], data['course_code'])
            ))
def _readTrackedLayerFile(self, filename):
    """Load a tracked-layer JSON file and return the decoded structure.

    Objects are materialized through the module-level 'decoder' hook.
    """
    # Read the file in one pass: the previous readlines() + "\n".join()
    # round trip doubled every newline (readlines keeps the terminators).
    with open(filename) as f:
        trackInfo = JSONDecoder(object_hook=decoder).decode(f.read())
    return trackInfo
def find_JSON_format_data_structure(
    string, name, found, not_found, cannot_parse_JSON
):
    """Finds a named JSON-format data structure in the string.

    The name can be any string. The pattern "name = " will be looked
    for in the string, and the data structure following it parsed and
    returned as a python data structure.
    """
    # NOTE(review): this definition appears truncated in this chunk --
    # the success branch after raw_decode (presumably calling 'found')
    # is not visible here.
    try:
        name_start = string.index(name)
    except ValueError:
        not_found(name, string)
    else:
        name_length = len(name)
        name_end = name_start + name_length
        # Skip the '=' (with optional surrounding whitespace) that
        # follows the name; 'remaining' is the unscanned tail.
        _, remaining = re.Scanner([
            (r"\s*=\s*", lambda scanner, token: None)
        ]).scan(
            string[name_end:]
        )
        try:
            # raw_decode tolerates trailing text after the JSON value.
            data, end_position = JSONDecoder().raw_decode(remaining)
        except ValueError, value_error:
            cannot_parse_JSON(remaining)
        else:
def _refresh_window_state(self):
    """
    The alerts and other required data for managing alerts are stored as
    a Javascript array in window.STATE.

    Returns:
        The parsed value of window.STATE
    """
    alerts_url = 'https://www.' + _GOOGLE_DOMAIN + '/alerts?hl=en&gl=us'
    response = self.opener.open(alerts_url)
    resp_code = response.getcode()
    body = response.read()
    if resp_code != 200:
        raise UnexpectedResponseError(resp_code, [], body)
    soup = BeautifulSoup(body, convertEntities=BeautifulSoup.HTML_ENTITIES)
    # window.STATE is defined inside one of the page's <script> tags.
    script = soup.find('script', text=re.compile(r'window\.STATE\s*='))
    match = re.search(r'window\.STATE\s*=\s*(.*)', script.string)
    if match is None:
        raise ParseFailureError(
            "Couldn't find the definition of window.STATE in the Google Alerts page"
        )
    # raw_decode ignores any javascript following the JSON literal.
    state_value = JSONDecoder().raw_decode(match.group(1))[0]
    self.window_state = WindowState(state_value)
    self.account = self.window_state.accounts[self.email]
def from_json(cls, json: str):
    """
    Deserialize from json

    :param str json: the json string
    :return: a (board, exposed_squares, markers) tuple
    :rtype: tuple
    """
    # step 1 - init object; use cls rather than the hard-coded Board class
    # so that subclasses deserialize to their own type
    board = cls(1, 1, 0)
    obj = JSONDecoder().decode(json)
    # step 2 - load the data into the board object (grids back to ndarrays)
    obj["_is_mine"] = np.array(obj["_is_mine"])
    obj["_is_open"] = np.array(obj["_is_open"])
    obj["_adj_mines"] = np.array(obj["_adj_mines"])
    obj["_marker"] = np.array(obj["_marker"])
    board.__dict__ = obj
    # step 3 - rebuild the list of exposed squares
    exposed = []
    for x in range(board.x):
        for y in range(board.y):
            if board._is_open[x, y]:
                board._append_mine(x, y, exposed)
    # step 4 - rebuild the markers on squares that are still closed
    markers = []
    for x in range(board.x):
        for y in range(board.y):
            if board._marker[x, y] != Marker.clear and not board._is_open[x, y]:
                markers.append((x, y, board._marker[x, y]))
    return board, exposed, markers
def _decoder(self):
    """Build a strict JSONDecoder whose object_hook resolves symbols.

    When a symbol table is available its item lookup is used; otherwise
    objects fall back to self._as_type.
    """
    table = self.symbol_table.table
    lookup = table.__getitem__ if table else self._as_type
    hook = functools.partial(self._object_decoder, symbol_table=lookup)
    return JSONDecoder(object_hook=hook, strict=True)
def loadModelFromDisk(self, modelDiskPath):
    """Load a serialized model from *modelDiskPath*.

    Each line of the file is '<label>@<json>' where label is one of
    start_p / emit_p / tag_states / trans_p.  Returns None when any
    component is missing.
    """
    # 'with' closes the file on every path -- the original leaked the
    # handle when it bailed out early on a malformed model.
    with open(modelDiskPath, 'r') as modelReadObj:
        modelLines = modelReadObj.readlines()
    modelCntDic = {
        'start_p': None,
        'emit_p': None,
        'tag_states': None,
        'trans_p': None
    }
    for line in modelLines:
        # Split on the first '@': label before it, JSON payload after.
        sep = line.find('@')
        label, element = line[:sep], JSONDecoder().decode(line[sep + 1:])
        modelCntDic[label] = element
    if None in modelCntDic.values():
        print('model with some errors, please check!')
        return None
    # NOTE(review): __init__ always returns None, so 'model' is None and
    # this method effectively re-initializes self in place -- confirm
    # whether callers expect the instance itself to be returned instead.
    model = self.__init__(modelCntDic['start_p'], modelCntDic['emit_p'],
                          modelCntDic['tag_states'], modelCntDic['trans_p'])
    return model
def __init__(self, encoder_options: Union[Dict[str, Any], None] = None,
             decoder_options: Union[Dict[str, Any], None] = None,
             encoding: str = 'utf-8',
             custom_type_codec: Union[JSONTypeCodec, str, None] = None):
    """Configure the JSON serializer.

    :param encoder_options: keyword arguments for :class:`json.JSONEncoder`
    :param decoder_options: keyword arguments for :class:`json.JSONDecoder`
    :param encoding: text encoding used when converting to/from bytes
    :param custom_type_codec: codec (or reference to one) wrapping custom types
    """
    assert check_argument_types()
    super().__init__(resolve_reference(custom_type_codec) or JSONTypeCodec())
    self.encoding = encoding
    self.encoder_options = encoder_options or {}
    # Hook options may be given as string references; resolve them to
    # actual callables before constructing the encoder/decoder.
    self.encoder_options['default'] = resolve_reference(self.encoder_options.get('default'))
    self._encoder = JSONEncoder(**self.encoder_options)
    self.decoder_options = decoder_options or {}
    self.decoder_options['object_hook'] = resolve_reference(
        self.decoder_options.get('object_hook'))
    self.decoder_options['object_pairs_hook'] = resolve_reference(
        self.decoder_options.get('object_pairs_hook'))
    self._decoder = JSONDecoder(**self.decoder_options)
def __init__(self, dispatcher, db_host, db_port, db_num, db_pw):
    """Wire up the redis-backed store: a small connection pool, JSON
    (en|de)coders and the in-memory class/object caches.
    """
    self.dispatcher = dispatcher
    connection_pool = ConnectionPool(max_connections=2, db=db_num,
                                     host=db_host, port=db_port,
                                     password=db_pw)
    self.redis = StrictRedis(connection_pool=connection_pool)
    self.encoder = JSONEncoder()
    self.decoder = JSONDecoder()
    self.class_map = {}
    self.object_map = {}
def getJSON(xml_string):
    """
    Will return JSON tree only from OpenAPIv3 response.

    params:
        xml_string: OpenAPIv3 response XML string whose root element's
        text is a JSON document.
    """
    root = ElementTree.fromstring(xml_string)
    decoder = JSONDecoder()
    return decoder.decode(root.text)
def update(self, request, *args, **kwargs):
    """Apply an edit to the task from either the 'models' blob or the
    request payload, run it through the serializer, and return the result.
    """
    partial = kwargs.pop('partial', False)
    raw = request.POST.get('models', None)
    if raw is None:
        payload = request.POST or request.data
    else:
        payload = JSONDecoder().decode(raw)
    task = self.get_object()
    TaskService.edit_task(task, payload, request.user)
    serializer = self.get_serializer(task, data=payload, partial=partial)
    serializer.is_valid(raise_exception=True)
    self.perform_update(serializer)
    # Drop stale prefetch caches so the response reflects the update.
    if getattr(task, '_prefetched_objects_cache', None):
        task._prefetched_objects_cache = {}
    return response.Response(serializer.data)
def task_changelog_detail(self, history_id, select_version):
    """Render the change-log detail web part for one version of a task
    history's change log (empty part when the version is not found).
    """
    result = None
    # BUG FIX: Django's Manager.get() does not accept a bare positional
    # id; the lookup must be given as a keyword (pk=...).
    history = CITaskHistory.objects.get(pk=int(history_id), is_active=0)
    change_logs = CITaskHistoryService.get_change_log(history.ChangeLog)
    json_decoder = JSONDecoder()
    if change_logs:
        all_resp_changes = json_decoder.decode(change_logs['change_log'])
        for resp_changes in all_resp_changes:
            repo = resp_changes['repo']
            for changes in resp_changes['changes']:
                temp_changelog = VM_CITaskChangeLog(changes, 0, repo)
                # Stop at the first entry matching the requested version.
                if temp_changelog.version == select_version:
                    result = temp_changelog
                    break
            if result:
                break
    pagefileds = {"changefile": result}
    return self.get_webpart(pagefileds, CITaskPath.task_changelog_detail)
def get_document():
    """Return the HTML of the document identified by a URL.

    POST: the URL arrives in a JSON body ({'url': ...}) and the document
    is also added to Fuseki.  GET: the URL is read from the querystring.
    """
    if request.method == 'POST':
        target_url = JSONDecoder().decode(request.data)['url']
        return add_document_to_fuseki(target_url)
    elif request.method == 'GET':
        target_url = request.args.get('url')
        return get_doc(target_url)
def __init__(self, encoder_options: Union[Dict[str, Any], None] = None,
             decoder_options: Union[Dict[str, Any], None] = None,
             encoding: str = 'utf-8',
             custom_type_key: str = '__type__'):
    """Configure the serializer.

    :param encoder_options: keyword arguments for :class:`json.JSONEncoder`
    :param decoder_options: keyword arguments for :class:`json.JSONDecoder`
    :param encoding: text encoding used when converting to/from bytes
    :param custom_type_key: JSON key under which custom type names are stored
    """
    self.encoding = encoding
    self.custom_type_key = custom_type_key
    self._marshallers = OrderedDict()  # class -> (typename, marshaller function)
    self._unmarshallers = OrderedDict()  # typename -> (class, unmarshaller function)
    self.encoder_options = encoder_options or {}
    # Hook options may be given as string references; resolve them to
    # actual callables before constructing the encoder/decoder.
    self.encoder_options['default'] = resolve_reference(self.encoder_options.get('default'))
    self._encoder = JSONEncoder(**self.encoder_options)
    self.decoder_options = decoder_options or {}
    self.decoder_options['object_hook'] = resolve_reference(
        self.decoder_options.get('object_hook'))
    self.decoder_options['object_pairs_hook'] = resolve_reference(
        self.decoder_options.get('object_pairs_hook'))
    self._decoder = JSONDecoder(**self.decoder_options)
def _geolocate(networks):
    # Geolocate via Google's (legacy) browserlocation API using nearby wifi
    # access points; 'networks' is an iterable of dicts with keys 'mac',
    # 'ssid' and 'ss' (signal strength).  Raises GeoLocateError on any
    # non-OK response.
    if networks:
        p = '/maps/api/browserlocation/json?browser=sploitego&sensor=true'
        for n in networks:
            # One 'wifi' query parameter per access point.
            p += '&%s' % urlencode({'wifi':'mac:%s|ssid:%s|ss:%s' % (_fullmac(n['mac']), n['ssid'], n['ss'])})
        print p  # NOTE(review): debug output of the request path
        c = HTTPSConnection('maps.googleapis.com')
        c.request('GET', p)
        r = c.getresponse()
        if r.status == 200 and r.getheader('Content-Type').startswith('application/json'):
            j = JSONDecoder()
            d = j.decode(r.read())
            if d['status'] == 'OK':
                l = d['location']
                # Flatten the API response into a single dict.
                return {'latitude':l['lat'],'longitude':l['lng'],'accuracy':d['accuracy']}
    # Anything other than a clean 200/JSON/'OK' response falls through here.
    raise GeoLocateError('Unable to geolocate.')
def readFrom(cls, fileName):
    """Deserialize a tracked object from *fileName*.

    Returns None when the file does not exist or is empty; otherwise the
    JSON content decoded through the module-level 'decoder' object hook.
    """
    if not os.path.exists(fileName):
        return
    # Read the whole file directly: readlines() keeps the newline on each
    # line, so the previous "\n".join() doubled every line break.
    with open(fileName) as f:
        jsonstring = f.read()
    if jsonstring:
        return JSONDecoder(object_hook=decoder).decode(jsonstring)
def refresh_signer_token(self):
    """Obtain a new signer token from the cloud asset service.

    Raises Exception when the response cannot be decoded or reports an
    error; on success replaces self.signer_token.
    """
    url = "{}/token/{}".format(self.host, self.app_name)
    authorization_token = 'Bearer {}'.format(self.token)
    # The service expects the expiry as a unix-timestamp string.
    expired_at = datetime.now() + self.signer_token_expiry_duration
    expired_at_str = str(int(expired_at.timestamp()))
    resp = request('GET',
                   url,
                   headers={'Authorization': authorization_token},
                   params={'expired_at': expired_at_str})
    try:
        resp_dict = JSONDecoder().decode(resp.text)
    except Exception as ex:
        # Chain the decoding error instead of discarding it (the dead
        # 'resp_dict = None' pre-assignment was also removed).
        raise Exception(
            'Fail to decode the response from cloud asset') from ex
    resp_err = resp_dict.get('Error')
    if resp_err:
        raise Exception('Fail to get the signer token')
    self.signer_token = CloudAssetSignerToken.create(resp_dict)
def decode(self, s,):
    # Delegate plain JSON decoding to the base class first.
    o = JSONDecoder.decode(self, s)
    # If the document is a wrapper produced by JSONCodec, the real object
    # is a quoted-printable-encoded pickle stored under JSONCodec._obj.
    pickle_str = o.get(JSONCodec._obj, None)
    if pickle_str:
        #file_ver = o[JSONCodec._ver_key]
        # if file_ver != JSONCodec._ver:
        #     msg = 'Unsopported json-encoded version(%s != %s)!'
        #     raise ValueError(msg % (file_ver, JSONCodec._ver))
        # SECURITY(review): pickle.loads on externally supplied data can
        # execute arbitrary code -- only feed trusted input to this codec.
        pickle_bytes = binascii.a2b_qp(pickle_str.encode(
            encoding='utf8'))
        o = pickle.loads(pickle_bytes)
    return o
class JSONSerializer(CustomizableSerializer):
    """
    Serializes objects using JSON (JavaScript Object Notation).

    See the :mod:`json` module documentation in the standard library
    for more information on available options.

    Certain options can resolve references to objects:

    * ``encoder_options['default']``
    * ``decoder_options['object_hook']``
    * ``decoder_options['object_pairs_hook']``

    :param encoder_options: keyword arguments passed to :class:`~json.JSONEncoder`
    :param decoder_options: keyword arguments passed to :class:`~json.JSONDecoder`
    :param encoding: the text encoding to use for converting to and from bytes
    :param custom_type_codec: wrapper to use to wrap custom types after marshalling
    """

    __slots__ = ('encoder_options', 'decoder_options', 'encoding', 'custom_type_codec',
                 '_encoder', '_decoder', '_marshallers', '_unmarshallers')

    def __init__(self, encoder_options: Dict[str, Any] = None,
                 decoder_options: Dict[str, Any] = None, encoding: str = 'utf-8',
                 custom_type_codec: Union[JSONTypeCodec, str] = None) -> None:
        """Resolve any string-referenced hooks and build the JSON codecs."""
        assert check_argument_types()
        super().__init__(resolve_reference(custom_type_codec) or JSONTypeCodec())
        self.encoding = encoding
        self.encoder_options = encoder_options or {}
        # Hook options may be given as string references; resolve them to
        # actual callables before constructing the encoder/decoder.
        self.encoder_options['default'] = resolve_reference(self.encoder_options.get('default'))
        self._encoder = JSONEncoder(**self.encoder_options)
        self.decoder_options = decoder_options or {}
        self.decoder_options['object_hook'] = resolve_reference(
            self.decoder_options.get('object_hook'))
        self.decoder_options['object_pairs_hook'] = resolve_reference(
            self.decoder_options.get('object_pairs_hook'))
        self._decoder = JSONDecoder(**self.decoder_options)

    def serialize(self, obj) -> bytes:
        """Encode *obj* to JSON text, then to bytes in the configured encoding."""
        return self._encoder.encode(obj).encode(self.encoding)

    def deserialize(self, payload: bytes):
        """Decode a bytes payload back into Python objects."""
        text_payload = payload.decode(self.encoding)
        return self._decoder.decode(text_payload)

    @property
    def mimetype(self):
        # Standard media type for JSON payloads.
        return 'application/json'
def __init__(self, encoder_options: Union[Dict[str, Any], None] = None,
             decoder_options: Union[Dict[str, Any], None] = None,
             encoding: str = 'utf-8',
             custom_type_codec: Union[JSONTypeCodec, str, None] = None) -> None:
    """Configure the JSON serializer.

    :param encoder_options: keyword arguments for :class:`json.JSONEncoder`
    :param decoder_options: keyword arguments for :class:`json.JSONDecoder`
    :param encoding: text encoding used when converting to/from bytes
    :param custom_type_codec: codec (or reference to one) wrapping custom types
    """
    assert check_argument_types()
    super().__init__(resolve_reference(custom_type_codec) or JSONTypeCodec())
    self.encoding = encoding
    self.encoder_options = encoder_options or {}
    # Hook options may be given as string references; resolve them to
    # actual callables before constructing the encoder/decoder.
    self.encoder_options['default'] = resolve_reference(self.encoder_options.get('default'))
    self._encoder = JSONEncoder(**self.encoder_options)
    self.decoder_options = decoder_options or {}
    self.decoder_options['object_hook'] = resolve_reference(
        self.decoder_options.get('object_hook'))
    self.decoder_options['object_pairs_hook'] = resolve_reference(
        self.decoder_options.get('object_pairs_hook'))
    self._decoder = JSONDecoder(**self.decoder_options)
class DialogResource(Resource):
    """Twisted leaf resource that forwards dialog responses to a session."""
    IsLeaf = True

    def __init__(self, sm):
        # sm: session manager used to resolve session ids to sessions.
        self.sm = sm
        self.decoder = JSONDecoder()

    def render_POST(self, request):
        """Dispatch the posted dialog response to its session and return the
        session's reply as JSON; any error renders as a red error display.
        """
        try:
            session_id = request.args[ARG_SESSION_ID][0]
            session = self.sm.get_session(session_id)
            if not session:
                return LinkError(ERROR_SESSION_NOT_FOUND).json
            response = request.args[ARG_DIALOG_RESPONSE][0]
            return session.dialog_response(self.decoder.decode(response)).json
        except Exception:
            # BUG FIX: the bare 'except:' here also swallowed SystemExit
            # and KeyboardInterrupt; catch Exception instead.
            display = Display()
            display.append(DisplayLine(traceback.format_exc(), 0xff0000))
            return display.json
class Connector:
    """HTTPS client for the Project Oxford (Microsoft Face) v1.0 API."""

    def __init__(self, api_key):
        # api_key: the Ocp-Apim-Subscription-Key sent with every request.
        self.key = api_key
        self.host = "api.projectoxford.ai"
        self.base_url = "/face/v1.0/{}"  # endpoint path template
        self.encoder = JSONEncoder()
        self.decoder = JSONDecoder()

    def encode_json(self, dictionary):
        """ encodes dictionaries to json to send to API """
        return self.encoder.encode(dictionary)

    def decode_json(self, json):
        """ decodes json to a dictionary """
        return self.decoder.decode(json)

    def send_request(self, method, url, qs_args=None, headers=None, body=None):
        """ Sends a request to the API.

        method: HTTP verb.  url: endpoint fragment formatted into base_url.
        qs_args: querystring dict.  headers: extra headers.  body: raw
        bytes/file-like (sent as octet-stream) or a JSON-encodable object.
        Returns (decoded_data, response).
        """
        # Because having a dictionary as default value is dangerous
        if qs_args is None:
            qs_args = {}
        if headers is None:
            headers = {}
        # Check what content type header to include in the HTTP message
        if hasattr(body, "read") or isinstance(body, bytes):
            headers["Content-Type"] = "application/octet-stream"
        else:
            body = self.encode_json(body)
            headers["Content-Type"] = "application/json"
        connection = HTTPSConnection(self.host)
        # Format the url
        url = self.base_url.format(url)
        if len(qs_args) > 0:
            url += "?{}".format(urlencode(qs_args))
        # Add api-key to the headers
        headers["Ocp-Apim-Subscription-Key"] = self.key
        # Send the request
        connection.request(method, url, headers=headers, body=body)
        # Read the response and try to decode JSON
        response = connection.getresponse()
        data_bytes = response.read()
        data = data_bytes.decode()
        # TODO: Except data that is not JSON
        if len(data) > 0:
            data = self.decode_json(data)
        return data, response
def loads(s):
    """Decode the JSON document *s*, building objects via _object_hook.

    The explicit None hooks mean the decoder's defaults are used for
    floats, ints, constants and pairs.
    """
    # BUG FIX: the 'encoding' keyword was deprecated in Python 3.1 and
    # removed in 3.9 -- passing it makes JSONDecoder raise TypeError.
    decoder = JSONDecoder(object_hook=_object_hook,
                          parse_float=None,
                          parse_int=None,
                          parse_constant=None,
                          object_pairs_hook=None)
    return decoder.decode(s)
def __init__(self):
    # strict=False lets strings contain raw control characters.
    JSONDecoder.__init__(self, strict=False)
    # Rebuild scan_once so the scanner picks up this decoder's settings.
    self.scan_once = make_scanner(self)
class RedisStore():
    """Redis-backed object store: persists 'dbo' objects as JSON blobs,
    rebuilding them (including nested collections and references) on load,
    with an in-memory identity cache in object_map.
    """

    def __init__(self, dispatcher, db_host, db_port, db_num, db_pw):
        self.dispatcher = dispatcher
        pool = ConnectionPool(max_connections=2, db=db_num, host=db_host,
                              port=db_port, password=db_pw)
        self.redis = StrictRedis(connection_pool=pool)
        self.encoder = JSONEncoder()
        self.decoder = JSONDecoder()
        self.class_map = {}   # class path -> resolved class
        self.object_map = {}  # dbo_key -> cached live object

    def create_object(self, dbo, update_rev=False):
        """Persist a new object and fire its on_loaded hook."""
        self.save_object(dbo, update_rev)
        dbo.on_loaded()

    def save_object(self, dbo, update_rev=False, autosave=False):
        """Serialize *dbo* to redis (optionally bumping its revision)."""
        if update_rev:
            dbo.dbo_rev = getattr(dbo, "dbo_rev", 0) + 1
        json_obj = self.build_json(dbo)
        key = dbo.dbo_key
        self.redis.set(key, self.encoder.encode(json_obj))
        if dbo.dbo_set_key:
            self.redis.sadd(dbo.dbo_set_key, key)
        self.dispatcher.dispatch("db_log{0}".format("_auto" if autosave else ""), "object saved: " + key)
        self.object_map[dbo.dbo_key] = dbo

    def build_json(self, dbo):
        """Return the JSON-compatible dict representation of *dbo*."""
        dbo.before_save()
        json_obj = {}
        # Record the concrete class only when it differs from the base.
        if dbo.__class__ != dbo.dbo_base_class:
            json_obj["class_name"] = dbo.__module__ + "." + dbo.__class__.__name__
        for field_name in dbo.dbo_fields:
            json_obj[field_name] = getattr(dbo, field_name, None)
        for dbo_col in dbo.dbo_collections:
            coll_list = list()
            for child_dbo in getattr(dbo, dbo_col.field_name):
                # Keyed children are stored by id, others inline.
                if dbo_col.key_type:
                    coll_list.append(child_dbo.dbo_id)
                else:
                    coll_list.append(self.build_json(child_dbo))
            json_obj[dbo_col.field_name] = coll_list
        for dbo_ref in dbo.dbo_refs:
            ref = getattr(dbo, dbo_ref.field_name, None)
            if ref:
                json_obj[dbo_ref.field_name] = ref.dbo_id
        return json_obj

    def cache_object(self, dbo):
        # BUG FIX: the original body was a bare subscript expression
        # ('self.object_map[dbo.dbo_key]') that cached nothing.
        self.object_map[dbo.dbo_key] = dbo

    def load_cached(self, key):
        """Return the cached object for *key*, or None."""
        return self.object_map.get(key)

    def evict(self, dbo):
        """Drop *dbo* from the cache, logging when it was not present."""
        try:
            del self.object_map[dbo.dbo_key]
        except KeyError:
            # BUG FIX: narrowed from a bare 'except:'; only a missing key
            # is expected here.
            self.dispatcher.dispatch("db_log", "Failed to evict " + dbo.dbo_key + " from db cache")

    def load_by_key(self, key_type, key, base_class=None):
        """Load '<key_type>:<key>' from cache or redis; None when absent."""
        dbo_key = key_type + ":" + key
        cached_dbo = self.object_map.get(dbo_key)
        if cached_dbo:
            return cached_dbo
        json_str = self.redis.get(dbo_key)
        if not json_str:
            return None
        json_obj = self.decoder.decode(json_str)
        dbo = self.load_class(json_obj, base_class)(key)
        if dbo.dbo_key_type:
            self.object_map[dbo.dbo_key] = dbo
        self.load_json(dbo, json_obj)
        return dbo

    def load_class(self, json_obj, base_class):
        """Resolve the stored class path (memoized) or fall back to base_class."""
        class_path = json_obj.get("class_name")
        if not class_path:
            return base_class
        clazz = self.class_map.get(class_path)
        if clazz:
            return clazz
        split_path = class_path.split(".")
        module_name = ".".join(split_path[:-1])
        class_name = split_path[-1]
        module = __import__(module_name, globals(), locals(), [class_name])
        clazz = getattr(module, class_name)
        self.class_map[class_path] = clazz
        return clazz

    def load_object(self, dbo_class, key):
        """Convenience wrapper: load *key* using the class's own key type."""
        return self.load_by_key(dbo_class.dbo_key_type, key, dbo_class)

    def load_json(self, dbo, json_obj):
        """Populate *dbo*'s fields, collections and refs from *json_obj*."""
        for field_name in dbo.dbo_fields:
            try:
                setattr(dbo, field_name, json_obj[field_name])
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing field " + field_name)
        for dbo_col in dbo.dbo_collections:
            coll = getattr(dbo, dbo_col.field_name, [])
            try:
                for child_json in json_obj[dbo_col.field_name]:
                    if dbo_col.key_type:
                        # Stored as an id -- load (or fetch cached) by key.
                        child_dbo = self.load_by_key(dbo_col.key_type, child_json, dbo_col.base_class)
                    else:
                        # Stored inline -- instantiate and recurse.
                        child_dbo = self.load_class(child_json, dbo_col.base_class)()
                        self.load_json(child_dbo, child_json)
                    coll.append(child_dbo)
            except AttributeError:
                self.dispatcher.dispatch("db_log", "{0} json failed to load for coll {1} in {2}".format(child_json, dbo_col.field_name, dbo.dbo_id))
            except KeyError:
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing collection " + dbo_col.field_name)
        for dbo_ref in dbo.dbo_refs:
            try:
                ref_key = json_obj[dbo_ref.field_name]
                ref_obj = self.load_by_key(dbo_ref.key_type, ref_key, dbo_ref.base_class)
                setattr(dbo, dbo_ref.field_name, ref_obj)
            except Exception:
                # BUG FIX: narrowed from a bare 'except:' so SystemExit/
                # KeyboardInterrupt are no longer swallowed.
                self.dispatcher.dispatch("db_log", "db: Object " + dbo.dbo_key + " json missing ref " + dbo_ref.field_name)
        dbo.on_loaded()
        return True

    def delete_object(self, dbo):
        """Remove *dbo* (and keyed children) from redis and the cache."""
        key = dbo.dbo_key
        self.redis.delete(key)
        if dbo.dbo_set_key:
            self.redis.srem(dbo.dbo_set_key, key)
        for dbo_col in dbo.dbo_collections:
            if dbo_col.key_type:
                coll = getattr(dbo, dbo_col.field_name, set())
                for child_dbo in coll:
                    self.delete_object(child_dbo)
        self.dispatcher.dispatch("db_log", "object deleted: " + key)
        if self.object_map.get(dbo.dbo_key):
            del self.object_map[dbo.dbo_key]
        return True

    def fetch_set_keys(self, set_key):
        """Return all members of the redis set *set_key*."""
        return self.redis.smembers(set_key)
def geoip(ip=''):
    """Look up GeoIP data for *ip* (the caller's own address when empty).

    Returns the decoded JSON response, or None on a non-200 reply.
    """
    response = urlopen('http://smart-ip.net/geoip-json/%s' % ip)
    if response.code != 200:
        return None
    return JSONDecoder().decode(response.read())
def __init__(self, api_key):
    # api_key: the Ocp-Apim-Subscription-Key sent with every request.
    self.key = api_key
    self.host = "api.projectoxford.ai"
    self.base_url = "/face/v1.0/{}"  # endpoint path template
    # Reusable JSON codecs for request/response bodies.
    self.encoder = JSONEncoder()
    self.decoder = JSONDecoder()
def __init__(self, sm):
    # sm: session manager used to resolve session ids to sessions.
    self.sm = sm
    # Reused JSON decoder for parsing posted dialog responses.
    self.decoder = JSONDecoder()
class Trainer(object):
    # Trains the naive-bayes-style classifier: stores per-feature and
    # per-document category counts (as JSON) in the database.
    categories = {}

    def __init__(self):
        self._jsonDecoder = JSONDecoder()
        self._jsonEncoder = JSONEncoder()
        self.__featureExtractor = FeatureExtractor()

    def setFeatureExtractor(self, featureExtractor):
        # NOTE(review): this assigns self.__featuredExtractor (note the
        # extra 'd') while __init__ sets self.__featureExtractor; train()
        # reads the typo'd name, so it fails with AttributeError unless
        # this setter was called first -- confirm the intended attribute.
        self.__featuredExtractor = featureExtractor

    def __isNumeric(self, feature):
        # True when 'feature' parses as a float.
        isNumeric = False
        try:
            float(feature)
            isNumeric = True
        except ValueError:
            pass
        return isNumeric

    """ Given a list of yes category names, retrieves the yes/no category hash that the trainer needs """
    def __getCategoriesFromNames(self, yesTagNames, noTagNames):
        finalCategories = []
        # create the categories if they don't already exist
        for tagName in yesTagNames:
            if tagName:
                # Both polarities are created, but only 'yes' is collected.
                categoryYes, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=True)
                categoryNo, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=False)
                finalCategories.append(categoryYes)
        for tagName in noTagNames:
            if tagName:
                # Both polarities are created, but only 'no' is collected.
                categoryYes, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=True)
                categoryNo, _ = ClassifierCategory.objects.get_or_create(categoryName=tagName, yes=False)
                finalCategories.append(categoryNo)
        return finalCategories

    """ Trains a corpus of data. """
    @transaction.commit_manually
    def train(self, corpus="", yesTagNames=None, noTagNames=None):
        # Returns True on success (including the already-trained case);
        # rolls the transaction back and returns False on any error.
        logger = logging.getLogger("Trainer.train")
        success = False
        categories = []
        try:
            document = Document.getDocumentByCorpus(corpus)
            if not document:
                features = self.__featuredExtractor.getFeatures(corpus)
                categories = self.__getCategoriesFromNames(yesTagNames, noTagNames)
                document = Document(corpus=corpus)
                document.save()
                # Per-document category counts, serialized as JSON.
                documentCounts = {}
                for category in categories:
                    self.__incrementCategoryCount(documentCounts, category)
                DocumentCategoryCounts(document=document, countData=self._jsonEncoder.encode(documentCounts)).save()
                # Bump each feature's per-category counters.
                for feature in features:
                    featureCount, _ = FeatureCounts.objects.get_or_create(featureName=feature)
                    counts = self._jsonDecoder.decode(featureCount.countData) if featureCount.countData else {}
                    for category in categories:
                        self.__incrementCategoryCount(counts, category)
                    featureCount.countData = self._jsonEncoder.encode(counts)
                    featureCount.save()
                # We keep an index of category document counts for faster classification later on
                catDocCountIndex = CategoryDocumentCountIndex.getCountIndex()
                index = self._jsonDecoder.decode(catDocCountIndex.countData) if catDocCountIndex.countData else {}
                for category in categories:
                    self.__incrementCategoryCount(index, category)
                catDocCountIndex.countData = self._jsonEncoder.encode(index)
                catDocCountIndex.save()
                success = True
                transaction.commit()
            else:
                # Corpus already trained -- treat as success without retraining.
                logger.info("Document already exists: " + str(document.id) + " - " + document.corpusHash)
                success = True
        except Exception, ex:
            logger.info("Bad data:%s" % corpus)
            logger.exception("Failed to save the trained data: " + str(ex))
            transaction.rollback()
        return success
def __init__(self):
    # Reusable JSON codecs for (de)serializing stored count data.
    self._jsonDecoder = JSONDecoder()
    self._jsonEncoder = JSONEncoder()
    # Default feature extractor used when none is injected.
    self.__featureExtractor = FeatureExtractor()