def build_url(word, input_lang, output_lang, email):
    # Build the translation request URL; the optional e-mail is appended as the 'de' parameter.
    url = __url_template__.format(encode(word, 'utf-8'), input_lang, output_lang)
    if email is not None:
        url += '&de=' + email
    return url
def getResponseContent(self, url):
    try:
        response = urllib2.urlopen(encode(url, 'utf-8'))
    except:
        print 'Connection failed'
    else:
        print 'Connection succeeded'
        return response.read()
def encode(self, encoding="utf-8", errors="strict"):
    """Encode the string using the codec registered for encoding.

    encoding
        The encoding with which to encode the string.
    errors
        The error handling scheme to use for the handling of encoding errors.
        The default is 'strict' meaning that encoding errors raise a
        UnicodeEncodeError. Other possible values are 'ignore' and 'replace'
        as well as any other name registered with codecs.register_error that
        can handle UnicodeEncodeErrors.
    """
    return _codecs.encode(self, encoding=encoding, errors=errors)
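# A minimal usage sketch of the errors parameter described in the docstring above,
# calling the stdlib codecs module directly; the sample string is illustrative only.
import codecs

try:
    codecs.encode(u'café', 'ascii')                   # 'strict' raises UnicodeEncodeError
except UnicodeEncodeError as exc:
    print('strict failed: %s' % exc.reason)

print(codecs.encode(u'café', 'ascii', 'ignore'))      # b'caf'  : unencodable char dropped
print(codecs.encode(u'café', 'ascii', 'replace'))     # b'caf?' : unencodable char replaced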
def test_one_arg_encoder(self):
    import _codecs

    def search_function(encoding):
        def encode_one(u):
            return (b'foo', len(u))

        def decode_one(u):
            return (u'foo', len(u))

        if encoding == 'onearg':
            return (encode_one, decode_one, None, None)
        return None

    _codecs.register(search_function)
    assert u"hello".encode("onearg") == b'foo'
    assert b"hello".decode("onearg") == u'foo'
    assert _codecs.encode(u"hello", "onearg") == b'foo'
    assert _codecs.decode(b"hello", "onearg") == u'foo'
def parser_weather():
    # Scrape the Ulyanovsk weather archive page on rp5.ru and collect the
    # actual and "feels like" temperature values.
    weather_list = list()
    class_table = ['ArchiveTemp', 'ArchiveTempFealing']
    main_domain_stat = 'http://rp5.ru/%D0%9F%D0%BE%D0%B3%D0%BE%D0%B4%D0%B0_%D0%B2_%D0%A3%D0%BB%D1%8C%D1%8F%D0%BD%D0%BE%D0%B2%D1%81%D0%BA%D0%B5'
    page = html.parse(main_domain_stat)

    e = page.getroot().find_class(class_table[0])
    for i in e:
        a = i.find_class('t_0')
        weather_list.append(a[0].text_content())

    e = page.getroot().find_class(class_table[1])
    for i in e:
        a = i.find_class('t_0')
        q = encode(a[0].text_content()[:3])
        weather_list.append(q)

    return weather_list
def encode(self, encoding="utf-8", errors="strict"):
    """Encode the string using the codec registered for encoding.

    encoding
        The encoding with which to encode the string.
    errors
        The error handling scheme to use for the handling of encoding errors.
        The default is 'strict' meaning that encoding errors raise a
        UnicodeEncodeError. Other possible values are 'ignore' and 'replace'
        as well as any other name registered with codecs.register_error that
        can handle UnicodeEncodeErrors.
    """
    result = _codecs.encode(self, encoding=encoding, errors=errors)
    if not isinstance(result, bytes):
        if isinstance(result, bytearray):
            return bytes(result)
        raise TypeError(
            "'%s' encoder returned '%s' instead of 'bytes'; "
            "use codecs.encode() to encode to arbitrary types"
            % (encoding, type(result).__name__))
    return result
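# A small sketch of what the bytes check above guards against: codecs.encode() hands
# back whatever the codec's encoder produced, whereas the method above coerces a
# bytearray result to bytes and rejects other types. The 'demo_bytearray' codec and
# its search function are hypothetical, registered here only for illustration.
import codecs

def _search(name):
    if name != 'demo_bytearray':
        return None

    def enc(text, errors='strict'):
        # Encoder deliberately returns a bytearray rather than bytes.
        return bytearray(text, 'utf-8'), len(text)

    def dec(data, errors='strict'):
        return bytes(data).decode('utf-8'), len(data)

    return codecs.CodecInfo(enc, dec, name='demo_bytearray')

codecs.register(_search)

result = codecs.encode('hi', 'demo_bytearray')
print(type(result))   # <class 'bytearray'>: no coercion by codecs.encode()
print(bytes(result))  # b'hi': the coercion the method above performs explicitly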
def test_encode():
    ''' '''
    # sanity
    new_str = codecs.encode("abc")
    AreEqual(new_str, 'abc')
def msgToJSON(self, message, done=False):
    self.logger.debug('Encoding msg to json: message={}'.format(vars(message)))
    topic = message.delivery_info['routing_key']

    if message.body[0] == '[':
        # early v03 message to persist,
        (message.pubtime, message.baseurl, message.relpath, headers) = json.loads(message.body)
        notice = "%s %s %s" % (message.pubtime, message.baseurl, message.relpath)
    elif message.body[0] == '{':
        # late v03 message to persist,
        headers = json.loads(message.body)
        message.version = 'v03'
        message.pubtime = headers["pubTime"]
        message.baseurl = headers["baseUrl"]
        message.relpath = headers["relPath"]
        notice = "%s %s %s" % (message.pubtime, message.baseurl, message.relpath)

        if 'integrity' in headers.keys():
            # v3 has no sum, must add it here
            sum_algo_map = {
                "a": "arbitrary",
                "d": "md5",
                "s": "sha512",
                "n": "md5name",
                "0": "random",
                "L": "link",
                "R": "remove",
                "z": "cod"
            }
            sum_algo_map = {v: k for k, v in sum_algo_map.items()}
            sumstr = sum_algo_map[headers['integrity']['method']]
            if sumstr == '0':
                sumstr = '{},{}'.format(sumstr, headers['integrity']['value'])
            elif sumstr == 'z':
                sumstr = '{},{}'.format(
                    sumstr, sum_algo_map[headers['integrity']['value']])
            else:
                decoded_value = encode(
                    decode(headers['integrity']['value'].encode('utf-8'), 'base64'),
                    'hex').decode('utf-8').strip()
                sumstr = '{},{}'.format(sumstr, decoded_value)
            headers['sum'] = sumstr
            if sumstr == 'R':
                message.event = 'delete'
            elif sumstr == 'L':
                message.event = 'remove'
            else:
                message.event = 'modify'
            del headers['integrity']

        if 'size' in headers.keys():
            parts_map = {'inplace': 'i', 'partitioned': 'p'}
            if 'blocks' not in headers.keys():
                partstr = "%s,%s,%s,%s,%s" % ('1', headers['size'], '1', '0', '0')
            else:
                partstr = "%s,%s,%s,%s,%s" % (
                    parts_map[headers['blocks']['method']],
                    headers['blocks']['size'],
                    headers['blocks']['count'],
                    headers['blocks']['remainder'],
                    headers['blocks']['number'])
                del headers['blocks']
            del headers['size']
            headers['parts'] = partstr
    else:
        headers = message.properties['application_headers']
        if type(message.body) == bytes:
            notice = message.body.decode("utf-8")
        else:
            notice = message.body
        message.version = 'v02'
        if 'sum' in headers:
            sumstr = headers['sum'][0]
            if sumstr == 'R':
                message.event = 'delete'
            elif sumstr == 'L':
                message.event = 'remove'
            else:
                message.event = 'modify'

    if done:
        headers['_retry_tag_'] = 'done'

    return json.dumps([topic, headers, notice], sort_keys=True) + '\n'
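# The integrity handling above converts a base64-encoded digest into the hex string
# that the v02 'sum' header expects. A standalone sketch of that round trip, using
# codecs.encode/codecs.decode in place of the bare encode/decode helpers the method
# relies on; the SHA-512 digest below is illustrative only.
import codecs
import hashlib

b64_value = codecs.encode(hashlib.sha512(b'example payload').digest(),
                          'base64').decode('utf-8')

hex_value = codecs.encode(
    codecs.decode(b64_value.encode('utf-8'), 'base64'),
    'hex').decode('utf-8').strip()

print('s,' + hex_value)  # v02-style "sum" entry for a sha512 ("s") checksum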
def rln_modified(l):
    # Modified run-length encoding: runs of length 1 are collapsed back to the
    # bare element, longer runs stay as (count, element) pairs.
    def foo(tuple):
        return tuple[1] if tuple[0] == 1 else tuple
    return map(foo, encode(l))
    ob = Instance(cls.name, ())
    if state is not None:
        ob.__setstate__(state)
    return ob


def handle_set(args):
    [arg] = args
    if isinstance(arg, Put):
        arg = arg.v
    return sorted(arg)


special_classes = {
    'datetime.datetime': lambda args: datetime_(*args),
    'datetime.date': lambda args: datetime.date(dt_bytes(*args)).isoformat(),
    '_codecs.encode': lambda args: _codecs.encode(*args),
    'copy_reg._reconstructor': reconstruct,
    '__builtin__.frozenset': handle_set,
    '__builtin__.set': handle_set,
    'builtins.frozenset': handle_set,
    'builtins.set': handle_set,
    'decimal.Decimal': lambda args: float(args[0]),
}


def instance(global_, args):
    name = global_.name
    if name in special_classes:
        return special_classes[name](args)
    return Instance(name, args)