def decrypt_data():
    """Decrypt the incoming request's URI, headers and body in place.

    The gateway sends AES-encrypted values in the X-Api-Encrypted-Uri and
    X-Api-Encrypted-Headers request headers; the body, when present, is
    encrypted whole.  Mutates `request` directly.

    NOTE(review): closure — relies on `self`, `client`, `request` and
    `logger` from the enclosing scope; confirm where it is nested.
    """
    aes_cipher = AESCipher(client.secret_key)
    encrypted_uri = self.handler.request.headers.get('X-Api-Encrypted-Uri')
    if encrypted_uri:
        request.uri = aes_cipher.decrypt(utf8(encrypted_uri))
        logger.debug('decrypted uri %s' % request.uri)
        # The uri changed, so the query arguments must be regenerated.
        request.path, sep, request.query = request.uri.partition('?')
        request.arguments = parse_qs_bytes(request.query,
                                           keep_blank_values=True)
        request.query_arguments = copy.deepcopy(request.arguments)
    encrypted_headers = self.handler.request.headers.get(
        'X-Api-Encrypted-Headers')
    if encrypted_headers:
        headers_str = aes_cipher.decrypt(utf8(encrypted_headers))
        headers = dict(json_decode(headers_str))
        # logger.debug('raw headers %s' % request.headers)
        for k, v in iteritems(headers):
            # Force text_type for both key and value: a str/unicode mix
            # leads to 422 errors downstream.
            request.headers[text_type(k)] = text_type(v)
        # logger.debug('decrypted headers %s' % request.headers)
    if request.body and len(request.body) > 0:
        logger.debug('解密 body')
        logger.debug(request.body)
        request.body = aes_cipher.decrypt(utf8(request.body))
        # The body changed, so tornado must re-run its body parsing.
        request._parse_body()
def decrypt_data():
    """Decrypt the encrypted URI, headers and body of the current request.

    Older Python-2-only variant (dict.iteritems, json.loads); unlike its
    sibling it does not rebuild query arguments after replacing the uri.

    NOTE(review): closure — relies on `self`, `client`, `request` and
    `logger` from the enclosing scope; confirm where it is nested.
    """
    aes_cipher = AESCipher(client.secret_key)
    encrypted_uri = self.handler.request.headers.get(
        'X-Api-Encrypted-Uri')
    if encrypted_uri:
        request.uri = aes_cipher.decrypt(utf8(encrypted_uri))
        logger.debug('decrypted uri %s' % request.uri)
    encrypted_headers = self.handler.request.headers.get(
        'X-Api-Encrypted-Headers')
    if encrypted_headers:
        headers_str = aes_cipher.decrypt(utf8(encrypted_headers))
        headers = dict(json.loads(headers_str))
        logger.debug('raw headers %s' % request.headers)
        for k, v in headers.iteritems():
            # Force text_type for both key and value: a str/unicode mix
            # leads to 422 errors downstream.
            request.headers[text_type(k)] = text_type(v)
        logger.debug('decrypted headers %s' % request.headers)
    if request.body and len(request.body) > 0:
        logger.debug('解密 body')
        logger.debug(request.body)
        request.body = aes_cipher.decrypt(utf8(request.body))
def _extract_html_(rule, doc, url_list, result):
    """Recursively apply an extraction *rule* to an lxml element.

    A rule with 'children' groups the matched nodes into item dicts; a
    leaf rule extracts string values.  URL-typed values are appended to
    *url_list*; keyed values are stored in *result*.
    """
    tree = doc.getroottree()
    if rule.has_key('xpath'):
        node_list = doc.xpath(rule['xpath'])
        if rule.has_key('children'):
            # Composite rule: build one item dict per matched node.
            for node in node_list:
                item = {}
                for child in rule['children']:
                    _extract_html_(child, node, url_list, item)
                if item:
                    if rule.has_key('key'):
                        # Collect keyed items into a list under result[key].
                        if not result.has_key(rule['key']):
                            result[rule['key']] = []
                        result[rule['key']].append(item)
                    else:
                        result.update(item)
        else:
            # Leaf rule: matched nodes are expected to be strings.
            for node in node_list:
                node = node.strip()
                if rule.has_key('type') and rule['type'] == 'url':
                    # NOTE(review): `url` is not defined in this function's
                    # scope — presumably a module-level global (the page
                    # URL); verify, otherwise url-typed rules raise NameError.
                    url_list.append(
                        utils.utf8(
                            consume_absolute_url(
                                url,
                                consume_urlencode(
                                    node, tree.docinfo.encoding
                                )
                            )
                        )
                    )
                if rule.has_key('key'):
                    result[rule['key']] = utils.utf8(node)
def __request(self):
    """Send the buffered request over the stored connection and wrap the reply."""
    connection = self.__conn
    connection.request(
        utils.utf8(self.method),
        utils.utf8(self.uri),
        body=self.body,
        headers=self.headers,
    )
    return Response(connection.getresponse())
def urlencode(data):
    """Build an application/x-www-form-urlencoded style query string.

    Pairs whose value is None are skipped (treated as "not provided"
    rather than serialized as an empty value).

    :param data: mapping of parameter names to values.
    :returns: '&'-joined 'key=value' string; values are UTF-8 encoded and
        percent-escaped; keys are UTF-8 encoded but not escaped (kept
        from the original implementation — callers rely on raw keys).
    """
    params = []
    for key, value in data.items():
        # 'is None' instead of '== None': the identity test is the
        # correct idiom and avoids invoking a custom __eq__ on value.
        if value is None:
            continue
        params.append("%s=%s" % (utils.utf8(key),
                                 quote_plus(utils.utf8(value))))
    return '&'.join(params)
def ver(bot, update):
    """Show the activities of the user behind the callback query."""
    chat_id = update.callback_query.from_user.id
    bot.sendChatAction(chat_id=chat_id, action=ChatAction.TYPING)
    usuario = usuarios.getCollection().find_one({'chat_id': chat_id})
    nombre = utf8(usuario['nombre'])
    logger.info('Elegir ({}) {}'.format(nombre, update.callback_query.data))
    update.callback_query.edit_message_text(
        text="Actividades de {}!".format(nombre))
    return VER
def _response_string_to_sign(self, response_headers, request, response_body):
    """
    Return the canonical StringToSign as well as a dict
    containing the original version of all headers that
    were included in the StringToSign.
    """
    signable = self._response_headers_to_sign(response_headers)
    canonical = self._canonical_headers(signable)
    # Canonical layout: METHOD \n raw uri \n headers \n body.
    parts = [
        utf8(request.method.upper()),
        utf8(self.client.raw_uri),
        utf8(canonical),
        utf8(response_body),
    ]
    return b'\n'.join(parts)
def response_string_to_sign(self, response):
    """
    Return the canonical StringToSign as well as a dict
    containing the original version of all headers that
    were included in the StringToSign.
    """
    signable = self.response_headers_to_sign(response.headers)
    canonical = self.canonical_headers(signable)
    # Canonical layout: METHOD \n uri \n headers \n body.
    parts = [
        utf8(self.request_data.method.upper()),
        utf8(self.request_data.uri),
        utf8(canonical),
        utf8(response.content),
    ]
    return b'\n'.join(parts)
def h_line(self, i):
    """Render one parsed template line to a UTF-8 string.

    ``i[THING]`` is a sequence of either plain (byte/unicode) strings or
    parsed nodes; 'itpl' interpolation nodes are evaluated and optionally
    passed through the template filter.  Raises WTF on unknown node types.
    """
    out = []
    for x in i[THING]:
        if isinstance(x, (unicode, str)):
            out.append(utf8(x))
        elif x[WHAT] == 'itpl':
            # Evaluate the interpolation, applying the filter unless the
            # node explicitly disabled it.
            o = self.h(x[NAME])
            if x[FILTER]:
                o = self.filter(o)
            else:
                # None renders as the empty string, not the text "None".
                o = (o is not None and utf8(o)) or ""
            out.append(o)
        else:
            # Python 2 raise syntax; unknown node type is a template bug.
            raise WTF, x
    return ''.join(out)
def _request_string_to_sign(self, request):
    """
    Return the canonical StringToSign as well as a dict
    containing the original version of all headers that
    were included in the StringToSign.
    """
    signable = self._request_headers_to_sign(request)
    canonical = self._canonical_headers(signable)
    # Canonical layout: METHOD \n uri \n headers \n body.
    parts = [
        utf8(request.method.upper()),
        utf8(request.uri),
        utf8(canonical),
        utf8(request.body),
    ]
    return b'\n'.join(parts)
def response_string_to_sign(self, response):
    """
    Return the canonical StringToSign as well as a dict
    containing the original version of all headers that
    were included in the StringToSign.
    """
    signable = self.response_headers_to_sign(response.headers)
    canonical = self.canonical_headers(signable)
    return b'\n'.join(
        [utf8(self.request_data.method.upper()),
         utf8(self.request_data.uri),
         utf8(canonical),
         utf8(response.content)])
def signature_request(self):
    """Sign the canonical request string (SHA-1 digest, then HMAC)."""
    string_to_sign = self.string_to_sign()
    # Not logged here: printing a non-unicode string can raise.
    digest = sha1(utf8(string_to_sign)).hexdigest()
    return self.sign_string(digest)
def check_response(self, response):
    """Validate the timestamp and HMAC signature of a gateway response.

    :param response: requests-style response with a mutable ``headers``
        mapping.
    :returns: True when X-Api-Timestamp is fresh and X-Api-Signature
        matches our own computation; False otherwise.
    """
    logger.debug(response.headers)
    try:
        timestamp = int(response.headers.get('X-Api-Timestamp'))
    except (TypeError, ValueError):
        # TypeError covers a missing header (int(None)); ValueError a
        # malformed one.  The original caught only ValueError, so a
        # missing header crashed instead of failing the check.
        logger.debug('Invalid X-Api-Timestamp Header')
        return False
    now_ts = int(time.time())
    if abs(timestamp - now_ts) > self.signature_expire_seconds:
        logger.debug('Expired signature, timestamp: %s' % timestamp)
        logger.debug('Expired Signature')
        return False
    signature = response.headers.get('X-Api-Signature')
    if signature:
        # Remove it so it is not part of the headers we sign ourselves.
        del response.headers['X-Api-Signature']
    else:
        logger.debug('No signature provide')
        return False
    string_to_sign = self.response_string_to_sign(response)
    logger.debug(string_to_sign)
    # Not decoded for logging: printing a non-unicode string can raise.
    hash_value = sha256(utf8(string_to_sign)).hexdigest()
    real_signature = self.sign_string(hash_value)
    if signature != real_signature:
        logger.debug('Signature not match: %s, %s' %
                     (signature, real_signature))
        return False
    else:
        return True
def post(self, uri, data=None, json=None, params=None, headers=None,
         **kwargs):
    """POST through the API gateway: optional AES encryption plus signing.

    Builds the request, encrypts uri/headers/body when encrypt_type is
    'aes', signs it, sends it with ``requests``, then verifies the
    response signature and decrypts the payload when the gateway marked
    it encrypted.  Returns the (possibly mutated) requests Response.
    """
    url = self.prepare_request('POST', uri, params=params, data=data,
                               json=json, headers=headers)
    if self.encrypt_type == 'aes':
        # Encryption rewrites request_data in place and yields a new URL.
        url = self.encrypt_data()
    self.request_data.headers.update(self.get_auth_headers())
    logger.debug(self.request_data.headers)
    signature = self.signature_request()
    self.request_data.headers['X-Api-Signature'] = signature
    r = requests.post(url, headers=self.request_data.headers,
                      data=utf8(self.request_data.body), **kwargs)
    logger.debug(url)
    logger.debug(self.request_data.headers)
    if r.status_code != GATEWAY_ERROR_STATUS_CODE:
        # Only verify signatures of non-gateway-error responses; a bad
        # signature is logged, not raised.
        is_valid = self.check_response(r)
        if not is_valid:
            logger.debug('返回结果签名不正确')
    r_encrypt_type = r.headers.get('x-api-encrypt-type', 'raw')
    if r_encrypt_type == 'aes':
        # Replace the cached response content with the decrypted payload.
        r._content = self.decrypt_data(r.content)
    return r
def auth_request(self, request):
    """Check timestamp freshness and HMAC signature of an incoming request.

    :param request: request object with a mutable ``headers`` mapping.
    :raises AuthRequestException: on a missing/invalid timestamp, an
        expired timestamp, a missing signature, or a signature mismatch.
    """
    try:
        timestamp = int(request.headers.get('X-Api-Timestamp'))
    except (TypeError, ValueError):
        # TypeError covers a missing header (int(None)); the original
        # caught only ValueError, letting a missing header escape as an
        # unhandled TypeError instead of an auth failure.
        raise AuthRequestException('Invalid X-Api-Timestamp Header')
    now_ts = int(time.time())
    if abs(timestamp - now_ts) > settings.SIGNATURE_EXPIRE_SECONDS:
        logger.debug('Expired signature, timestamp: %s' % timestamp)
        raise AuthRequestException('Expired Signature')
    signature = request.headers.get('X-Api-Signature')
    if signature:
        # Strip it so it does not participate in our own signing.
        del request.headers['X-Api-Signature']
    else:
        logger.debug('No Signature Provided')
        raise AuthRequestException('No Signature Provided')
    string_to_sign = self._request_string_to_sign(request)
    # Not decoded for logging: printing a non-unicode string can raise.
    hash_value = sha256(utf8(string_to_sign)).hexdigest()
    real_signature = self.sign_string(hash_value)
    if signature != real_signature:
        logger.debug('Signature not match: %s, %s' %
                     (signature, real_signature))
        raise AuthRequestException('Invalid Signature')
def __request(self):
    """Issue the HTTP request with all header names/values UTF-8 encoded."""
    def _encode_headers(headers):
        encoded = {}
        for name, val in headers.items():
            encoded[utils.utf8(name)] = utils.utf8(val)
        return encoded

    connection = self.__conn
    connection.request(utils.utf8(self.method),
                       utils.utf8(self.uri),
                       body=utils.utf8(self.body),
                       headers=_encode_headers(self.headers))
    return Response(connection.getresponse())
def header(hdr, value, unique=False):
    """
    Adds the header `hdr: value` with the response.

    If `unique` is True and a header with that name already exists,
    it doesn't add a new one.
    """
    hdr, value = utf8(hdr), utf8(value)
    # protection against HTTP response splitting attack
    if '\n' in hdr or '\r' in hdr or '\n' in value or '\r' in value:
        raise ValueError, 'invalid characters in header'
    if unique is True:
        # Case-insensitive duplicate check: bail out without appending.
        for h, v in ctx.headers:
            if h.lower() == hdr.lower():
                return
    ctx.headers.append((hdr, value))
def render(template, params):
    """Render a Jinja template from the _mobi directory as UTF-8 bytes."""
    env = Environment(
        loader=FileSystemLoader([os.path.join(ROOT, '_mobi')]),
        autoescape=False,
    )
    env.filters['xmldatetime'] = xmldatetime
    return utf8(env.get_template(template).render(params))
def signature_request(self):
    """Sign the canonical request string (SHA-256 digest, then HMAC)."""
    string_to_sign = self.string_to_sign()
    logger.debug(string_to_sign)
    # Not decoded for logging: printing a non-unicode string can raise.
    digest = sha256(utf8(string_to_sign)).hexdigest()
    return self.sign_string(digest)
def encrypt_data(body):
    """AES-encrypt the outgoing body and swap it into the handler's write buffer."""
    logger.debug('使用 AES 加密 body')
    cipher = AESCipher(client.secret_key)
    encrypted = cipher.encrypt(utf8(body))
    # Replace the plaintext response with the ciphertext and flag it.
    self.handler.clear_write_buffer()
    self.handler.write(encrypted)
    self.handler.set_header('X-Api-Encrypt-Type', 'aes')
def instance_url(self, uid=None):
    """Build the REST URL for this instance.

    :param uid: fallback identifier, used only when the instance itself
        has no 'uid' entry.
    :raises ValueError: when neither the argument nor the stored 'uid'
        is available.
    """
    if not uid and not self.get('uid'):
        raise ValueError(
            'Could not determine which URL to request: %s instance '
            'has invalid ID: %r' % (type(self).__name__, uid), 'id')
    # NOTE(review): self.get('uid', uid) prefers the STORED uid over the
    # explicit argument — verify that this precedence is what callers expect.
    uid = utils.utf8(self.get('uid', uid))
    extn = urllib.quote_plus(uid)
    return "%s%s/" % (self._class_url(), extn)
def signature_response(self, response_header, request, response_body):
    """Sign a response: SHA-256 the canonical string, then HMAC it."""
    string_to_sign = self._response_string_to_sign(
        response_header, request, response_body)
    # Not decoded for logging: printing a non-unicode string can raise.
    digest = sha256(utf8(string_to_sign)).hexdigest()
    return self.sign_string(digest)
def proyecto_elegido(bot, update, user_data): bot.sendChatAction(chat_id=update.callback_query.from_user.id, action=ChatAction.TYPING) # Es la primera vez que entra o cambia tipo de tareas? user_data['tipo_tarea'] = '#Unresolved' if not user_data.get('proyecto'): user_data['proyecto'] = update.callback_query.data logger.info('Elegir Proyecto Opción {}'.format(user_data['proyecto'])) else: user_data['tipo_tarea'] = update.callback_query.data logger.info('Elegir Proyecto Opción {} {}'.format( user_data['proyecto'], user_data['tipo_tarea'])) connection = Connection(user_data['host']['host'], user_data['host']['username'], user_data['host']['pass']) username, email = splitEmail(user_data['host']['username']) query = 'Type: Task and {} and ( Assignee: {} or #Unassigned )'.format( user_data['tipo_tarea'], username) issues = connection.getIssues(user_data['proyecto'], query, 0, 20) keyboard = [] texto = '*Tareas:* \n ' for issue in issues: texto += '\n *[{}]* _{}, {}_\n *Prioridad:* _{}_\n *Resumen:* {} \n'.format( issue['id'], issue['Type'], issue['State'], issue['Priority'], escapeMarkdown(utf8(issue['summary']))) keyboard.append( InlineKeyboardButton(issue['id'], callback_data=issue['id'])) # Agrego posibilidad de ver otras tareas if user_data['tipo_tarea'] == '#Unresolved': keyboard.append( InlineKeyboardButton('Ver solucionadas', callback_data='#Resolved')) else: keyboard.append( InlineKeyboardButton('Ver no solucionadas', callback_data='#Unresolved')) # Acomodo el teclado keyboard = [keyboard[i:i + 3] for i in range(0, len(keyboard), 3)] reply_markup = InlineKeyboardMarkup(keyboard, resize_keyboard=True, one_time_keyboard=True) texto += '\n *Elegí la tarea:*' if len(keyboard) > 0: update.callback_query.edit_message_text(text=texto, reply_markup=reply_markup, parse_mode='Markdown') return ISSUE else: update.callback_query.edit_message_text( text="No hay tareas asignadas a vos! Chau") return ConversationHandler.END
def signature_response(self, response_header, request, response_body):
    """Compute the response signature: SHA-256 digest HMAC-signed."""
    canonical = self._response_string_to_sign(response_header, request,
                                              response_body)
    # Not decoded for logging: printing a non-unicode string can raise.
    return self.sign_string(sha256(utf8(canonical)).hexdigest())
def urlencode(query):
    """
    Same as urllib.urlencode, but supports unicode strings.

        >>> urlencode({'text':'foo bar'})
        'text=foo+bar'
    """
    encoded = dict((k, utils.utf8(v)) for k, v in query.items())
    return urllib.urlencode(encoded)
def consume_filter(options, **parms):
    """Filter a crawled link list before dispatch.

    Active chain: drop javascript: pseudo-links, then drop URLs that
    already have a dispatch state in redis (marking the survivors).
    Returns the remaining URLs, or [] when the payload is invalid.
    """
    def _filter_js(options, links):
        # Drop javascript: pseudo-links.
        urls = []
        for url in links:
            if not url.startswith('javascript:'):
                urls.append(url)
        return urls

    def _filter_site(options, links):
        # Keep only links whose top-level domain is in the redis
        # 'sites_allowed' set.  NOTE: defined but not in the active
        # filter chain below.
        urls = []
        def _get_tld(url):
            try:
                return utils.utf8(tld.get_tld(url))
            except:
                return None
        redisdb = options['redis']
        links = map(lambda x: (x, _get_tld(x)), links)
        sites = set(filter(
            lambda x: x is not None,
            map(lambda x: x[1], links)
            )
        )
        site_states = redisdb.smembers('sites_allowed') & sites
        for url, site in links:
            if site in site_states:
                urls.append(url)
        return urls

    def _filter_state(options, links):
        # Keep only URLs with no recorded dispatch state; mark the kept
        # ones as dispatched with an expiring redis key.
        urls = []
        redisdb = options['redis']
        now = time.time()
        url_states = redisdb.mget(links) if len(links) > 0 else []
        with redisdb.pipeline() as pipe:
            for i, url in enumerate(links):
                if url_states[i] is None:
                    urls.append(url)
                    pipe.setex(url, json.dumps({'dispatch_time': now}),
                               options.get('urlstate.expire', 24*3600))
            pipe.execute()
        return urls

    if consume_isvalid(**parms):
        links = map(lambda x: string.strip(utils.utf8(x)),
                    parms['data']['links'])
        for _filter in [_filter_js, _filter_state]:
            links = list(_filter(options, links))
        return links
    return []
def encrypt_data(self):
    """AES-encrypt headers, URI and body; return the new request URL."""
    cipher = AESCipher(self.secret_key)
    headers_str = json_util.dumps(self.request_data.headers)
    # Replace the real headers/URI with their encrypted carriers.
    self.request_data.headers = {
        'Content-Type': 'application/octet-stream',
        'X-Api-Encrypted-Headers': cipher.encrypt(utf8(headers_str)),
        'X-Api-Encrypted-Uri': cipher.encrypt(utf8(self.request_data.uri)),
    }
    # Fresh URI carrying only a timestamp and a nonce.
    self.request_data.uri = '/?_t=%d&_nonce=%s' % \
        (int(time.time()), text_type(random.random()))
    url = self.api_server.strip() + self.request_data.uri
    body = self.request_data.body
    if body is not None and len(body) > 0:
        self.request_data.body = cipher.encrypt(utf8(body))
        logger.debug(self.request_data.body)
    return url
def wsgifunc(self, *middleware, **kw):
    """Returns a WSGI-compatible function for this application."""
    def peep(iterator):
        """Peeps into an iterator by doing an iteration
        and returns an equivalent iterator.
        """
        # wsgi requires the headers first
        # so we need to do an iteration
        # and save the result for later
        try:
            firstchunk = iterator.next()
        except StopIteration:
            firstchunk = ""
        return itertools.chain([firstchunk], iterator)

    def is_generator(x):
        # Duck-typed generator check (Python 2: generators have .next).
        return x and hasattr(x, "next")

    def wsgi(env, start_resp):
        self.load(env)
        try:
            # allow uppercase methods only
            if web.ctx.method.upper() != web.ctx.method:
                raise web.nomethod()
            result = self.handle_with_processors()
        except web.HTTPError, e:
            result = e.data
        if is_generator(result):
            result = peep(result)
        else:
            result = [utils.utf8(result)]
        status, headers = web.ctx.status, web.ctx.headers
        start_resp(status, headers)
        # @@@
        # Since the CherryPy Webserver uses thread pool, the thread-local state is never cleared.
        # This interferes with the other requests.
        # clearing the thread-local storage to avoid that.
        # see utils.ThreadedDict for details
        import threading
        t = threading.currentThread()
        if kw.get("cleanup_threadlocal", True) and hasattr(t, "_d"):
            del t._d
        return result
    # NOTE(review): the middleware wrapping / `return wsgi` tail appears
    # to fall outside this chunk — confirm against the full file.
def encrypt_data(self):
    """AES-encrypt the pending request's headers, URI and body.

    Returns the full URL to POST to (api_server + replacement URI).
    """
    cipher = AESCipher(self.secret_key)
    serialized_headers = json_util.dumps(self.request_data.headers)
    encrypted_headers = cipher.encrypt(utf8(serialized_headers))
    encrypted_uri = cipher.encrypt(utf8(self.request_data.uri))
    # The real headers/URI travel encrypted inside carrier headers.
    self.request_data.headers = {
        'Content-Type': 'application/octet-stream',
        'X-Api-Encrypted-Headers': encrypted_headers,
        'X-Api-Encrypted-Uri': encrypted_uri,
    }
    # Replacement URI carries only a timestamp and a nonce.
    self.request_data.uri = '/?_t=%d&_nonce=%s' % (
        int(time.time()), text_type(random.random()))
    url = self.api_server.strip() + self.request_data.uri
    body = self.request_data.body
    if body is not None and len(body) > 0:
        self.request_data.body = cipher.encrypt(utf8(body))
        logger.debug(self.request_data.body)
    return url
def serialize(self, request, content_type, default_serializers=None):
    """Serializes the wrapped object.

    Utility method for serializing the wrapped object.  Returns a
    webob.Response object.
    """
    serializer = self.serializer
    if not serializer:
        # No explicit serializer: look one up by content type.
        _mtype, serializer_cls = self.get_serializer(content_type,
                                                     default_serializers)
        serializer = serializer_cls()

    response = webob.Response()
    response.status_int = self.code
    for name, value in self._headers.items():
        response.headers[name] = utils.utf8(str(value))
    response.headers['Content-Type'] = utils.utf8(content_type)
    if self.obj is not None:
        response.body = serializer.serialize(self.obj)
    return response
def wsgifunc(self, *middleware, **kw):
    """Returns a WSGI-compatible function for this application."""
    def peep(iterator):
        """Peeps into an iterator by doing an iteration
        and returns an equivalent iterator.
        """
        # wsgi requires the headers first
        # so we need to do an iteration
        # and save the result for later
        try:
            firstchunk = iterator.next()
        except StopIteration:
            firstchunk = ''
        return itertools.chain([firstchunk], iterator)

    def is_generator(x):
        # Duck-typed generator check (Python 2: generators have .next).
        return x and hasattr(x, 'next')

    def wsgi(env, start_resp):
        self.load(env)
        try:
            # allow uppercase methods only
            if web.ctx.method.upper() != web.ctx.method:
                raise web.nomethod()
            result = self.handle_with_processors()
        except web.HTTPError, e:
            result = e.data
        if is_generator(result):
            result = peep(result)
        else:
            result = [utils.utf8(result)]
        status, headers = web.ctx.status, web.ctx.headers
        start_resp(status, headers)
        #@@@
        # Since the CherryPy Webserver uses thread pool, the thread-local state is never cleared.
        # This interferes with the other requests.
        # clearing the thread-local storage to avoid that.
        # see utils.ThreadedDict for details
        import threading
        t = threading.currentThread()
        if kw.get('cleanup_threadlocal', True) and hasattr(t, '_d'):
            del t._d
        return result
    # NOTE(review): the middleware wrapping / `return wsgi` tail appears
    # to fall outside this chunk — confirm against the full file.
def set_cookie(self, name, value, domain=None, expires=None, path="/",
               expires_days=None):
    """Sets the given cookie name/value with the given options."""
    name, value = utils.utf8(name), utils.utf8(value)
    if re.search(r"[\x00-\x20]", name + value):
        # Don't let us accidentally inject bad stuff
        raise ValueError("Invalid cookie %r: %r" % (name, value))
    parts = ['%s=%s' % (name, value)]
    if domain:
        parts.append('domain=%s' % domain)
    # expires_days is a convenience shortcut for an absolute expiry.
    if expires_days is not None and not expires:
        expires = datetime.datetime.utcnow() + datetime.timedelta(
            days=expires_days)
    if expires:
        timestamp = calendar.timegm(expires.utctimetuple())
        parts.append('expires=%s' % email.utils.formatdate(
            timestamp, localtime=False, usegmt=True))
    if path:
        parts.append('path=%s' % path)
    self.response.headers.add_header('Set-Cookie', '; '.join(parts))
def decrypt_data(self, body):
    """AES-decrypt *body*; return the plaintext, or None on any error."""
    try:
        cipher = AESCipher(self.secret_key)
        if not body or len(body) == 0:
            # Nothing to decrypt: hand the (empty) body straight back.
            return body
        logger.debug('解密 body')
        logger.debug(body.encode('hex'))
        return cipher.decrypt(utf8(body))
    except Exception as e:
        logger.error('解密数据出错')
        logger.error(e)
        logger.error(traceback.format_exc())
        return None
def get_fmessages(_s):
    """Render queued flash messages as UTF-8 <li> items (empty when none)."""
    flashes = _s.ssn.getFlashes()
    if not flashes:
        return u.utf8(u'')
    tmpl = Template(
        '{%- if fmessages -%}'
        '{%- for fmsg in fmessages -%}'
        '<li>{{ fmsg.0 }}</li>'
        '{%- endfor -%}'
        '{%- endif -%}'
    )
    return u.utf8(tmpl.render(fmessages=flashes))
def setcookie(name, value, expires="", domain=None, secure=False):
    """Sets a cookie."""
    # Python 2 quirk: the default expires is "" and '"" < 0' is False
    # (str compares greater than int), so this branch only fires for an
    # explicit negative number, i.e. "delete this cookie immediately".
    if expires < 0:
        expires = -1000000000
    kargs = {'expires': expires, 'path':'/'}
    if domain:
        kargs['domain'] = domain
    if secure:
        kargs['secure'] = secure
    # @@ should we limit cookies to a different path?
    cookie = Cookie.SimpleCookie()
    cookie[name] = urllib.quote(utf8(value))
    for key, val in kargs.iteritems():
        cookie[name][key] = val
    # The SimpleCookie holds exactly one morsel; emit its serialized form.
    header('Set-Cookie', cookie.items()[0][1].OutputString())
def cookies(*requireds, **defaults):
    """
    Returns a `storage` object with all the cookies in it.
    See `storify` for how `requireds` and `defaults` work.
    """
    cookie = Cookie.SimpleCookie()
    cookie.load(ctx.env.get('HTTP_COOKIE', ''))
    try:
        d = storify(cookie, *requireds, **defaults)
        # Cookie values arrive percent-encoded; decode them in place.
        for k, v in d.items():
            d[k] = urllib.unquote(utf8(v))
        return d
    except KeyError:
        # A required cookie is missing: answer 400 and abort handling.
        badrequest()
        raise StopIteration
def consume_extract(mongodb, **parms):
    """Dispatch page extraction to the handler matching the template type."""
    extractors = {
        'html': consume_extract_html,
        'json': consume_extract_json,
        'xml': consume_extract_xml,
        'text': consume_extract_text,
    }
    url = utils.utf8(parms['wget']['url'])
    template = consume_get_template(mongodb, parms['id'])
    if template and template.typ in extractors:
        page = base64.b64decode(parms['data']['page'])
        return extractors[template.typ](template, url, page)
    return None, None
def decrypt_data(self, body):
    """AES-decrypt *body*; return UTF-8 bytes, or None when decryption fails."""
    try:
        cipher = AESCipher(self.secret_key)
        if body and len(body) > 0:
            logger.debug('解密 body')
            body = cipher.decrypt(utf8(body))
    except Exception as e:
        logger.error('解密数据出错')
        logger.error(e)
        logger.error(traceback.format_exc())
        return None
    # requests exposes .content as bytes; mirror that by encoding text.
    if isinstance(body, text_type):
        body = body.encode('utf-8')
    return body
def get_locale_from_accept_header(request, localeTags):
    """
    Detect a locale from request.header 'Accept-Language'
    The locale with the highest quality factor (q) that most nearly
    matches our config.locales is returned.
    rh: webapp2.RequestHandler

    Note that in the future if all User Agents adopt the convention of
    sorting quality factors in descending order then the first can be
    taken without needing to parse or sort the accept header, leading
    to increased performance.
    (see http://lists.w3.org/Archives/Public/ietf-http-wg/2012AprJun/0473.html)

    NOTE(review): the `localeTags` parameter is unused — negotiation uses
    u.config('locales') instead; confirm whether that is intended.
    """
    header = request.headers.get("Accept-Language", '')
    parsed = parse_accept_language_header(header)
    if parsed is None:
        return None
    # Highest quality factor first (Python 2 tuple-parameter lambda).
    pairs_sorted_by_q = sorted(parsed.items(),
                               key=lambda (lang, q): q, reverse=True)
    locale = Locale.negotiate(
        [lang for (lang, q) in pairs_sorted_by_q]
        , u.config('locales')
        , sep='_'
    )
    return u.utf8(locale)
def get (_s):
    """Cookie round-trip landing handler.

    If the session cookie came back, record the round-trip time once and
    redirect to nextUrl; otherwise log the user agent and serve the
    cookies-disabled page.
    """
    if _s.ssn:
        # ok - so there is a cookie in there now
        if not 'rtt' in _s.ssn:
            _s.ssn['rtt'] = u.msNow() - _s.ssn['ts']  # rtt: round trip time
            logging.debug ('rtt set in ssn')
        url = _s.request.get('nextUrl')
        _s.redirect (u.utf8(url))  # go back to the page you first thought of
    else:
        ua = _s.request.headers['User-Agent']
        res = ua_parse(ua)
        #client = u.utf8(res)
        logging.info('cookies disabled on user agent: %s', res)
        # todo put links in page with cookie enabling advice appropriate to the browser
        # "your browser platform is {{client}}"
        # "click here to learn how to enable cookies on {{client}}"
        _s.serve ('nocookie.html')
def write_file(self, db, collection, data, content_type='', hash_id=False):
    """Store *data* in GridFS; with hash_id, deduplicate by md5 and refcount.

    Tornado coroutine (yield + gen.Return).  "Returns" the GridFS file
    _id via gen.Return.
    """
    fs = motor.motor_tornado.MotorGridFS(db, collection=collection)
    content = BytesIO(utf8(data))
    if not hash_id:
        _id = yield fs.put(content, content_type=content_type)
        logger.debug(_id)
    else:
        md5 = hashlib.md5(content.getvalue()).hexdigest()
        # file_name = hashlib.sha1(content.getvalue()).digest().encode("base64").rstrip('\n')
        # TODO under concurrency two files with the same md5 can both be
        # stored here (find_one + put is not atomic).
        grid_out = yield fs.find_one({'md5': md5})
        if not grid_out:
            _id = yield fs.put(content, content_type=content_type)
        else:
            # Reuse the existing file: the refcount _id equals the file _id.
            _id = grid_out._id
        logger.debug(_id)
        logger.debug(collection)
        # Bump the reference count for this file.
        yield db['ref_%s' % collection].update({'_id': _id},
                                               {'$inc': {'count': 1}},
                                               upsert=True)
    raise gen.Return(_id)
def __init__(self, method, url, headers=None, data=None, files=None,
             debug=False, cookies=None):
    """Prepare an HTTP(S) connection for *url* and stash the request state.

    :param method: HTTP verb.
    :param url: absolute http(s) URL (asserted).
    :param headers: optional dict of request headers.
    :param data: optional form data (defaults to {}).
    :param files: optional file payloads.
    :param debug: when truthy, enables httplib debug output.
    :param cookies: optional dict registered with cookie_manager.
    """
    assert url.startswith('http')
    url = utils.utf8(url)
    self.url = url
    self.method = method
    self.data = data or {}
    self.files = files
    self.body = None
    cookies = cookies or {}
    for name, value in cookies.items():
        cookie_manager.set_cookie(name, value)
    _split_url = httplib.urlsplit(url)
    self.host = _split_url.netloc
    self.uri = _split_url.path
    if _split_url.query:
        self.uri += '?' + _split_url.query
    # Pick the connection class matching the URL scheme.
    if _split_url.scheme == 'https':
        Connection = httplib.HTTPSConnection
    else:
        Connection = httplib.HTTPConnection
    self.__conn = Connection(host=self.host)
    self.__conn.set_debuglevel(debug and logging.DEBUG or 0)
    self.headers = headers or {}
    # NOTE(review): passes the raw `headers` argument (possibly None),
    # not the normalized self.headers — confirm generate_header handles None.
    self.generate_header(headers)
def filter(self, text):
    """Return *text* UTF-8 encoded; None becomes the empty string."""
    return '' if text is None else utf8(text)
def open(self, url, data=None, headers=None):
    """Opens the specified url.

    :param url: absolute or relative URL (joined against self.url).
    :param data: optional request body passed through to urllib2.
    :param headers: optional dict of extra request headers.
    """
    # None sentinel instead of a mutable {} default: a shared dict
    # default is reused across calls and can leak state between them.
    if headers is None:
        headers = {}
    url = urllib.basejoin(self.url, url)
    req = urllib2.Request(utf8(url), data, headers)
    return self.do_request(req)
def convert(value, doseq=False):
    """UTF-8 encode a value, or each item of a list when doseq is set."""
    if not (doseq and isinstance(value, list)):
        return utils.utf8(value)
    return [convert(item) for item in value]
def proyecto_elegido(bot, update, user_data):
    """Telegram handler: list the chosen project's issues, paginated.

    Callback data drives the state machine: a project name on first
    entry, '<' / '>' for page navigation, otherwise a task-type filter.
    Returns the next conversation state.
    """
    bot.sendChatAction(chat_id=update.callback_query.from_user.id,
                       action=ChatAction.TYPING)
    # First entry, page navigation, or a switch of the task-type filter?
    if not user_data.get('proyecto'):
        user_data['proyecto'] = update.callback_query.data
        user_data['pagging'] = [0, max_issues_per_page]
        user_data['tipo_tarea'] = '#{Sin resolver}'
        logger.info('Elegir Proyecto Opción {}'.format(user_data['proyecto']))
    elif update.callback_query.data == '>':
        user_data['pagging'] = [
            user_data['pagging'][0] + max_issues_per_page,
            user_data['pagging'][1] + max_issues_per_page
        ]
    elif update.callback_query.data == '<':
        user_data['pagging'] = [
            user_data['pagging'][0] - max_issues_per_page,
            user_data['pagging'][1] - max_issues_per_page
        ]
    else:
        user_data['tipo_tarea'] = update.callback_query.data
        user_data['pagging'] = [0, max_issues_per_page]
        logger.info('Elegir Proyecto Opción {} {}'.format(
            user_data['proyecto'], user_data['tipo_tarea']))
    logger.info('paginas {}/{}'.format(user_data['pagging'][0],
                                       user_data['pagging'][1]))
    connection = Connection(user_data['host']['host'],
                            user_data['host']['username'],
                            user_data['host']['pass'])
    username, email = splitEmail(user_data['host']['username'])
    #of #me #{Sin asignar} -Resolved
    query = '(asignado a: ' + username + ' o #{Sin asignar}) y ' + user_data[
        'tipo_tarea']
    logger.info(query)
    issues = connection.getIssues(user_data['proyecto'], query,
                                  user_data['pagging'][0],
                                  max_issues_per_page)
    # Cache the per-query issue count for pagination: computing it is slow.
    if user_data['tipo_tarea'] == '#resuelta':
        if not user_data.get('issue_count_resueltas'):
            user_data['issue_count_resueltas'] = connection.getNumberOfIssues(
                query + ' y #' + user_data['proyecto'])
        issue_count = user_data['issue_count_resueltas']
    else:
        if not user_data.get('issue_count_no_resueltas'):
            user_data[
                'issue_count_no_resueltas'] = connection.getNumberOfIssues(
                    query + ' y #' + user_data['proyecto'])
        issue_count = user_data['issue_count_no_resueltas']
    keyboard = []
    texto = '*Tareas:* \n '
    for issue in issues:
        texto += '\n *[{}]* _{}, {}_\n *Prioridad:* _{}_\n *Resumen:* {} \n'.format(
            issue['id'], issue['Type'], issue['State'], issue['Priority'],
            escapeMarkdown(utf8(issue['summary'])))
        keyboard.append(
            InlineKeyboardButton(issue['id'], callback_data=issue['id']))
    # Offer switching between resolved and unresolved views.
    if user_data['tipo_tarea'] == '#{Sin resolver}':
        keyboard.append(
            InlineKeyboardButton('Ver solucionadas',
                                 callback_data='#resuelta'))
    else:
        keyboard.append(
            InlineKeyboardButton('Ver no solucionadas',
                                 callback_data='#{Sin resolver}'))
    # Pagination buttons.
    if user_data['pagging'][0] > 0:
        keyboard.append(InlineKeyboardButton('<', callback_data='<'))
    if len(issues) >= 5:
        keyboard.append(InlineKeyboardButton('>', callback_data='>'))
    # Lay the keyboard out three buttons per row.
    keyboard = [keyboard[i:i + 3] for i in range(0, len(keyboard), 3)]
    reply_markup = InlineKeyboardMarkup(keyboard, resize_keyboard=True,
                                        one_time_keyboard=True)
    texto += '\n *Elegí la tarea:*'
    if len(keyboard) > 0:
        update.callback_query.edit_message_text(text=texto,
                                                reply_markup=reply_markup,
                                                parse_mode='Markdown')
        return ISSUE
    else:
        update.callback_query.edit_message_text(
            text="No hay tareas asignadas a vos! Chau")
        return ConversationHandler.END
result = self.handle_with_processors() except NotFound: web.ctx.status = "404 Not Found" result = self.notfound() except web.HTTPError, e: result = e.data except: print >> web.debug, traceback.format_exc() web.ctx.status = '500 Internal Server Error' web.header('Content-Type', 'text/html') result = self.internalerror() if is_generator(result): result = peep(result) else: result = [utils.utf8(result)] status, headers = web.ctx.status, web.ctx.headers start_resp(status, headers) #@@@ # Since the CherryPy Webserver uses thread pool, the thread-local state is never cleared. # This interferes with the other requests. # clearing the thread-local storage to avoid that. # see utils.ThreadedDict for details import threading t = threading.currentThread() if hasattr(t, '_d'): del t._d return result
def sign_string(self, string_to_sign):
    """HMAC-SHA256 the string; return it base64-encoded without the trailing newline."""
    mac = hmac.new(utf8(self.secret_key), digestmod=sha256)
    mac.update(utf8(string_to_sign))
    # Python 2 codec: str.encode("base64") appends a newline; strip it.
    return mac.digest().encode("base64").rstrip('\n')