def __init__(self, host, database, user=None, password=None,
             max_idle_time=7 * 3600):
    self.host = host
    self.database = database
    self.max_idle_time = max_idle_time

    args = dict(conv=CONVERSIONS, use_unicode=True, charset="utf8",
                db=database, init_command='SET time_zone = "+0:00"',
                sql_mode="TRADITIONAL")
    if user is not None:
        args["user"] = user
    if password is not None:
        args["passwd"] = password

    # We accept a path to a MySQL socket file or a host(:port) string
    if "/" in host:
        args["unix_socket"] = host
    else:
        self.socket = None
        pair = host.split(":")
        if len(pair) == 2:
            args["host"] = pair[0]
            args["port"] = int(pair[1])
        else:
            args["host"] = host
            args["port"] = 3306

    self._db = None
    self._db_args = args
    self._last_use_time = time.time()
    try:
        self.reconnect()
    except Exception:
        gen_log.error("Cannot connect to MySQL on %s", self.host,
                      exc_info=True)
def load_gettext_translations(directory, domain): """Loads translations from gettext's locale tree Locale tree is similar to system's /usr/share/locale, like: {directory}/{lang}/LC_MESSAGES/{domain}.mo Three steps are required to have you app translated: 1. Generate POT translation file xgettext --language=Python --keyword=_:1,2 -d cyclone file1.py file2.html etc 2. Merge against existing POT file: msgmerge old.po cyclone.po > new.po 3. Compile: msgfmt cyclone.po -o {directory}/pt_BR/LC_MESSAGES/cyclone.mo """ import gettext global _translations global _supported_locales global _use_gettext _translations = {} for lang in os.listdir(directory): if lang.startswith('.'): continue # skip .svn, etc if os.path.isfile(os.path.join(directory, lang)): continue try: os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo")) _translations[lang] = gettext.translation(domain, directory, languages=[lang]) except Exception, e: gen_log.error("Cannot load translation for '%s': %s", lang, str(e)) continue
def send_error(self, status_code=500, **kwargs):
    """Sends the given HTTP error code to the browser.

    If `flush()` has already been called, it is not possible to send
    an error, so this method will simply terminate the response.
    If output has been written but not yet flushed, it will be discarded
    and replaced with the error page.

    Override `write_error()` to customize the error page that is returned.
    Additional keyword arguments are passed through to `write_error`.
    """
    if self._headers_written:
        gen_log.error("Cannot send error response after headers written")
        if not self._finished:
            self.finish()
        return
    # Need to keep the headers, so don't clear them here:
    # self.clear()
    reason = kwargs.get('reason')
    if 'exc_info' in kwargs:
        exception = kwargs['exc_info'][1]
        if isinstance(exception, HTTPError) and exception.reason:
            reason = exception.reason
    self.set_status(status_code, reason=reason)
    try:
        self.write_error(status_code, **kwargs)
    except Exception:
        app_log.error("Uncaught exception in write_error", exc_info=True)
    if not self._finished:
        self.finish()
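The docstring above points to write_error() as the customization hook. A minimal sketch of such an override, assuming a plain Tornado RequestHandler and a JSON error body (the handler name and payload fields are illustrative, not part of the snippet above):

import traceback

from tornado.web import RequestHandler


class JSONErrorHandler(RequestHandler):
    def write_error(self, status_code, **kwargs):
        # send_error() ends up here; "exc_info" is only present when an
        # exception triggered the error response.
        payload = {"code": status_code, "reason": self._reason}
        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
            payload["traceback"] = traceback.format_exception(*kwargs["exc_info"])
        self.finish(payload)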
def get(self, uid, tid):
    ota = yield self.db_temp.get_ota(tid)
    if ota is None:
        gen_log.error("No ota field")
        self.set_status(400)
        self.finish({"error": "Can't find ota status."})
        return
    self.finish(jsonify(ota))
def set_blocking_signal_threshold(self, seconds, action):
    if not hasattr(signal, "setitimer"):
        gen_log.error("set_blocking_signal_threshold requires a signal module "
                      "with the setitimer method")
        return
    self._blocking_signal_threshold = seconds
    if seconds is not None:
        signal.signal(signal.SIGALRM,
                      action if action is not None else signal.SIG_DFL)
def main():
    config.init(persistent_path)
    tornado.options.parse_config_file(conf_path, final=False)
    tornado.options.parse_command_line()

    key = config.get_option('cookie_secret')
    if key is None:
        gen_log.error('Fatal: secret key not found. '
                      'Run `manage.py keys` to create it')
        sys.exit()
    settings['cookie_secret'] = key

    auth.init()

    application = tornado.web.Application(routes, **settings)
    server = tornado.httpserver.HTTPServer(application)
    server.listen(options.server_port)
    tornado.ioloop.IOLoop.instance().start()
def make_static_url(cls, settings, path):
    """Constructs a versioned url for the given path.

    This method may be overridden in subclasses (but note that it is
    a class method rather than an instance method).

    ``settings`` is the `Application.settings` dictionary.  ``path``
    is the static path being requested.  The url returned should be
    relative to the current host.
    """
    abs_path = os.path.join(settings["static_path"], path)
    with cls._lock:
        hashes = cls._static_hashes
        if abs_path not in hashes:
            try:
                f = open(abs_path, "rb")
                hashes[abs_path] = hashlib.md5(f.read()).hexdigest()
                f.close()
            except Exception:
                gen_log.error("Could not open static file %r", path)
                hashes[abs_path] = None
        hsh = hashes.get(abs_path)
    static_url_prefix = settings.get("static_url_prefix", "/static/")
    if hsh:
        return static_url_prefix + path + "?v=" + hsh[:5]
    else:
        return static_url_prefix + path
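Since the docstring says this classmethod may be overridden, here is a hedged sketch of a StaticFileHandler subclass that swaps the per-file content hash for a deploy-wide release tag, matching the older three-argument signature shown above; the "release_version" setting is an assumption, not part of the snippet:

from tornado.web import StaticFileHandler


class ReleaseVersionedStaticHandler(StaticFileHandler):
    @classmethod
    def make_static_url(cls, settings, path):
        # Version every static URL with one release string instead of
        # hashing each file on disk.
        prefix = settings.get("static_url_prefix", "/static/")
        version = settings.get("release_version")  # assumed Application setting
        if version:
            return "%s%s?v=%s" % (prefix, path, version)
        return prefix + path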
def _server_request_loop(self, delegate):
    try:
        while True:
            conn = HTTP1Connection(self.stream, False,
                                   self.params, self.context)
            request_delegate = delegate.start_request(self, conn)
            try:
                ret = yield conn.read_response(request_delegate)
            except (iostream.StreamClosedError,
                    iostream.UnsatisfiableReadError):
                return
            except _QuietException:
                # This exception was already logged.
                conn.close()
                return
            except Exception as e:
                if 1 != e.errno:
                    gen_log.error("Uncaught exception", exc_info=True)
                conn.close()
                return
            if not ret:
                return
            yield gen.moment
    finally:
        delegate.on_close(self)
async def _server_request_loop(
    self, delegate: httputil.HTTPServerConnectionDelegate
) -> None:
    try:
        while True:
            conn = HTTP1Connection(self.stream, False, self.params, self.context)
            request_delegate = delegate.start_request(self, conn)
            try:
                ret = await conn.read_response(request_delegate)
            except (
                iostream.StreamClosedError,
                iostream.UnsatisfiableReadError,
                asyncio.CancelledError,
            ):
                return
            except _QuietException:
                # This exception was already logged.
                conn.close()
                return
            except Exception:
                gen_log.error("Uncaught exception", exc_info=True)
                conn.close()
                return
            if not ret:
                return
            await asyncio.sleep(0)
    finally:
        delegate.on_close(self)
def get_user(self, token):
    if not token:
        raise gen.Return((None, "Requires authentication"))

    user = None
    result = yield self.db_user.get_user_by_token(token)
    if result is None:
        try:
            data = yield sso.auth_token(token)
            print(data['ext'].get('picture'))
            doc_user = {
                'id': data['user_id'],
                'sign_in_provider': data['ext']['firebase']['sign_in_provider'],
                'email': '' or data['ext'].get('email'),
                'picture': '' or data['ext'].get('picture'),
                'tokens': [
                    {
                        'token': data['token'],
                        'expire': datetime.utcfromtimestamp(int(data['expire']))
                    }
                ]}
            yield self.db_user.add_user(data['user_id'], doc_user)
            user = {'id': data['user_id']}
        except Exception as e:
            gen_log.error(e)
            raise gen.Return((None, "SSO authenticate token failure, {}".format(str(e))))
    else:
        result = yield self.db_user.is_expire(token)
        if result is None:
            raise gen.Return((None, "Authentication has expired"))
        user = {'id': result['id'], 'token': token}

    raise gen.Return((user, ''))
def _callback(fut):
    exc = fut.exc_info()
    if exc:
        if not isinstance(exc[1], etcdexcept.EtcdException):
            # We can't get the list of machines, if one server is in the
            # machines cache, try on it
            _log.error("Failed to get list of machines from %s%s: %r and retry it.",
                       uri, self.version_prefix, exc)
            if self._machines_cache:
                self._base_url = self._machines_cache.pop(0)
                _log.debug("Retrying on %s", self._base_url)
                # Call myself
                self.ioloop.add_future(self.search_machine(), _callback)
                return
            else:
                raise etcdexcept.EtcdException("Could not get the list of servers, "
                                               "maybe you provided the wrong "
                                               "host(s) to connect to?")
    else:
        response = fut.result()
        machines = [
            node.strip() for node in
            self._handle_server_response(response).body.decode('utf-8').split(',')
        ]
        _log.debug("Retrieved list of machines: %s", machines)
        self._machines_cache = machines
        if self._base_url not in self._machines_cache:
            self._base_url = self._choice_machine()
    callback(fut.result())
def _run(self):
    if not self._running:
        return
    self._running = False
    try:
        self.callback()
    except Exception:
        gen_log.error("Error in delayed callback", exc_info=True)
def _handle_events(self, fd, events): """This method is the actual handler for IOLoop, that gets called whenever an event on my socket is posted. It dispatches to _handle_recv, etc.""" # print "handling events" if not self.socket: gen_log.warning("Got events for closed stream %s", fd) return try: # dispatch events: if events & IOLoop.ERROR: gen_log.error("got POLLERR event on ZMQStream, which doesn't make sense") return if events & IOLoop.READ: self._handle_recv() if not self.socket: return if events & IOLoop.WRITE: self._handle_send() if not self.socket: return # rebuild the poll state self._rebuild_io_state() except: gen_log.error("Uncaught exception, closing connection.", exc_info=True) self.close() raise
def _read_to_buffer(self):
    """Reads from the socket and appends the result to the read buffer.

    Returns the number of bytes read.  Returns 0 if there is nothing
    to read (i.e. the read returns EWOULDBLOCK or equivalent).  On
    error closes the socket and raises an exception.
    """
    try:
        chunk = self.read_from_fd()
    except (socket.error, IOError, OSError) as e:
        # ssl.SSLError is a subclass of socket.error
        if e.args[0] == errno.ECONNRESET:
            # Treat ECONNRESET as a connection close rather than
            # an error to minimize log spam (the exception will
            # be available on self.error for apps that care).
            self.close(exc_info=True)
            return
        self.close(exc_info=True)
        raise
    if chunk is None:
        return 0
    self._read_buffer.append(chunk)
    self._read_buffer_size += len(chunk)
    if self._read_buffer_size >= self.max_buffer_size:
        gen_log.error("Reached maximum read buffer size")
        self.close()
        raise IOError("Reached maximum read buffer size")
    return len(chunk)
def _execute(self, cursor, query, parameters):
    try:
        return cursor.execute(query, parameters)
    except OperationalError:
        gen_log.error("Error connecting to MySQL on %s", self.host)
        self.close()
        raise
def _handle_request_exception(self, e):
    if not isinstance(e, Interruption):
        return tornado.web.RequestHandler._handle_request_exception(self, e)

    # copy of tornado.web.RequestHandler._handle_request_exception
    # but remove exception report
    if isinstance(e, tornado.web.Finish):
        # Not an error; just finish the request without logging.
        if not self._finished:
            self.finish()
        return

    # this is not an error
    # do not report exception
    # self.log_exception(*sys.exc_info())
    if self._finished:
        # Extra errors after the request has been finished should
        # be logged, but there is no reason to continue to try and
        # send a response.
        return
    if isinstance(e, tornado.web.HTTPError):
        if e.status_code not in tornado.httputil.responses and not e.reason:
            gen_log.error("Bad HTTP status code: %d", e.status_code)
        else:
            self.send_error(e.status_code, exc_info=sys.exc_info())
            return
    self.send_error(500, exc_info=sys.exc_info())
def _load_version(abs_path):
    try:
        hsh = cls.get_content_version(abs_path)
        mtm = cls.get_content_modified_time(abs_path)
        return mtm, hsh
    except Exception:
        gen_log.error("Could not open static file %r", abs_path)
        return None, None
def api(self, path, **kwargs):
    try:
        import time
        s_time = time.time()
        data = yield self._make_request(path, **kwargs)
        e_time = time.time()
        gen_log.info("=====Time request wio api, {}".format(float(e_time) - float(s_time)))
    except Exception as e:
        gen_log.error(e)
        raise
    raise gen.Return(data)
def _get_func(self, request, m, func):
    try:
        cls = []
        if hasattr(request, m):
            cls = getattr(request, m)
        if len(cls):
            cls = cls.pop()
            return getattr(cls, func)
    except Exception, ex:
        gen_log.error(ex)
def _make_request(self, path, query=None, method="GET", body=None, headers=None):
    """
    Makes request on `path` in the graph.

    path -- endpoint to the facebook graph api
    query -- A dictionary that becomes a query string to be appended to the path
    method -- GET, POST, etc
    body -- message body
    headers -- Like "Content-Type"
    """
    if not query:
        query = {}
    if self.access_token:
        query["access_token"] = self.access_token
    query_string = urllib.urlencode(query) if query else ""

    if method == "GET":
        body = None
    else:
        if headers and "json" in headers.get('Content-Type'):
            body = json.dumps(body) if body else ""
        else:
            body = urllib.urlencode(body) if body else ""

    url = BASE_URL + path
    if query_string:
        url += "?" + query_string

    # url = "https://wio.temp-io.life/v1/nodes/create?access_token=123"
    gen_log.info("URL=====> {}".format(url))
    gen_log.info("method=====> {}".format(method))
    gen_log.info("body=====> {}".format(body))

    client = AsyncHTTPClient()
    request = HTTPRequest(url, method=method, body=body, headers=headers)
    try:
        response = yield client.fetch(request)
    except HTTPError as e:
        raise WioAPIError(e)
    except Exception as e:
        gen_log.error(e)
        raise

    content_type = response.headers.get('Content-Type')
    gen_log.info("#### content_type: {}".format(content_type))
    gen_log.info("#### body: {}".format(response.body))
    if 'json' in content_type:
        data = json.loads(response.body.decode())
    else:
        raise WioAPIError('Maintype was not json')

    raise gen.Return(data)
def cpu_count():
    """Returns the number of processors on this machine."""
    try:
        return multiprocessing.cpu_count()
    except NotImplementedError:
        pass
    try:
        return os.sysconf("SC_NPROCESSORS_CONF")
    except ValueError:
        pass
    gen_log.error("Could not detect number of processors; assuming 1")
    return 1
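A small usage sketch for the helper above: pre-fork one worker per detected processor before entering the IOLoop (tornado.process.fork_processes applies the same fallback to 1 internally when given 0):

import tornado.ioloop
import tornado.process

# Fork one child per CPU reported by cpu_count(); each child then runs
# its own IOLoop.
tornado.process.fork_processes(cpu_count())
tornado.ioloop.IOLoop.current().start()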
def delete(self, uid, tid):
    # TODO: (ten) authenticated uid is correct?
    yield self.db_temp.del_temp(tid)

    wio = Wio(self.current_user['token'])
    try:
        yield wio.del_thing(tid)
    except Exception as e:
        gen_log.error(e)
        self.set_status(400)
        self.finish({"error": "del_thing error"})
        return

    self.set_status(204)
    self.finish()
def _get_cached_version(cls, abs_path):
    with cls._lock:
        hashes = cls._static_hashes
        if abs_path not in hashes:
            try:
                hashes[abs_path] = cls.get_content_version(abs_path)
            except Exception:
                gen_log.error("Could not open static file %r", abs_path)
                hashes[abs_path] = None
        hsh = hashes.get(abs_path)
        if hsh:
            return hsh
    return None
def _run_callback(self, callback, *args, **kwargs): """Wrap running callbacks in try/except to allow us to close our socket.""" try: # Use a NullContext to ensure that all StackContexts are run # inside our blanket exception handler rather than outside. with stack_context.NullContext(): callback(*args, **kwargs) except: gen_log.error("Uncaught exception in ZMQStream callback", exc_info=True) # Re-raise the exception so that IOLoop.handle_callback_exception # can see it and log the error raise
def _handle_events(self, fd, events): if self.closed(): gen_log.warning("Got events for closed stream %s", fd) return try: if self._connecting: self._handle_connect() if self.closed(): return if events & self.io_loop.READ: # NOTE: We use explict read instead of implicit. # The reason IOStream is not idle is that when an event happened, # tornado iostream will still try to read them into buffer. # Our approach is that when someone is trying to read the iostream, # we will read it. if self._should_socket_close() or self.reading(): self._handle_read() if self.closed(): return if events & self.io_loop.WRITE: self._handle_write() if self.closed(): return if events & self.io_loop.ERROR: self.error = self.get_fd_error() self.io_loop.add_callback(self.close) return state = self.io_loop.ERROR if self.reading(): state |= self.io_loop.READ if self.writing(): state |= self.io_loop.WRITE if state == self.io_loop.ERROR and self._read_buffer_size == 0: state |= self.io_loop.READ if state != self._state: assert self._state is not None, \ "shouldn't happen: _handle_events without self._state" self._state = state self.io_loop.update_handler(self.fileno(), self._state) except UnsatisfiableReadError as e: gen_log.info("Unsatisfiable read, closing connection: %s" % e) self.close(exc_info=True) except Exception: gen_log.error("Uncaught exception, closing connection.", exc_info=True) self.close(exc_info=True) raise
def post(self, uid, tid):
    wio = Wio(self.current_user['token'])
    try:
        activated = yield wio.get_activation(tid)
    except Exception as e:
        gen_log.error(e)
        self.set_status(400)
        self.finish({"error": "Get activation is failure on Wio, {}".format(e.message)})
        return

    if activated is True:
        yield self.db_temp.update_temp(tid, {"activated": activated})
        self.set_status(204)
        self.finish()
    else:
        self.set_status(400)
        self.finish({"error": "Verify activation failure."})
def _handle_recv(self):
    """Handle a recv event."""
    if self._flushed:
        return
    try:
        msg = self.socket.recv_multipart(zmq.NOBLOCK, copy=self._recv_copy)
    except zmq.ZMQError as e:
        if e.errno == zmq.EAGAIN:
            # state changed since poll event
            pass
        else:
            gen_log.error("RECV Error: %s" % zmq.strerror(e.errno))
    else:
        if self._recv_callback:
            callback = self._recv_callback
            # self._recv_callback = None
            self._run_callback(callback, msg)
def _handle_send(self):
    """Handle a send event."""
    if self._flushed:
        return
    if not self.sending():
        gen_log.error("Shouldn't have handled a send event")
        return

    msg, kwargs = self._send_queue.get()
    try:
        status = self.socket.send_multipart(msg, **kwargs)
    except zmq.ZMQError as e:
        gen_log.error("SEND Error: %s", e)
        status = e
    if self._send_callback:
        callback = self._send_callback
        self._run_callback(callback, msg, status)
def _check_cluster_id(self, response):
    cluster_id = response.headers.get("x-etcd-cluster-id")
    if not cluster_id:
        _log.warning("etcd response did not contain a cluster ID")
        return
    id_changed = (self.expected_cluster_id and
                  cluster_id != self.expected_cluster_id)
    # Update the ID so we only raise the exception once.
    old_expected_cluster_id = self.expected_cluster_id
    self.expected_cluster_id = cluster_id
    if id_changed:
        # Defensive: clear the pool so that we connect afresh next
        # time.
        self._base_url = self._choice_machine()
        _log.error(
            'The UUID of the cluster changed from {} to '
            '{}.'.format(old_expected_cluster_id, cluster_id))
def _callback_wrapper():
    try:
        # Use a NullContext to ensure that all StackContexts are run
        # inside our blanket exception handler rather than outside.
        with stack_context.NullContext():
            callback(*args, **kwargs)
    except Exception as ex:
        gen_log.error("Uncaught exception: %s", ex)
        # Close the socket on an uncaught exception from a user callback
        # (It would eventually get closed when the socket object is
        # gc'd, but we don't want to rely on gc happening before we
        # run out of file descriptors)
        channel.close()
        # Re-raise the exception so that IOLoop.handle_callback_exception
        # can see it and log the error
        raise
def set_blocking_signal_threshold(self, seconds, action): """Sends a signal if the ioloop is blocked for more than s seconds. Pass seconds=None to disable. Requires python 2.6 on a unixy platform. The action parameter is a python signal handler. Read the documentation for the python 'signal' module for more information. If action is None, the process will be killed if it is blocked for too long. """ if not hasattr(signal, "setitimer"): gen_log.error( "set_blocking_signal_threshold requires a signal module " "with the setitimer method") return self._blocking_signal_threshold = seconds if seconds is not None: signal.signal(signal.SIGALRM, action if action is not None else signal.SIG_DFL)
def update_task_wait_check(self):
    post_json = {
        'task_status_id': TASK_STATUS_WAIT_CHECK,
        'update_time': datetime.strftime(datetime.now(), DATETIME_FMT),
        'result': self.result,
        'task_name': self.task_name
    }
    url = '%s/%s' % (self.async_api_url, TASK_URI)
    try:
        response = yield http_client(url, 'PATCH', self.cookie, post_json)
    except HTTPError as err:
        gen_log.error('Update Task %s Success Error:%s' % (self.task_name, err.message))
        raise Exception('Update Task %s Success Error:%s' % (self.task_name, err.message))
    else:
        result = json_decode(response.body)
        if result['status'] < 0:
            gen_log.error('Task : %s ,Update Wait Check:Error:%s' % (self.task_name, result['msg']))
            raise Exception('Update Wait Check <%s> error: %s' % (self.task_name, result['msg']))
        else:
            gen_log.info('Task : %s ,Update Wait Check:Done' % self.task_name)
def post(self, uid): """create a temp-io device on server""" if self.req_user and self.req_user['myself'] is False: raise HTTPError(400, "No operation permission") wio = Wio(self.req_token) try: thing = yield wio.add_thing() except Exception as e: gen_log.error(e) raise HTTPError( 400, "Create temp-io is failure on built-in Wio server, {}".format( str(e))) cur_time = datetime.utcnow() document = { "uid": uid, "id": thing['id'], "key": thing['key'], # "online": False, "board_type_id": 1, "temperature": None, "temperature_f": None, "temperature_updated_at": None, "read_period": 60, "has_sleep": True, "status": "", "status_text": "", "open": False, "activated": False, "name": "", "description": "", "private": False, "gps": "", "picture_url": "", "updated_at": cur_time, "created_at": cur_time } result = yield self.db_temp.add_temp(document) data = jsonify(result) self.set_status(201) self.finish(data)
def load_gettext_translations(directory, domain): """Loads translations from `gettext`'s locale tree Locale tree is similar to system's ``/usr/share/locale``, like:: {directory}/{lang}/LC_MESSAGES/{domain}.mo Three steps are required to have you app translated: 1. Generate POT translation file:: xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc 2. Merge against existing POT file:: msgmerge old.po mydomain.po > new.po 3. Compile:: msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo """ import gettext global _translations global _supported_locales global _use_gettext _translations = {} for lang in os.listdir(directory): if lang.startswith('.'): continue # skip .svn, etc if os.path.isfile(os.path.join(directory, lang)): continue try: os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo")) _translations[lang] = gettext.translation(domain, directory, languages=[lang]) except Exception as e: gen_log.error("Cannot load translation for '%s': %s", lang, str(e)) continue _supported_locales = frozenset(list(_translations.keys()) + [_default_locale]) _use_gettext = True gen_log.debug("Supported locales: %s", sorted(_supported_locales))
def post(self):
    def _get_stix_package(ioc_xml):
        if ioc_xml is not None and len(ioc_xml) > 0:
            ns = namespaces.Namespace("http://openioc.org/openioc", "openioc", "")
            idgen.set_id_namespace(ns)

            stix_obj = to_stix(BytesIO(ioc_xml))
            stix_package = STIXPackage_v1.from_xml(
                etree.fromstring(stix_obj.to_xml()))
            return stix_package
        else:
            raise RuntimeError('request body is empty.')

    try:
        msg = None
        for field_name, files in self.request.files.items():
            for file in files:
                filename, content_type = file['filename'], file['content_type']
                body = file['body']
                gen_log.debug('POST "%s" "%s" %d bytes',
                              filename, content_type, len(body))
                gen_log.debug('POST file body:\n"%s"', body)

                stix_package = _get_stix_package(body)
                if stix_package is not None:
                    patterns = common.get_search_items(stix_package)
                    msg = {'fields': patterns}
        self.http_normal(200, msg=msg if msg is not None else u'OK')
    except:
        trace_msg = traceback.format_exc().decode('utf-8')
        emsg = u'request_msg:{0} {1}'.format(self.request.body, trace_msg)
        gen_log.error(u',[session-id:{0}],{1}'.format(None, emsg))
        self.http_error(400, msg=trace_msg)
def upload_temperature_data():
    """
    curl -v -X POST -d "{\"temperature\": 25}" $HOST_NAME/api/v1/$ACCESS_TOKEN/telemetry --header "Content-Type:application/json"
    """
    gen_log.info("upload_temperature_data")
    try:
        data = {"temperature": random.randint(20, 100)}
        url = url_prefix + '/v1/{}/telemetry'.format(token)
        res = yield AsyncHTTPClient().fetch(url, method='POST',
                                            body=json.dumps(data),
                                            headers=headers)
        # res = requests.post(url, data=json.dumps(data), headers=headers)
        gen_log.info(res.code)
    except Exception as ex:
        gen_log.error(ex)
    IOLoop.current().add_timeout(time.time() + 1, upload_temperature_data)
def main():
    frame = PIL.Image.open('assets/test.jpg').convert(mode='RGB')
    while True:
        # start timer
        nxt = gen.sleep(options.capture_interval)
        stamped = timestamp(frame.copy())
        stream = image_to_stream(stamped)
        try:
            response = yield post_data('http://localhost:8888/upload',
                                       'image_capture',
                                       stream.getvalue(),
                                       'image/jpeg')
        except Exception as ex:
            gen_log.error(ex)
        yield nxt
def get_uefi_boot_json(self):
    boot_json = dict()
    fetch_url = 'https://%s/%s' % (self.ilo4_host, self.uefi_boot_url)
    try:
        response = yield self.get_response(fetch_url, 'GET')
        result = json_decode(response.body)
    except HTTPError as err:
        gen_log.error('Get %s Boot Json Error: %s' % (self.ilo4_host, err.message))
        raise Exception('Get %s Boot Json Error: %s' % (self.ilo4_host, err.message))
    else:
        result = json_decode(response.body)
        for boot_option in result['Boot'][
                self.uefi_target_boot_source_override_supported]:
            if re.search(self.boot_selection, boot_option):
                boot_json["Boot"] = dict()
                boot_json["Boot"][
                    self.uefi_target_boot_source_override_option] = boot_option
    raise Return(boot_json)
def _handle_request_exception(self, e):
    # Customized exception handling: pass the error's message straight through.
    if isinstance(e, Finish):
        if not self._finished:
            self.finish()
        return
    self.log_exception(*sys.exc_info())
    if self._finished:
        return
    if isinstance(e, HTTPError):
        if e.status_code not in httputil.responses and not e.reason:
            gen_log.error("Bad HTTP status code: %d", e.status_code)
            self.send_error(500, exc_info=sys.exc_info(), message=e.log_message)
        else:
            self.send_error(e.status_code, exc_info=sys.exc_info(), message=e.log_message)
    else:
        self.send_error(500, exc_info=sys.exc_info(), message=e.message)
def process_message(self, data, method):
    '''Process the data, then check with the database to see if the
    collection.document pair is valid or not. After this, the data is
    added to the result, and the message is sent to every subscriber
    of this particular channel.
    '''
    try:
        result = self.db_connector.get_doc_info(method.routing_key)
    except Exception as e:
        gen_log.error(
            "Couldn't get collection and document info: {0}".format(e))
        return

    result['data'] = data
    for subscriber in self._handlers:
        gen_log.info("message came from, {0}".format(method.routing_key))
        if subscriber.has_subscribed_to(result['collection']):
            subscriber.write_message(result)
            gen_log.info('PikaConnector: notified %s' % repr(subscriber))
def query(cls, field: str, value: object) -> (None, list):
    """Query rows by the given field.

    :param cls:
    :param field: field name
    :param value: value to match
    :return: ``None`` or ``list``
    """
    if not SESSION:
        gen_log.error('session is null')
        return None
    session = SESSION()
    try:
        ret = session.query(cls).filter(field == value).all()
        session.close()
        return ret
    except Exception as e:
        gen_log.warning(
            'query {model}({field}={value}) failed, error: {e}'.format(
                model=cls, field=field, value=value, e=e))
        session.close()
        return None
def post(self, uid, tid):
    if self.req_user and self.req_user['myself'] is False:
        raise HTTPError(400, "No operation permission")

    thing_key = self.temp['key']
    wio = Wio(thing_key)
    try:
        result = yield wio.add_ota(self.temp['board_type_id'])
    except Exception as e:
        gen_log.error(e)
        raise HTTPError(400, "Trigger OTA is failure, {}".format(str(e)))

    ota = yield self.db_temp.update_ota(tid, {
        "status": result['status'],
        "status_text": result['status_text']
    })

    self.set_status(202)
    self.add_header("Location", "")  # TODO, get status url
    self.finish(jsonify(ota))

    IOLoop.current().add_callback(self.get_ota, tid, thing_key)
def process_request(self, handler):
    current_user_agent = handler.request.headers.get('User-Agent', None)
    if current_user_agent:
        for user_agent in settings.DISALLOWED_USER_AGENTS:
            if user_agent.search(current_user_agent):
                self.is_finished = True
                gen_log.error('Forbidden (User agent): %s',
                              handler.request.path,
                              extra={
                                  'status_code': 403,
                                  'request': handler.request
                              })
                raise HttpForbiddenError()

    if settings.ILLEGAL_CHARACTER_FORBIDDEN:
        # Check whether the request contains illegal characters
        illegals = settings.ILLEGAL_CHARACTER
        if handler.request.arguments:
            query = str(handler.request.query_arguments)
            validate_illegal(query, illegals)
        if handler.request.body:
            query = str(handler.request.body)
            validate_illegal(query, illegals)
def get_ota(self, tid, thing_key):
    """Long polling on Wio v1"""
    wio = Wio(thing_key)
    while 1:
        try:
            result = yield wio.get_ota()
        except httpclient.HTTPError as e:
            if e.code == 599:
                continue
            elif e.code == 400:
                gen_log.error(e)
                break
        except Exception as e:
            gen_log.error(e)
            break

        yield self.db_temp.update_ota(tid, {
            "status": result['status'],
            "status_text": result['status_text']
        })

        if result['status'] == "error" or result['status'] == "done":
            break
def _check_file(modify_times, module, path):
    try:
        modified = os.stat(path).st_mtime
    except Exception:
        return
    if path not in modify_times:
        modify_times[path] = modified
        return
    if modify_times[path] != modified:
        gen_log.info("%s modified; restarting server", path)
        modify_times[path] = modified
    else:
        return
    try:
        importlib.reload(module)
    except Exception:
        gen_log.error("Error importing %s, not reloading" % (path,))
        traceback.print_exc()
        return False
    return True
def _run_callback(self, callback, *args, **kwargs): """Wrap running callbacks in try/except to allow us to close our socket.""" try: # Use a NullContext to ensure that all StackContexts are run # inside our blanket exception handler rather than outside. with stack_context.NullContext(): callback(*args, **kwargs) except Exception as ex: gen_log.error("Uncaught exception: %s", ex) # Close the socket on an uncaught exception from a user callback # (It would eventually get closed when the socket object is # gc'd, but we don't want to rely on gc happening before we # run out of file descriptors) self.close() # Re-raise the exception so that IOLoop.handle_callback_exception # can see it and log the error raise
def load_gettext_translations(directory: str, domain: str) -> None:
    """Loads translations from `gettext`'s locale tree

    Locale tree is similar to system's ``/usr/share/locale``, like::

        {directory}/{lang}/LC_MESSAGES/{domain}.mo

    Three steps are required to have your app translated:

    1. Generate POT translation file::

        xgettext --language=Python --keyword=_:1,2 -d mydomain file1.py file2.html etc

    2. Merge against existing POT file::

        msgmerge old.po mydomain.po > new.po

    3. Compile::

        msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo
    """
    global _translations
    global _supported_locales
    global _use_gettext
    _translations = {}
    for filename in glob.glob(
            os.path.join(directory, "*", "LC_MESSAGES", domain + ".mo")):
        lang = os.path.basename(os.path.dirname(os.path.dirname(filename)))
        try:
            _translations[lang] = gettext.translation(domain, directory,
                                                      languages=[lang])
        except Exception as e:
            gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
            continue
    _supported_locales = frozenset(
        list(_translations.keys()) + [_default_locale])
    _use_gettext = True
    gen_log.debug("Supported locales: %s", sorted(_supported_locales))
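A usage sketch tying the gettext examples above together: after compiling the .mo files as the docstrings describe, load them once at startup and translate per request (the "locale" directory, "mydomain" domain, and message text are assumptions for illustration):

import tornado.locale

# Loads {directory}/{lang}/LC_MESSAGES/{domain}.mo for every language found.
tornado.locale.load_gettext_translations("locale", "mydomain")

# Pick the closest supported locale and translate a message with it.
user_locale = tornado.locale.get("pt_BR")
print(user_locale.translate("Sign in"))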
def post(self, *args, **kwargs):
    cid, = self.getArgument_list("cid")
    uid, name, phone, pwd, status, products = \
        self.getArgument_list("uid", "name", "phone", "pwd", "status",
                              "bind_products[]")
    ep_id = self.user.get("ep")
    cur_user_id = self.user.get("id")
    if all((uid, name, phone, status, products)):
        products = [int(p) for p in products]
        try:
            customer = self.db.query(Staff).filter(Staff.id == uid).one()
            customer.name = name
            customer.phone = phone
            if "" != pwd:
                salt, password = customer.genPassword(pwd)
                customer.salt = salt
                customer.passwd = password

            for cur_rel in customer.ep_rel:
                if cur_rel.product_id in products:
                    products.remove(cur_rel.product_id)
                elif cur_rel.product_id != 0:
                    customer.ep_rel.remove(cur_rel)
                    self.db.delete(cur_rel)

            for p in products:
                uer = StaffEnterpriseRel()
                uer.ep_id = ep_id
                uer.cm_id = customer.id
                uer.product_id = p
                uer.create_tm = datetime.now()
                uer.create_method = USER_CREATE_METHOD_ENTERPRISE
                uer.uid = cur_user_id
                customer.ep_rel.append(uer)

            self.db.commit()
            return self.redirect(self.reverse_url("customer_index"))
        except Exception as e:
            self.db.rollback()
            gen_log.error(e)
            flash(self, str(e))
    else:
        flash(self, "some parameters lost")
    return self.redirect(full_url(self, "customer_detail", {"cid": cid}))
def run_ansible_task(self):
    gen_log.info('Task %s Start' % self.task_name)
    try:
        # Update the task status
        try:
            yield self.update_task_handler()
        except Exception as err:
            raise err

        # Get the ansible service
        ansible_url = ''
        try:
            ansible_service = yield self.get_ansible_service()
            ansible_url = 'http://%s/%s' % (ansible_service, 'ansible/task')
        except Exception as err:
            gen_log.error(err.args)
            raise Exception('Get Ansible Service Error')

        # Run the ansible task
        try:
            response = yield http_client(ansible_url, 'POST', None,
                                         self.ansible_json, 60)
        except HTTPError as err:
            gen_log.error('Run Ansible %s %s Error:%s' %
                          (self.task_name, ansible_url, err.args))
            self.result = self.return_json(-1, err.message)
            yield self.update_task_fail()
        else:
            result = json_decode(response.body)
            self.result = result
            try:
                if eq(result['status'], 0):
                    yield self.update_task_success()
                else:
                    yield self.update_task_fail()
            except HTTPError as err:
                raise err
    except Exception as err:
        self.result = self.return_json(-1, err.args)
        yield self.update_task_fail()
def password(cls, user_name, user_upwd, user_npwd, reset=0):
    """Change a user's password.

    :param user_name: account name
    :param user_upwd: current password
    :param user_npwd: new password
    :param reset: whether to reset the password
    """
    if not SESSION:
        return 0, '数据库连接错误,请联系后台管理员', None
    err_msg = ''
    session = SESSION()
    try:
        if reset:
            # Reset the password
            ret = session.query(cls).filter_by(user_name=user_name).update(
                {
                    cls.user_upwd: user_npwd,
                    cls.user_dpwd: user_npwd
                },
                synchronize_session=False)
        else:
            # Change the password
            ret = session.query(cls).filter(
                and_(
                    cls.user_name == user_name,
                    or_(cls.user_upwd == user_upwd,
                        cls.user_dpwd == user_upwd))).update(
                            {
                                cls.user_upwd: user_npwd,
                                cls.user_dpwd: user_npwd
                            },
                            synchronize_session=False)
        session.commit()
        session.close()
        gen_log.debug('ret: {0}'.format(ret))
        return 200, '密码修改成功,请牢记,如果忘记请联系系统管理员', ret
    except Exception as e:
        err_msg = str(e)
        gen_log.error(str(e))
        session.close()
        return 0, '密码修改失败: {0}'.format(err_msg), None
async def execute(self, option: str, params: str) -> None:
    """Execute the handler registered for the given option."""
    # Look up the _handler for this operation
    _handler = self._handlers.get(option)
    if _handler is None:
        gen_log.error("Can't find options {}".format(option))
        await self.on_err(code="300")
        return

    # Deserialize the parameter list
    try:
        params = json.loads(params)
    except Exception as e:
        gen_log.error(e)
        await self.on_err(code="400")
        return

    # Run the matching handler
    try:
        await _handler(params)
    except Exception as e:
        gen_log.error(e)
        await self.on_err(code=e.__str__())
def login(cls, user_name, user_upwd, user_ip):
    """User login.

    :param cls:
    :param user_name: account name
    :param user_upwd: password
    :param user_ip: client IP
    """
    if not SESSION:
        gen_log.error('session is null')
        return None
    err_msg = ''
    session = SESSION()
    try:
        user = session.query(cls).filter_by(user_name=user_name).first()
        session.close()
        if not user:
            return 0, '登录失败:用户不存在', None
        if user.user_dpwd == user_upwd:
            return 0, '不能使用默认密码登录,请先修改密码', None
        if user.user_upwd != user_upwd:
            return 0, '密码不正确', None
        if not user.user_state:
            return 0, '该帐号已被加锁:{0}'.format(user.user_mark), None
        if (datetime.now() - user.valid_time).days > 30:
            return 0, '密码已到期,请修改密码', None
        # Check whether the user's IP falls within the allowed IP ranges
        ip_valid = False
        for ip in user.user_ips.split(';'):
            if user_ip.startswith(ip):
                ip_valid = True
                break
        if not ip_valid:
            return 0, '该帐号不允许在该设备上登录,请联系管理员', None
        return 200, '登录成功', user.to_dict()
    except Exception as e:
        err_msg = str(e)
        gen_log.error(str(e))
        session.close()
        return 0, '登录失败: {0}'.format(err_msg), None
def get_user(self, token): if not token: raise gen.Return((None, "Requires authentication")) user = yield self.db_user.get_user_by_token(token) if user is None: try: data = yield sso.auth_token(token) doc_user = { 'id': data['user_id'], 'name': data['ext'].get('name', ''), 'sign_in_provider': data['ext']['firebase']['sign_in_provider'], 'email': '' or data['ext'].get('email'), 'picture': '' or data['ext'].get('picture'), 'pro': False, 'tokens': [{ 'token': data['token'], 'expire': datetime.utcfromtimestamp(int(data['expire'])) }] } yield self.db_user.add_user(data['user_id'], doc_user) raise gen.Return((doc_user, '')) except Exception as e: gen_log.error(e) raise HTTPError( 400, "SSO authenticate token failure, {}".format(str(e))) else: result = yield self.db_user.is_expire(token) if result is None: raise HTTPError(400, "Authentication has expired") raise gen.Return((user, ''))
def _handle_events(self, fd, events): if self.closed(): gen_log.warning("Got events for closed stream %d", fd) return try: if events & self.io_loop.READ: self._handle_read() if self.closed(): return if events & self.io_loop.WRITE: if self._connecting: self._handle_connect() self._handle_write() if self.closed(): return if events & self.io_loop.ERROR: self.error = self.get_fd_error() # We may have queued up a user callback in _handle_read or # _handle_write, so don't close the IOStream until those # callbacks have had a chance to run. self.io_loop.add_callback(self.close) return state = self.io_loop.ERROR if self.reading(): state |= self.io_loop.READ if self.writing(): state |= self.io_loop.WRITE if state == self.io_loop.ERROR: state |= self.io_loop.READ if state != self._state: assert self._state is not None, \ "shouldn't happen: _handle_events without self._state" self._state = state self.io_loop.update_handler(self.fileno(), self._state) except Exception: gen_log.error("Uncaught exception, closing connection.", exc_info=True) self.close(exc_info=True) raise
def delete(cls, field: str, value: object) -> int:
    '''Delete rows matching the given field and value.

    :param cls:
    :param field: field name
    :param value: value to match
    :return: ``int``
    '''
    if not SESSION:
        gen_log.error('session is null')
        return ResultCode(0, '无法获取session对象,数据库可能连接错误')
    session = SESSION()
    try:
        ret = session.query(cls).filter(field == value).delete()
        session.commit()
        session.close()
        return ret
    except Exception as e:
        gen_log.warning(
            'delete {model}({field}={value}) failed, error: {e}'.format(
                model=cls, field=field, value=value, e=e))
        session.close()
        return ResultCode(0, '删除失败')
def start(self, host, port):
    stream = yield TCPClient().connect(host, port)
    try:
        self.conn = RPC2Connection(stream, True)
        try:
            ret = yield self.conn.read_response(self.delegate)
        except (iostream.StreamClosedError,
                iostream.UnsatisfiableReadError):
            return
        except _QuietException:
            # This exception was already logged.
            self.conn.close()
            return
        except Exception:
            gen_log.error("Uncaught exception", exc_info=True)
            self.conn.close()
            return
        if not ret:
            return
        yield gen.moment
    finally:
        self.conn.close()
def run_config(self, config):
    commit_result = False
    err_msg = ''
    try:
        with Config(self.device, mode='exclusive') as cu:
            try:
                cu.load(config, format='set')
            except junos_exception.ConfigLoadError as err:
                gen_log.error(err)
                err_msg = 'junos config load {}'.format(err.message)
            else:
                try:
                    check = cu.commit_check()
                    if check:
                        commit_result = cu.commit()
                except junos_exception.CommitError as err:
                    err_msg = 'junos config commit error:{}'.format(err.message)
    except junos_exception.LockError as lock_err:
        err_msg = 'junos config lock {}'.format(lock_err.message)
    return commit_result, err_msg
def get_session(self):
    fetch_url = 'https://%s/%s' % (self.ilo4_host, self.auth_url)
    post_data = json.dumps({"UserName": self.user_name,
                            "Password": self.user_password})
    http_client = HTTPClient()
    headers = HTTPHeaders()
    headers.add('Content-Type', 'application/json')
    try:
        response = http_client.fetch(
            fetch_url,
            method='POST',
            headers=headers,
            validate_cert=False,
            body=post_data)
    except HTTPError as error:
        gen_log.error('ILO4 %s session error:%s' % (self.ilo4_host, error))
        raise error
    else:
        sessions = {
            self.SESSTION_LOCATION: response.headers['Location'],
            self.SESSION_X_AUTH_TOKEN: response.headers['X-Auth-Token']
        }
        return sessions
def post(self):
    debug = self.get_argument("debug", False)
    keys = self.settings["line_keys"] + (self.settings["proxy"],)
    line = Line(*keys)
    status = True
    progress = "begin"
    replies = []
    try:
        penguin = Penguin(self.settings["google_api_key"],
                          self.settings["database_url"])
        body = json.loads(self.request.body.decode("utf-8"))
        progress = "success to load body"
        reqs = line.receive(body)
        if len(reqs) == 0:
            raise Exception("No message is received")
        progress = "success to parse request"

        for r in reqs:
            request_msg = r.content
            resp = request_msg.reply()
            result_type, reply = penguin.ask(request_msg.text)
            replies.append(reply)
            resp.set_location(reply, penguin.place.name,
                              penguin.place.lat, penguin.place.lng)
            if not debug:
                line.post(resp)
        progress = "success to send message"
    except Exception as ex:
        gen_log.error(str(ex))
        status = False

    self.write({
        "status": status,
        "replies": replies,
        "progress": progress
    })