Example #1
 def post(self, tag):
     group = Group.objects(tag=tag)[0]
     title = self.get_argument('title')
     content = self.get_argument('content')
     now = datetime.datetime.now()
     mode = self.get_argument('type').decode()
     if mode == 'new':
         try:
             if not title:
                 raise Exception('title is none')
             post = Post(group=group,
                         author=self.get_curent_user_model(),
                         title=title,
                         content=content,
                         create_at=now,
                         update_at=now
             )
             post.save()
             return self.redirect("/group/" + tag + "/" + str(post.id))
         except Exception as ex:
             app_log.error(ex)
             return self.redirect("/group/" + tag)
     elif mode == 'update':
         id = self.get_argument('id')
         try:
             app_log.debug(id)
             app_log.debug(title)
             app_log.debug(content)
             post = Post.objects(id=id)[0]
             post.title = title
             post.content = content
             post.save()
         except Exception as ex:
             app_log.error(ex)
         return self.redirect("/group/" + tag + "/" + id)
Example #2
    def get(self, secure, url):
        proto = 'http' + secure

        if '/?' in url:
            url, query = url.rsplit('/?', 1)
        else:
            query = None

        remote_url = u"{}://{}".format(proto, quote(url))
        if query:
            remote_url = remote_url + '?' + query
        if not url.endswith('.ipynb'):
            # this is how we handle relative links (files/ URLs) in notebooks
            # if it's not a .ipynb URL and it is a link from a notebook,
            # redirect to the original URL rather than trying to render it as a notebook
            refer_url = self.request.headers.get('Referer', '').split('://')[-1]
            if refer_url.startswith(self.request.host + '/url'):
                self.redirect(remote_url)
                return

        response = yield self.fetch(remote_url)

        try:
            nbjson = response_text(response)
        except UnicodeDecodeError:
            app_log.error("Notebook is not utf8: %s", remote_url, exc_info=True)
            raise web.HTTPError(400)

        yield self.finish_notebook(nbjson, download_url=remote_url,
                                   msg="file from url: %s" % remote_url,
                                   public=True)
Example #3
    def post(self):

        id = self.get_argument('id', '')
        resource_url = self.get_argument('resource_url', '')

        http_client = AsyncHTTPClient()
        response = yield http_client.fetch("https://api.douban.com/v2/book/" + id)
        response = json.loads(response.body)

        book = None
        try:
            book = Book.objects(bid=id)[0]
        except Exception as ex:
            app_log.error(ex)
            book = Book(bid=id,
                        title=response['title'],
                        image=response['images']['large'],
                        isbn13=response['isbn13'],
                        publisher=response['publisher'],
                        wcount=0,
                        dcount=0
            )
        finally:
            book.save()

        if resource_url:
            self.share_network_file(book, resource_url)
        else:
            self.share_local_file(book, resource_url)
Example #4
    def _get_certivox_server_secret_share_dta(self, expires):
        path = 'serverSecret'
        url_params = url_concat('{0}{1}'.format(Keys.certivoxServer(), path), {
            'app_id': self.app_id,
            'expires': expires,
            'signature': signMessage('{0}{1}{2}'.format(path, self.app_id, expires), self.app_key)
        })
        log.debug('MIRACL server secret request: {0}'.format(url_params))
        httpclient = tornado.httpclient.HTTPClient()
        try:
            response = httpclient.fetch(url_params, **fetchConfig(url_params))
        except tornado.httpclient.HTTPError as e:
            log.error(e)
            raise SecretsError('Unable to get Server Secret from the MIRACL TA server')
        httpclient.close()

        try:
            data = json.loads(response.body)
        except ValueError as e:
            log.error(e)
            raise SecretsError('Invalid response from TA server')

        if 'serverSecret' not in data:
            raise SecretsError('serverSecret not in response from TA server')

        return data["serverSecret"]
Example #5
 def get_server_secret(self):
     """Generate server secret."""
     try:
         return crypto.get_server_secret(self.master_secret)
     except crypto.CryptoError as e:
         log.error(e)
         raise SecretsError('Server Secret generation failed')
Example #6
 def wrapper(*args, **kwargs):
     try:
         return callback(*args, **kwargs)
     except Exception:
         app_log.error("Uncaught exception in %s",
                       self.request.path, exc_info=True)
         self._abort()
Example #7
 def _generate_master_secret(self):
     """Generate the M-Pin Master Secret."""
     try:
         return crypto.mpin_random_generate(self.rng)
     except crypto.CryptoError as e:
         log.error(e)
         raise SecretsError('M-Pin Master Secret Generation Failed')
Example #8
 def finish_notebook(self, nbjson, download_url, home_url=None, msg=None, breadcrumbs=None):
     """render a notebook from its JSON body.
     
     download_url is required, home_url is not.
     
     msg is extra information for the log message when rendering fails.
     """
     if msg is None:
         msg = download_url
     try:
         app_log.debug("Requesting render of %s", download_url)
         with self.time_block("Rendered %s" % download_url):
             nbhtml, config = yield self.pool.submit(
                 render_notebook, self.exporter, nbjson, download_url,
                 config=self.config,
             )
     except NbFormatError as e:
         app_log.error("Invalid notebook %s: %s", msg, e)
         raise web.HTTPError(400, str(e))
     except Exception as e:
         app_log.error("Failed to render %s", msg, exc_info=True)
         raise web.HTTPError(400, str(e))
     else:
         app_log.debug("Finished render of %s", download_url)
     
     html = self.render_template('notebook.html',
         body=nbhtml,
         download_url=download_url,
         home_url=home_url,
         date=datetime.utcnow().strftime(date_fmt),
         breadcrumbs=breadcrumbs,
         **config)
     yield self.cache_and_finish(html)
Example #9
def update_stats(stats):
    """Get updated stats for each host
    
    If a host fails to reply,
    assume it is down and assign it zero availability and capacity
    """

    http_client = AsyncHTTPClient()
    futures = {}
    for host in stats.keys():
        app_log.debug("Checking stats on %s" % host)
        req = HTTPRequest(host + '/stats')
        futures[host] = http_client.fetch(req)
    
    for host, f in futures.items():
        try:
            reply = yield f
            data = json.loads(reply.body.decode('utf8'))
        except Exception as e:
            app_log.error("Failed to get stats for %s: %s", host, e)
            if host in stats:
                stats[host] = {'available': 0, 'capacity': 0, 'down': True}
        else:
            app_log.debug("Got stats from %s: %s", host, data)
            if host in stats:
                stats[host] = data
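Note: update_stats uses yield, so in its original module it is presumably decorated with @gen.coroutine. A minimal sketch of driving it once from the IOLoop (the host list below is a made-up assumption):

    from tornado import gen, ioloop

    @gen.coroutine
    def poll_once():
        stats = {'http://host-a:9999': {}, 'http://host-b:9999': {}}  # hypothetical hosts
        yield update_stats(stats)
        print(stats)

    ioloop.IOLoop.current().run_sync(poll_once)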
Example #10
    def ping_connection(connection, branch):
        if branch:
            # "branch" refers to a sub-connection of a connection,
            # we don't want to bother pinging on these.
            return

        # turn off "close with result".  This flag is only used with
        # "connectionless" execution, otherwise will be False in any case
        save_should_close_with_result = connection.should_close_with_result
        connection.should_close_with_result = False

        try:
            # run a SELECT 1.   use a core select() so that
            # the SELECT of a scalar value without a table is
            # appropriately formatted for the backend
            connection.scalar(select([1]))
        except exc.DBAPIError as err:
            # catch SQLAlchemy's DBAPIError, which is a wrapper
            # for the DBAPI's exception.  It includes a .connection_invalidated
            # attribute which specifies if this connection is a "disconnect"
            # condition, which is based on inspection of the original exception
            # by the dialect in use.
            if err.connection_invalidated:
                app_log.error("Database connection error, attempting to reconnect: %s", err)
                # run the same SELECT again - the connection will re-validate
                # itself and establish a new connection.  The disconnect detection
                # here also causes the whole connection pool to be invalidated
                # so that all stale connections are discarded.
                connection.scalar(select([1]))
            else:
                raise
        finally:
            # restore "close with result"
            connection.should_close_with_result = save_should_close_with_result
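This matches SQLAlchemy's pessimistic disconnect-handling recipe; ping_connection is normally registered as an engine_connect event listener. A minimal sketch, assuming an engine has already been created (the DSN below is a placeholder):

    from sqlalchemy import create_engine, event, exc, select  # exc and select are used inside ping_connection

    engine = create_engine("postgresql://user:pass@db-host/app")  # placeholder DSN

    # ping every connection as it is checked out, reconnecting if the connection went stale
    event.listen(engine, "engine_connect", ping_connection)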
Example #11
    def finish_notebook(self, nbjson, download_url, home_url=None, msg=None):
        """render a notebook from its JSON body.

        download_url is required, home_url is not.

        msg is extra information for the log message when rendering fails.
        """
        app_log.debug("finish_notebook begin")
        if msg is None:
            msg = download_url
        try:
            app_log.debug("Requesting render of %s", download_url)
            with self.time_block("Rendered %s" % download_url):
                nbhtml, config = yield self.pool.submit(
                    render_notebook, self.exporter, nbjson, download_url,
                )
        except NbFormatError as e:
            app_log.error("Invalid notebook %s: %s", msg, e)
            raise web.HTTPError(400, str(e))
        except Exception as e:
            app_log.error("Failed to render %s", msg, exc_info=True)
            raise web.HTTPError(400, str(e))
        else:
            app_log.debug("Finished render of %s", download_url)

        html = self.render_template(config['template'],
            body=nbhtml.replace("</script>", "ESCAPE_CLOSING_SCRIPT_TAG_MAGIC_STRING"),
            download_url=download_url,
            home_url=home_url,
            date=datetime.utcnow().strftime(date_fmt),
            **config)
        yield self.cache_and_finish(html)
Example #12
 def render_string(self, filename, **kwargs):
     '''
         Override render_string to use mako template.
          Like tornado's render_string method, this method also
          passes the request handler environment to the template engine.
     '''
     try:
         if not self.is_mobile():
             template = self.LOOK_UP.get_template(filename)
         else:
             template = self.LOOK_UP_MOBILE.get_template(filename)
         env_kwargs = dict(
             handler = self,
             request = self.request,
             # current_user = self.current_user
             locale = self.locale,
             _ = self.locale.translate,
             static_url = self.static_url,
             xsrf_form_html = self.xsrf_form_html,
             reverse_url = self.application.reverse_url,
         )
         env_kwargs.update(kwargs)
         return template.render(**env_kwargs)
     except:
         from mako.exceptions import RichTraceback
         tb = RichTraceback()
         for (module_name, line_no, function_name, line) in tb.traceback:
             print('File:{}, Line:{} in {}'.format(module_name, line_no, function_name))
             print(line)
         app_log.error('Render {} failed, {}:{}'.format(filename, tb.error.__class__.__name__, tb.error))
         raise HTTPError(500, 'Render page failed')
Example #13
    def release(self, container, replace_if_room=True):
        '''Shut down a container and delete its proxy entry.

        Destroy the container in an orderly fashion. If requested and capacity is remaining, create
        a new one to take its place.'''

        try:
            app_log.info("Releasing container [%s].", container)
            yield [
                self.spawner.shutdown_notebook_server(container.id),
                self._proxy_remove(container.path)
            ]
            app_log.debug("Container [%s] has been released.", container)
        except Exception as e:
            app_log.error("Unable to release container [%s]: %s", container, e)
            return

        if replace_if_room:
            running = yield self.spawner.list_notebook_servers(self.container_config,
                                                                    all=False)
            if len(running) + 1 <= self.capacity:
                app_log.debug("Launching a replacement container.")
                yield self._launch_container()
            else:
                app_log.info("Declining to launch a new container because [%i] containers are" +
                             " already running, and the capacity is [%i].",
                             len(running), self.capacity)
Example #14
 def error_callback(future):
     try:
         future.result()
     except Exception as e:
         if not isinstance(e, quiet_exceptions):
             app_log.error("Exception in Future %r after timeout",
                           future, exc_info=True)
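Since error_callback takes the resolved future as its argument, it can be attached with IOLoop.add_future, which invokes the callback with the future once it completes. A sketch, assuming some_future is any pending Tornado future:

    from tornado.ioloop import IOLoop

    # log, rather than silently drop, any exception raised after the timeout fired
    IOLoop.current().add_future(some_future, error_callback)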
Example #15
 def _data_load(self, data_string):
     try:
         return json.loads(data_string)
     except Exception as e:
         log.debug(Lazy(lambda: traceback.format_exc()))
         log.error(Lazy(lambda: 'Parsing message error: {0}'.format(repr(e))))
         raise e
Example #16
    def _on_dlx_received(self, channel, method, props, body):
        correlation_id = getattr(props, 'correlation_id', None)
        if correlation_id in self.callbacks_hash:
            cb = self.callbacks_hash.pop(correlation_id)
        else:
            log.error("Method callback %s is not found", correlation_id)
            channel.basic_ack(delivery_tag=method.delivery_tag)
            return

        try:
            dl = props.headers['x-death'][0]
            body = ExpirationError(
                "Dead letter received. Reason: {0}".format(dl.get('reason'))
            )
            body.reason = dl.get('reason')
            body.time = dl.get('time')
            body.expiration = int(dl.get('original-expiration')) / 1000

            if isinstance(cb, Future):
                self.io_loop.add_callback(cb.set_result, body)
            elif callable(cb):
                self.io_loop.add_callback(cb, body)
            else:
                log.error("Callback is not callable")
        finally:
            channel.basic_ack(delivery_tag=method.delivery_tag)
Example #17
    def cache_and_finish(self, content=""):
        """finish a request and cache the result
        
        does not actually call finish - if used in @web.asynchronous,
        finish must be called separately. But we never use @web.asynchronous,
        because we are using gen.coroutine for async.
        
        currently only works if:
        
        - result is not written in multiple chunks
        - custom headers are not used
        """
        self.write(content)
        short_url = self.truncate(self.request.path)
        cache_data = pickle.dumps({"headers": self.cache_headers, "body": content}, pickle.HIGHEST_PROTOCOL)
        request_time = self.request.request_time()
        # set cache expiry to 120x request time
        # bounded by cache_expiry_min,max
        # a 30 second render will be cached for an hour
        expiry = max(min(120 * request_time, self.cache_expiry_max), self.cache_expiry_min)

        if self.request.uri in self.max_cache_uris:
            # if it's a link from the front page, cache for a long time
            expiry = self.cache_expiry_max

        log = app_log.info if expiry > self.cache_expiry_min else app_log.debug
        log("caching (expiry=%is) %s", expiry, short_url)
        try:
            with self.time_block("cache set %s" % short_url):
                yield self.cache.set(self.cache_key, cache_data, int(time.time() + expiry))
        except Exception:
            app_log.error("cache set for %s failed", short_url, exc_info=True)
        else:
            app_log.debug("cache set finished %s", short_url)
Example #18
    def post(self):
        result = {'success': True}
        dirname = '/var/todother/uploads/'
        if self.request.files:
            try:
                upload_img = self.request.files['postfile'][0]
                rawname = upload_img['filename']
                destname = '%d%s' % (time.time(), ''.join(random.choice(string.ascii_lowercase + string.digits) for x in range(6)))
                thumbname = 'thumb_%s' % destname
                path = '%s/%s/pics/' % (dirname, self.current_user.user_id)
                if not os.path.exists(path):
                    os.makedirs(path)
                extension = os.path.splitext(rawname)[1]
                destname = ''.join((path, destname, extension))
                output_img = open(destname, 'wb')
                output_img.write(upload_img['body'])
                output_img.close()

                oimg = Image.open(destname)
                oimg.thumbnail((160, 160), resample=1)
                thumbname = ''.join((path, thumbname, extension))
                oimg.save(thumbname)
                result['thumbname'] = thumbname[len(dirname):]
                result['filename'] = destname[len(dirname):]
            except Exception as e:
                app_log.error(str(e))
                result['success'] = False
                result['err_info'] = 'File type unsupported'
Example #19
    def get_places(self, ll, q):
        url = FacebookComm.BASE_URL.format(endpoint=FacebookComm.SEARCH_ENDPOINT)
        place = None

        try:
            url += '&type=place&center={ll}&distance=100&q={q}'.format(ll=ll, q=q)

            log.info('Fetching Facebook places from [{0}]'.format(url))
            request = HTTPRequest(url=url, connect_timeout=options.http_request_timeout, request_timeout=options.http_request_timeout)
            response = yield self.client.fetch(request)

            if response.code != 200:
                raise FacebookError(response.code)

            body = json.loads(response.body)

            places = body['data']
            if len(places) > 0:
                place = places[0]

        except HTTPError as e:
            log.error('Facebook error [{0}] while calling [{1}]!'.format(e, url))
            raise Return(None)

        raise Return(place)
Example #20
 def _handle_connection(self, connection, address):
     if self.ssl_options is not None:
         assert ssl, "Python 2.6+ and OpenSSL required for SSL"
         try:
             connection = ssl_wrap_socket(connection,
                                          self.ssl_options,
                                          server_side=True,
                                          do_handshake_on_connect=False)
         except ssl.SSLError as err:
             if err.args[0] == ssl.SSL_ERROR_EOF:
                 return connection.close()
             else:
                 raise
         except socket.error as err:
             if err.args[0] == errno.ECONNABORTED:
                 return connection.close()
             else:
                 raise
     try:
         if self.ssl_options is not None:
             stream = SSLIOStream(connection, io_loop=self.io_loop)
         else:
             stream = IOStream(connection, io_loop=self.io_loop)
         self.handle_stream(stream, address)
     except Exception:
         app_log.error("Error in connection callback", exc_info=True)
Example #21
    def cache_and_finish(self, content=""):
        """finish a request and cache the result
        
        does not actually call finish - if used in @web.asynchronous,
        finish must be called separately. But we never use @web.asynchronous,
        because we are using gen.coroutine for async.
        
        currently only works if:
        
        - result is not written in multiple chunks
        - custom headers are not used
        """
        self.write(content)
        short_url = self.truncate(self.request.uri)
        bcontent = utf8(content)
        request_time = self.request.request_time()
        # set cache expiry to 120x request time
        # bounded by cache_expiry_min,max
        # a 30 second render will be cached for an hour
        expiry = max(min(120 * request_time, self.cache_expiry_max), self.cache_expiry_min)
        refer_url = self.request.headers.get("Referer", "").split("://")[-1]
        if refer_url == self.request.host + "/" and not self.get_argument("create", ""):
            # if it's a link from the front page, cache for a long time
            expiry = self.cache_expiry_max

        app_log.info("caching (expiry=%is) %s", expiry, short_url)
        try:
            with self.time_block("cache set %s" % short_url):
                yield self.cache.set(self.cache_key, bcontent, int(time.time() + expiry))
        except Exception:
            app_log.error("cache set for %s failed", short_url, exc_info=True)
        else:
            app_log.debug("cache set finished %s", short_url)
Example #22
 def _called(self):
     self._active = False
     self._reactor._removeDelayedCall(self)
     try:
         self._func()
     except:
         app_log.error("_called caught exception", exc_info=True)
Example #23
 def __init__(self, template_string, name="<string>", loader=None, compress_whitespace=None, autoescape=_UNSET):
     self.name = name
     if compress_whitespace is None:
         compress_whitespace = name.endswith(".html") or name.endswith(".js")
     if autoescape is not _UNSET:
         self.autoescape = autoescape
     elif loader:
         self.autoescape = loader.autoescape
     else:
         self.autoescape = _DEFAULT_AUTOESCAPE
     self.namespace = loader.namespace if loader else {}
     reader = _TemplateReader(name, escape.native_str(template_string))
     self.file = _File(self, _parse(reader, self))
     self.code = self._generate_python(loader, compress_whitespace)
     self.loader = loader
     try:
         # Under python2.5, the fake filename used here must match
         # the module name used in __name__ below.
         # The dont_inherit flag prevents template.py's future imports
         # from being applied to the generated code.
         self.compiled = compile(
             escape.to_unicode(self.code), "%s.generated.py" % self.name.replace(".", "_"), "exec", dont_inherit=True
         )
     except Exception:
         formatted_code = _format_code(self.code).rstrip()
         app_log.error("%s code:\n%s", self.name, formatted_code)
         raise
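For reference, this is the constructor behind tornado.template.Template; compiling and rendering a template string looks like this:

    from tornado.template import Template

    t = Template("<b>Hello {{ name }}!</b>")
    print(t.generate(name="world"))  # b'<b>Hello world!</b>'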
Example #24
 def __init__(self, *args, **kwargs):
     super(HttpRequest, self).__init__(*args, **kwargs)
     this.middleware_fac.set_request(self)
     try:
         this.middleware_fac.run_call(self)
     except Exception as ex:
         app_log.error(ex)
Example #25
    def send_error(self, status_code=500, **kwargs):
        """Sends the given HTTP error code to the browser.
        If `flush()` has already been called, it is not possible to send
        an error, so this method will simply terminate the response.
        If output has been written but not yet flushed, it will be discarded
        and replaced with the error page.
        Override `write_error()` to customize the error page that is returned.
        Additional keyword arguments are passed through to `write_error`.
        """
        if self._headers_written:
            gen_log.error("Cannot send error response after headers written")
            if not self._finished:
                self.finish()
            return
        # Need to keep headers
        #self.clear()

        reason = kwargs.get('reason')
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
            if isinstance(exception, HTTPError) and exception.reason:
                reason = exception.reason
        self.set_status(status_code, reason=reason)
        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            app_log.error("Uncaught exception in write_error", exc_info=True)
        if not self._finished:
            self.finish()
Example #26
 def get(self, user, repo, ref, path):
     raw_url = u"https://raw.github.com/{user}/{repo}/{ref}/{path}".format(
         user=user, repo=repo, ref=ref, path=quote(path)
     )
     blob_url = u"https://github.com/{user}/{repo}/blob/{ref}/{path}".format(
         user=user, repo=repo, ref=ref, path=quote(path),
     )
     with self.catch_client_error():
         response = yield self.client.fetch(raw_url)
     
     if response.effective_url.startswith("https://github.com/{user}/{repo}/tree".format(
         user=user, repo=repo
     )):
         tree_url = "/github/{user}/{repo}/tree/{ref}/{path}/".format(
             user=user, repo=repo, ref=ref, path=quote(path),
         )
         app_log.info("%s is a directory, redirecting to %s", raw_url, tree_url)
         self.redirect(tree_url)
         return
     
     filedata = response.body
     
     if path.endswith('.ipynb'):
         try:
             nbjson = response_text(response)
         except Exception as e:
             app_log.error("Failed to decode notebook: %s", raw_url, exc_info=True)
             raise web.HTTPError(400)
         yield self.finish_notebook(nbjson, raw_url,
             home_url=blob_url,
             msg="file from GitHub: %s" % raw_url,
         )
     else:
         self.set_header("Content-Type", "text/plain")
         self.cache_and_finish(filedata)
Example #27
    def _api_request(self, method, url, **kwargs):
        """Make an API request"""
        allow_404 = kwargs.pop('allow_404', False)
        headers = kwargs.setdefault('headers', {})
        headers.setdefault('Authorization', 'token %s' % self.api_token)
        try:
            r = requests.request(method, url, **kwargs)
        except requests.ConnectionError as e:
            app_log.error("Error connecting to %s: %s", self.api_url, e)
            msg = "Failed to connect to Hub API at %r." % self.api_url
            msg += "  Is the Hub accessible at this URL (from host: %s)?" % socket.gethostname()
            if '127.0.0.1' in self.api_url:
                msg += "  Make sure to set c.JupyterHub.hub_ip to an IP accessible to" + \
                       " single-user servers if the servers are not on the same host as the Hub."
            raise HTTPError(500, msg)

        data = None
        if r.status_code == 404 and allow_404:
            pass
        elif r.status_code == 403:
            app_log.error("I don't have permission to check authorization with JupyterHub, my auth token may have expired: [%i] %s", r.status_code, r.reason)
            app_log.error(r.text)
            raise HTTPError(500, "Permission failure checking authorization, I may need a new token")
        elif r.status_code >= 500:
            app_log.error("Upstream failure verifying auth token: [%i] %s", r.status_code, r.reason)
            app_log.error(r.text)
            raise HTTPError(502, "Failed to check authorization (upstream problem)")
        elif r.status_code >= 400:
            app_log.warning("Failed to check authorization: [%i] %s", r.status_code, r.reason)
            app_log.warning(r.text)
            raise HTTPError(500, "Failed to check authorization")
        else:
            data = r.json()

        return data
Example #28
 def get_client_secret(self, mpin_id):
     """Generate client secret."""
     try:
         return crypto.get_client_multiple(self.master_secret, mpin_id)
     except crypto.CryptoError as e:
         log.error(e)
         raise SecretsError('Client secret generation failed')
Example #29
    def _log_rate_limit(self, future):
        """log GitHub rate limit headers
        
        - error if 0 remaining
        - warn if 10% or less remain
        - debug otherwise
        """
        try:
            r = future.result()
        except HTTPError as e:
            r = e.response
        limit_s = r.headers.get("X-RateLimit-Limit", "")
        remaining_s = r.headers.get("X-RateLimit-Remaining", "")
        if not remaining_s or not limit_s:
            if r.code < 300:
                app_log.warn("No rate limit headers. Did GitHub change? %s", json.dumps(r.headers, indent=1))
            return

        remaining = int(remaining_s)
        limit = int(limit_s)
        if remaining == 0:
            jsondata = response_text(r)
            data = json.loads(jsondata)
            app_log.error("GitHub rate limit (%s) exceeded: %s", limit, data.get("message", "no message"))
            return

        if 10 * remaining > limit:
            log = app_log.debug
        else:
            log = app_log.warn
        log("%i/%i GitHub API requests remaining", remaining, limit)
Example #30
    def _run_callback(self, callback: Callable[[], Any]) -> None:
        """Runs a callback with error handling.

        For use in subclasses.
        """
        try:
            ret = callback()
            if ret is not None:
                from tornado import gen

                # Functions that return Futures typically swallow all
                # exceptions and store them in the Future.  If a Future
                # makes it out to the IOLoop, ensure its exception (if any)
                # gets logged too.
                try:
                    ret = gen.convert_yielded(ret)
                except gen.BadYieldError:
                    # It's not unusual for add_callback to be used with
                    # methods returning a non-None and non-yieldable
                    # result, which should just be ignored.
                    pass
                else:
                    self.add_future(ret, self._discard_future_result)
        except Exception:
            app_log.error("Exception in callback %r", callback, exc_info=True)
Example #31
 def login(self):
     mobile = self.get_argument('mobile', None).strip()
     password = self.get_argument('password', None).strip()
     if not mobile or not password:
         self.reply_json_error(1, u'用户名或者密码不能为空')
         self.redirect('/login')
         return
     try:
         u = User.select(User.q.mobile == mobile)
         if not u.count():
             self.reply_json_error(1, u"该手机号未注册")
             return
         user = u.getOne()
         salt = user.salt
         passwd = user.password
         m = hashlib.md5()
         m.update(password + salt)
         hashpassword = m.hexdigest()
         if not passwd == hashpassword:
             self.reply_json_error(1, u"手机号或者密码不正确")
             return
         ls = LoginStatus.select(LoginStatus.q.mobile == mobile)
         app_log.error(ls)
         if not ls.count():
             lls = LoginStatus(mobile=mobile,
                               status=1,
                               ip=self.request.remote_ip)
             if lls:
                 self.reply_json_data(0, u'登录成功')
         if ls.getOne().status == 1:
             self.reply_json_error(1, u'用户已经登录')
             self.redirect('/book')
             return
         else:
             ls.getOne().set(status=1,
                             login_time=datetime.now(),
                             ip=self.request.remote_ip)
         self.set_secure_cookie("sign", mobile, version=2, expires_days=1)
         self.set_secure_cookie("login", salt, version=2, expires_days=1)
         self.redirect('/book')
         return
     except Exception as e:
         app_log.error(str(e))
Example #32
 def log_exception(self, typ, value, tb):
     if isinstance(value, APIError):
         app_log.error(
             'API error %s: %s\n%r',
             value.error_id,
             value.log_message % value.args if value.log_message else '',
             self.request,
             exc_info=(typ, value, tb))
     elif isinstance(value, tornado.web.HTTPError):
         if value.log_message:
             format = "%d %s: " + value.log_message
             args = ([value.status_code, self._request_summary()] +
                     list(value.args))
             gen_log.warning(format, *args)
     else:
         value.error_id = APIError._generate_id()
         app_log.error("Uncaught exception %s %s\n%r",
                       self._request_summary(), value.error_id,
                       self.request, exc_info=(typ, value, tb))
Example #33
    async def cache_and_finish(self, content=''):
        """finish a request and cache the result

        currently only works if:

        - result is not written in multiple chunks
        - custom headers are not used
        """
        request_time = self.request.request_time()
        # set cache expiry to 120x request time
        # bounded by cache_expiry_min,max
        # a 30 second render will be cached for an hour
        expiry = max(
            min(120 * request_time, self.cache_expiry_max),
            self.cache_expiry_min,
        )

        if self.request.uri in self.max_cache_uris:
            # if it's a link from the front page, cache for a long time
            expiry = self.cache_expiry_max

        if expiry > 0:
            self.set_header("Cache-Control", "max-age=%i" % expiry)

        self.write(content)
        self.finish()

        short_url = self.truncate(self.request.path)
        cache_data = pickle.dumps({
            'headers' : self.cache_headers,
            'body' : content,
        }, pickle.HIGHEST_PROTOCOL)
        log = app_log.info if expiry > self.cache_expiry_min else app_log.debug
        log("caching (expiry=%is) %s", expiry, short_url)
        try:
            with time_block("cache set %s" % short_url):
                await self.cache.set(
                    self.cache_key, cache_data, int(time.time() + expiry),
                )
        except Exception:
            app_log.error("cache set for %s failed", short_url, exc_info=True)
        else:
            app_log.debug("cache set finished %s", short_url)
Example #34
 def post(self, cmd):
     ret = {}
     try:
         if cmd == "update":
             radio_number = self.get_argument("radio_number")
             if (radio_number == '1'):
                 # print("radio1")
                 radio = Config("radio1")
                 mode = self.get_argument("lora_mode")
                 preamble = self.get_argument("preamble")
                 spread = self.get_argument("spread")
                 base_band = self.get_argument("base_band")
                 program_radio = self.get_argument("program_ratio")
                 frequency = self.get_argument("frequency")
                 crc_enable = self.get_argument("crc_enable_value")
                 power = self.get_argument("power")
                 sync = self.get_argument("sync")
                 radio.set_item("mode", mode)
                 radio.set_item("preamble", preamble)
                 radio.set_item("sf", spread)
                 radio.set_item("bw", base_band)
                 radio.set_item("cr", program_radio)
                 radio.set_item("frequency", frequency)
                 radio.set_item("crc", crc_enable)
                 radio.set_item("power", power)
                 radio.set_item("sync", sync)
                 radio.save()
                 data = {"radio_number": "1"}
                 send_to_service('epd_service', '/radio/update', data=data)
             else:
                 radio = Config("radio2")
                 data = {"radio_number": "2"}
                 send_to_service('epd_service', '/radio/update', data=data)
             ret['status'] = 'success'
         elif cmd == "restart":
             radio_number = self.get_argument("radio_number")
             data = {"radio_number": radio_number}
             send_to_service('epd_service', 'radio/restart', data=data)
     except Exception as e:
         ret['status'] = 'failed'
         ret['err_msg'] = e.__repr__()
         LOG.error(e.__str__())
     self.write(ret)
Example #35
    def update_spider_log(self, spider_id=None, spider_oid=None, log_to_update=None, value=None):
        """ update log of a spider """

        app_log.info("... update_spider_log --- spider_id %s updating / log_to_update : %s", spider_id, log_to_update)

        # find the spider
        spider = self.application.coll_spiders.find_one({"_id": spider_oid})

        # update
        if spider is not None:
            self.application.coll_spiders.update_one(
                {"_id": spider_oid},
                {"$set": {"scraper_log.{}".format(log_to_update): value}},
                upsert=True
            )
            app_log.info("... update_spider_log --- spider updated...")
        else:
            app_log.error("... update_spider_log --- THERE IS NO spider_id %s", spider_id)
Example #36
 def post(self, method):
     ret = {}
     if method == 'update':
         try:
             device = self.get_argument('device')
             if device == 'eth':
                 self.sent_cmd("%s -i ethernet --command set --mode %s --addr %s --netmask %s" % \
                     (self.connect_path, self.get_argument('mode'), self.get_argument('wire_address'), self.get_argument('wire_netmask')))
                 ret['status'] = 'success'
             else:
                 self.sent_cmd("%s -i wifi --command set --mode %s --ssid %s --psk %s" % \
                     (self.connect_path, 'sta', self.get_argument('sta_ssid'), self.get_argument('sta_passwd')))
                 ret['status'] = 'success'
         except Exception as e:
             LOG.error(e.__str__())
             ret['status'] = 'failed'
             ret['err_msg'] = "配置失败"
         finally:
             self.write(ret)
Example #37
    def _get_from_service(self, _id):
        try:
            app_log.debug("UserInfo,get_from_service,_id=%s", _id)
            user_info = self._user_data.get(_id)
            data = {}
            if user_info is not None and "facebook_user_data" in user_info and "picture" in user_info[
                "facebook_user_data"] and "data" in user_info["facebook_user_data"]["picture"] and "url" in \
                    user_info["facebook_user_data"]["picture"]["data"]:

                data["profile_photo_url"] = user_info["facebook_user_data"][
                    "picture"]["data"]["url"]

                return data
            else:
                return None

        except:
            app_log.error("get_from_service,_id=%s", _id)
            return None
Example #38
 def get_option(self, name=None, oid=None):
     where = []
     param = dict()
     if name is not None:
         where.append("o.name=%(name)s")
         param['name'] = name
     if oid is not None:
         where.append("o.id=%(oid)s")
         param['oid'] = oid
     if where:
         sql = "SELECT * FROM t_options o WHERE {}".format(
             ' AND '.join(where))
         cursor = yield pool.execute(sql, param)
         result = cursor.fetchone()
         cursor.close()
         return munchify(result)
     else:
         log.error('参数不对, 获取系统配置失败')
         return None
Example #39
 def _handle_connection(self, connection, address):
     if self.ssl_options is not None:
         assert ssl, "Python 2.6+ and OpenSSL required for SSL"
         try:
             connection = ssl_wrap_socket(connection,
                                          self.ssl_options,
                                          server_side=True,
                                          do_handshake_on_connect=False)
         except ssl.SSLError as err:
             if err.args[0] == ssl.SSL_ERROR_EOF:
                 return connection.close()
             else:
                 raise
         except socket.error as err:
             # If the connection is closed immediately after it is created
             # (as in a port scan), we can get one of several errors.
             # wrap_socket makes an internal call to getpeername,
             # which may return either EINVAL (Mac OS X) or ENOTCONN
             # (Linux).  If it returns ENOTCONN, this error is
             # silently swallowed by the ssl module, so we need to
             # catch another error later on (AttributeError in
             # SSLIOStream._do_ssl_handshake).
             # To test this behavior, try nmap with the -sT flag.
             # https://github.com/tornadoweb/tornado/pull/750
             if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL):
                 return connection.close()
             else:
                 raise
     # IOStream and SSLIOStream are fairly large classes, defined in iostream.py
     try:
         if self.ssl_options is not None:
             stream = SSLIOStream(connection, io_loop=self.io_loop,
                                  max_buffer_size=self.max_buffer_size,
                                  read_chunk_size=self.read_chunk_size)
         else:
             stream = IOStream(connection, io_loop=self.io_loop,
                               max_buffer_size=self.max_buffer_size,
                               read_chunk_size=self.read_chunk_size)
         future = self.handle_stream(stream, address)
         if future is not None:
             self.io_loop.add_future(future, lambda f: f.result())
     except Exception:
         app_log.error("Error in connection callback", exc_info=True)
Example #40
  def _with_retries(self, fn, *args, **kwargs):
      '''Attempt a Docker API call.

      If an error occurs, retry up to "max_tries" times before letting the exception propagate
      up the stack.''' 
      max_tries = kwargs.get('max_tries', RETRIES)
      try:
          if 'max_tries' in kwargs:
              del kwargs['max_tries']
          result = yield fn(*args, **kwargs)
          raise gen.Return(result)
      except (docker.errors.APIError, requests.exceptions.RequestException) as e:
          app_log.error("Encountered a Docker error with {} ({} retries remain): {}".format(fn.__name__, max_tries, e))
          if max_tries > 0:
              kwargs['max_tries'] = max_tries - 1
              result = yield self._with_retries(fn, *args, **kwargs)
              raise gen.Return(result)
          else:
              raise e
Example #41
    def get_client(self, readonly):

        result = None

        pool = self._s_pool if readonly else self._m_pool

        try:

            conn = yield pool._get_conn()

            result = DBClient(pool, conn, readonly)

        except Exception as err:

            pool._close_conn(conn)

            app_log.error(err)

        return result
Example #42
 def get_result(self):
     result_list = []
     exc_info = None
     for f in self.children:
         try:
             result_list.append(f.get_result())
         except Exception as e:
             if exc_info is None:
                 exc_info = sys.exc_info()
             else:
                 if not isinstance(e, self.quiet_exceptions):
                     app_log.error("Multiple exceptions in yield list",
                                   exc_info=True)
     if exc_info is not None:
         raise_exc_info(exc_info)
     if self.keys is not None:
         return dict(zip(self.keys, result_list))
     else:
         return list(result_list)
Example #43
    async def get_notebook_data(self, secure, netloc, url):
        proto = 'http' + secure
        netloc = url_unescape(netloc)

        if '/?' in url:
            url, query = url.rsplit('/?', 1)
        else:
            query = None

        remote_url = u"{}://{}/{}".format(proto, netloc, quote(url))

        if query:
            remote_url = remote_url + '?' + query
        if not url.endswith('.ipynb'):
            # this is how we handle relative links (files/ URLs) in notebooks
            # if it's not a .ipynb URL and it is a link from a notebook,
            # redirect to the original URL rather than trying to render it as a notebook
            refer_url = self.request.headers.get('Referer', '').split('://')[-1]
            if refer_url.startswith(self.request.host + '/url'):
                self.redirect(remote_url)
                return

        parse_result = urlparse(remote_url)

        robots_url = parse_result.scheme + "://" + parse_result.netloc + "/robots.txt"

        public = False # Assume non-public

        try:
            robots_response = await self.fetch(robots_url)
            robotstxt = response_text(robots_response)
            rfp = robotparser.RobotFileParser()
            rfp.set_url(robots_url)
            rfp.parse(robotstxt.splitlines())
            public = rfp.can_fetch('*', remote_url)
        except httpclient.HTTPError as e:
            app_log.debug("Robots.txt not available for {}".format(remote_url),
                    exc_info=True)
            public = True
        except Exception as e:
            app_log.error(e)

        return remote_url, public
Example #44
 def callback(f):
     unfinished_children.remove(f)
     if not unfinished_children:
         result_list = []
         for f in children:
             try:
                 result_list.append(f.result())
             except Exception as e:
                 if future.done():
                     if not isinstance(e, quiet_exceptions):
                         app_log.error("Multiple exceptions in yield list",
                                       exc_info=True)
                 else:
                     future.set_exc_info(sys.exc_info())
         if not future.done():
             if keys is not None:
                 future.set_result(dict(zip(keys, result_list)))
             else:
                 future.set_result(result_list)
Example #45
    def post(self):
        unknown = self.request.files.get('unknown')
        if len(unknown) != 1:
            app_log.error("number of unknown picture must be 1")
            self.render("result.html",
                        name="error",
                        tolerance="number of unknown picture must be 1")
            return
        info = unknown[0]
        filename, content_type, body = info['filename'], info[
            'content_type'], info['body']
        try:
            unknown_face = Face(body, "unknown")
            app_log.info(unknown_face.locations)
        except Exception as e:
            app_log.error(e)
            self.render("result.html",
                        name="error",
                        tolerance=str(e),
                        deviation="",
                        image_file_name="")
            return

        check_rst = face_recognition.face_distance(
            self.application.face_store.encodings, unknown_face.encoding_list)
        # unknown_face.draw()
        app_log.info(check_rst)

        check_rst_sorted = sorted([(index, value)
                                   for index, value in enumerate(check_rst)],
                                  key=lambda x: x[1])
        detected_face = self.application.face_store.profiles[
            check_rst_sorted[0][0]]
        deviation = round(check_rst_sorted[0][1], 3)
        app_log.info(
            f"name: {detected_face.name}, deviation: {deviation}, tolerance: {TOLERANCE}"
        )
        print(detected_face.file_path.name)
        self.render("result.html",
                    name=detected_face.name,
                    tolerance=TOLERANCE,
                    deviation=deviation,
                    image_file_name=detected_face.file_path.name)
Example #46
    def health_check(self):
        app_log.info("Performing Health Check!")
        main_records = yield self.zone.list_dns_records(self.main_domain)
        drain_records = yield self.zone.list_dns_records(self.drain_domain)

        app_log.info("{} in main, {} in drain".format(len(main_records), len(drain_records)))

        to_drain = []
        to_main = []

        total = 0.0
        response_times = []

        for record in main_records:
            try:
                ip = record['content']
                resp = yield self.http_client.fetch(ip)
                app_log.debug(resp)

                # Total is in seconds, convert to ms
                record_total = resp.time_info['total']*1000
                total += record_total
                response_times.append(record_total)

            except HTTPError as e:
                app_log.error(e)
                to_drain.append(ip)
                
        # Log to statuspage
        if self.status_page is not None:
            average_response = ( total/len(main_records) )
            app_log.info("Average Response: {} ms".format(average_response))

            self.status_page.report(average_response, 
                                    metric_id=self.status_page.metric_ids['average response'])
            self.status_page.report(len(main_records),
                                    metric_id=self.status_page.metric_ids['active nodes'])
            self.status_page.report(max(response_times),
                                    metric_id=self.status_page.metric_ids['max response'])
            self.status_page.report(min(response_times),
                                    metric_id=self.status_page.metric_ids['min response'])
            self.status_page.report(len(to_drain),
                                    metric_id=self.status_page.metric_ids['unresponsive nodes'])
Example #47
    def get(self, provider_prefix, _unescaped_spec):
        prefix = '/v2/' + provider_prefix
        spec = self.get_spec_from_request(prefix)
        spec = spec.rstrip("/")
        try:
            self.get_provider(provider_prefix, spec=spec)
        except HTTPError:
            raise
        except Exception as e:
            app_log.error(
                "Failed to construct provider for %s/%s",
                provider_prefix,
                spec,
            )
            # FIXME: 400 assumes it's the user's fault (?)
            # maybe we should catch a special InvalidSpecError here
            raise HTTPError(400, str(e))

        provider_spec = f'{provider_prefix}/{spec}'
        social_desc = f"{SPEC_NAMES[provider_prefix]}: {spec}"
        nbviewer_url = None
        if provider_prefix == "gh":
            # we can only produce an nbviewer URL for github right now
            nbviewer_url = 'https://nbviewer.jupyter.org/github'
            org, repo_name, ref = spec.split('/', 2)
            # NOTE: tornado unquotes query arguments too -> notebooks%2Findex.ipynb becomes notebooks/index.ipynb
            filepath = self.get_argument('filepath', '').lstrip('/')
            blob_or_tree = 'blob' if filepath else 'tree'
            nbviewer_url = f'{nbviewer_url}/{org}/{repo_name}/{blob_or_tree}/{ref}/{filepath}'
        self.render_template(
            "loading.html",
            base_url=self.settings['base_url'],
            badge_base_url=self.settings['badge_base_url'],
            provider_spec=provider_spec,
            social_desc=social_desc,
            nbviewer_url=nbviewer_url,
            # urlpath=self.get_argument('urlpath', None),
            submit=True,
            google_analytics_code=self.settings['google_analytics_code'],
            google_analytics_domain=self.settings['google_analytics_domain'],
            extra_footer_scripts=self.settings['extra_footer_scripts'],
        )
Example #48
    def _launch_container(self, path=None):
        '''Launch a new notebook server in a fresh container, register it with the proxy, and
        add it to the pool.'''

        if path is None:
            path = user_prefix()

        app_log.debug("Launching new notebook server for user [%s].", path)
        create_result = yield self.spawner.create_notebook_server(base_path=path,
                                                                  container_config=self.container_config)
        container_id, host_ip, host_port = create_result
        app_log.debug("Created notebook server for [%s] at [%s:%s]", path, host_ip, host_port)

        # Wait for the server to launch within the container before adding it to the pool or
        # serving it to a user.
        yield self._wait_for_server(host_ip, host_port, path)

        http_client = AsyncHTTPClient()
        headers = {"Authorization": "token {}".format(self.proxy_token)}

        proxy_endpoint = "{}/api/routes/{}".format(self.proxy_endpoint, path)
        body = json.dumps({
            "target": "http://{}:{}".format(host_ip, host_port),
            "container_id": container_id,
        })

        app_log.debug("Proxying notebook [%s] to port [%s].", path, host_port)
        req = HTTPRequest(proxy_endpoint,
                          method="POST",
                          headers=headers,
                          body=body)
        try:
            yield http_client.fetch(req)
            app_log.info("Proxied notebook [%s] to port [%s].", path, host_port)
        except HTTPError as e:
            app_log.error("Failed to create proxy route to [%s]: %s", path, e)

        container = PooledContainer(id=container_id, path=path)
        app_log.info("Adding container [%s] to the pool.", container)
        self.available.append(container)

        raise gen.Return(container)
Example #49
    def cache_and_finish(self, content=''):
        """finish a request and cache the result
        
        does not actually call finish - if used in @web.asynchronous,
        finish must be called separately. But we never use @web.asynchronous,
        because we are using gen.coroutine for async.
        
        currently only works if:
        
        - result is not written in multiple chunks
        - custom headers are not used
        """
        self.write(content)
        short_url = self.truncate(self.request.path)
        bcontent = utf8(content)
        request_time = self.request.request_time()
        # set cache expiry to 120x request time
        # bounded by cache_expiry_min,max
        # a 30 second render will be cached for an hour
        expiry = max(
            min(120 * request_time, self.cache_expiry_max),
            self.cache_expiry_min,
        )
        refer_url = self.request.headers.get('Referer', '').split('://')[-1]
        if refer_url == self.request.host + '/' and not self.get_argument(
                'create', ''):
            # if it's a link from the front page, cache for a long time
            expiry = self.cache_expiry_max

        log = app_log.info if expiry > self.cache_expiry_min else app_log.debug
        log("caching (expiry=%is) %s", expiry, short_url)
        try:
            with self.time_block("cache set %s" % short_url):
                yield self.cache.set(
                    self.cache_key,
                    bcontent,
                    int(time.time() + expiry),
                )
        except Exception:
            app_log.error("cache set for %s failed", short_url, exc_info=True)
        else:
            app_log.debug("cache set finished %s", short_url)
Example #50
    async def create(self, job_type: str, job_parameters: dict, **kwargs):

        log.info(f'New job request ({job_type}) for user: {self.current_user}')

        if kwargs:
            msg = f'Illegal arguments provided: {", ".join([k for k in kwargs.keys()])}.'
            log.info(msg)
            raise HTTPError(400, msg)

        if not isinstance(job_parameters, dict):
            msg = f'Unexpected argument, "job_parameters" should be in ' \
                  f'dict-like, but got {type(job_parameters)}'
            log.error(msg)
            raise HTTPError(400, msg)

        # Find the right job, and check parameters
        task = jobs.registry.get(job_type)

        if task is None:
            raise HTTPError(404, f'Job {job_type!r} not found.')
        log.info(f'Job {job_type!r} found.')

        task_id = str(uuid.uuid4())  # Job id used for tracking.
        await self.application.redis.sadd(self.user_jobs_key, task_id)

        task_status = await self.get_task_status(task_id)
        await task_status.create(job_type=job_type,
                                 job_parameters=job_parameters,
                                 status=Status.REGISTERED,
                                 user=self.current_user,
                                 created_at=datetime.utcnow().isoformat(
                                     sep='T', timespec='seconds') + 'Z')

        try:
            task.apply_async(kwargs=job_parameters,
                             task_id=task_id,
                             headers=self.request.headers)
        except Exception as e:
            raise tornado.web.HTTPError(400, str(e))

        self.write(await task_status.get())
        self.finish()
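jobs.registry and the task objects are not shown in this snippet; the apply_async call with task_id and headers suggests Celery-style tasks. The sketch below is only an assumption about what a registered job and the registry lookup might look like; the broker URL, task name, and registry dict are all hypothetical:

from celery import Celery

celery_app = Celery('jobs', broker='redis://localhost:6379/0')

@celery_app.task(bind=True, name='echo')
def echo(self, **job_parameters):
    # The handler above forwards job_parameters as kwargs and pins the task_id.
    return job_parameters

# A minimal registry mapping job_type -> task, matching jobs.registry.get(job_type).
registry = {'echo': echo}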
Example #51
0
    def get_version(cls, settings, path):
        """Generate the version string to be used in static URLs.

        This method may be overridden in subclasses (but note that it
        is a class method rather than a static method).  The default
        implementation uses a hash of the file's contents.

        ``settings`` is the `Application.settings` dictionary and ``path``
        is the relative location of the requested asset on the filesystem.
        The returned value should be a string, or ``None`` if no version
        could be determined.
        """
        # begin subclass override:
        static_paths = settings['static_path']
        if isinstance(static_paths, basestring):
            static_paths = [static_paths]
        roots = tuple(
            os.path.abspath(os.path.expanduser(p)) + os.path.sep
            for p in static_paths)

        try:
            abs_path = filefind(path, roots)
        except IOError:
            app_log.error("Could not find static file %r", path)
            return None

        # end subclass override

        with cls._lock:
            hashes = cls._static_hashes
            if abs_path not in hashes:
                try:
                    with open(abs_path, "rb") as f:
                        hashes[abs_path] = hashlib.md5(f.read()).hexdigest()
                except Exception:
                    app_log.error("Could not open static file %r", path)
                    hashes[abs_path] = None
            hsh = hashes.get(abs_path)
            if hsh:
                return hsh[:5]
        return None
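To have Tornado actually use a get_version override like the one above, the handler class is usually wired in through the Application settings. A minimal sketch, assuming a subclass named MultiRootStaticFileHandler that carries the override (the class name and path are placeholders):

import tornado.web

class MultiRootStaticFileHandler(tornado.web.StaticFileHandler):
    # get_version from the example above would be defined here.
    pass

app = tornado.web.Application(
    [],
    static_path='/srv/app/static',
    static_handler_class=MultiRootStaticFileHandler,
)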
Example #52
0
    def _get_customer_server_secret_share(self, expires):
        path = 'serverSecret'
        url_params = url_concat(
            '{0}/{1}'.format(options.DTALocalURL, path),
            {
                'app_id': self.app_id,
                'expires': expires,
                'signature': signMessage('{0}{1}{2}'.format(path, self.app_id, expires), self.app_key)
            })
        log.debug('customer server secret request: {0}'.format(url_params))

        httpclient = tornado.httpclient.HTTPClient()

        import socket
        # Make at most 30 attempts to get server secret from local TA
        for attempt in range(30):
            try:
                response = httpclient.fetch(url_params)
            except (tornado.httpclient.HTTPError, socket.error) as e:
                log.error(e)
                log.error(
                    'Unable to get Server Secret from the customer TA server. '
                    'Retrying...')
                time.sleep(2)
                continue

            httpclient.close()
            break
        else:
            # Max attempts reached
            raise SecretsError(
                'Unable to get Server Secret from the customer TA server.')

        try:
            data = json.loads(response.body)
        except ValueError:
            raise SecretsError('TA server response contains invalid JSON')

        if 'serverSecret' not in data:
            raise SecretsError('serverSecret not in response from TA server')

        return data["serverSecret"].decode("hex")
Example #53
0
    async def get(self):
        """GET /oauth/authorization
        Render oauth confirmation page:
        "Server at ... would like permission to ...".
        Users accessing their own server or a blessed service
        will skip confirmation.
        """

        uri, http_method, body, headers = self.extract_oauth_params()
        try:
            scopes, credentials = self.oauth_provider.validate_authorization_request(
                uri, http_method, body, headers)
            credentials = self.add_credentials(credentials)
            client = self.oauth_provider.fetch_by_client_id(
                credentials['client_id'])
            if not self.needs_oauth_confirm(self.current_user, client):
                app_log.debug(
                    "Skipping oauth confirmation for %s accessing %s",
                    self.current_user,
                    client.description,
                )
                # this is the pre-1.0 behavior for all oauth
                self._complete_login(uri, headers, scopes, credentials)
                return

            # Render oauth 'Authorize application...' page
            auth_state = None
            self.render_template(
                "oauth.html",
                auth_state=auth_state,
                scopes=scopes,
                oauth_client=client,
            )

        # Errors that should be shown to the user on the provider website
        except oauth2.FatalClientError as e:
            raise web.HTTPError(e.status_code, e.description)

        # Errors embedded in the redirect URI back to the client
        except oauth2.OAuth2Error as e:
            app_log.error("OAuth error: %s", e.description)
            self.redirect(e.in_uri(e.redirect_uri))
Example #54
0
 def do_thing(self, now):
     need = self.zcount(now, right=0)
     range_data = self.zrange2data(need, now)
     rsp = None
     for no, data in range_data.items():
         action = data and data.get("action")
         if action == "request":
             rsp = request(**(data.get(action) or {}))
         elif action == "xxx":
             # handle other action types here
             pass
         else:
             log.error(f"Delayer|do_thing|nothing|{data}")
         if rsp and rsp["status"] == 0:
             self.remove(no)
             log.info(f"Delayer|do_thing|sucess&rm|{no}")
         else:
             # TODO: failed responses should be pushed to a retry queue; implement later
             log.error(f"Delayer|do_thing|rsp_error_but_rm|{no}")
             self.remove(no)
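zcount, zrange2data, and remove are helpers on the same class that are not included in this snippet. Read together with the loop above, they look like a Redis sorted set keyed by due-timestamp. The sketch below is one possible implementation using redis-py; the key names and JSON payload layout are assumptions:

import json
import time
import redis

class Delayer:
    def __init__(self, key='delayer', client=None):
        self.key = key
        self.redis = client or redis.Redis()

    def add(self, no, data, delay_seconds):
        # Score is the absolute due time; the member is the job id.
        self.redis.zadd(self.key, {no: time.time() + delay_seconds})
        self.redis.hset(self.key + ':data', no, json.dumps(data))

    def zcount(self, now, right=0):
        # Number of jobs whose due time has already passed.
        return self.redis.zcount(self.key, 0, now + right)

    def zrange2data(self, need, now):
        # Map of due job ids to their stored payloads.
        nos = self.redis.zrangebyscore(self.key, 0, now, start=0, num=need)
        return {no: json.loads(self.redis.hget(self.key + ':data', no) or 'null')
                for no in nos}

    def remove(self, no):
        self.redis.zrem(self.key, no)
        self.redis.hdel(self.key + ':data', no)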
Example #55
0
 def add_option(self, name, value):
     option = yield self.get_option(name=name)
     if not option:
         sql = "INSERT INTO t_options (`name`, `value`) VALUE (%(name)s, %(value)s)"
         with (yield pool.Connection()) as conn:
             with conn.cursor() as cursor:
                 try:
                     yield cursor.execute(sql, dict(name=name, value=value))
                 except pymysql.Error as e:
                     yield conn.rollback()
                     log.error('Failed to add system option #{}'.format(e))
                     flag, msg = False, 'Failed to add system option #{}'.format(e)
                 else:
                     yield conn.commit()
                     log.info('System option added successfully')
                     flag, msg = cursor.lastrowid, 'System option added successfully'
     else:
         log.error('System option already exists; cannot add it again')
         flag, msg = False, 'System option already exists; cannot add it again'
     return flag, msg
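add_option is a Tornado generator coroutine (note the yields), so a caller has to drive it with yield as well, assuming it is wrapped with gen.coroutine. A minimal hedged usage sketch; the handler class, the db attribute, and the argument names are placeholders:

import tornado.web
from tornado import gen

class OptionHandler(tornado.web.RequestHandler):
    @gen.coroutine
    def post(self):
        # self.db is assumed to expose the add_option coroutine shown above.
        flag, msg = yield self.db.add_option(
            self.get_argument('name'), self.get_argument('value'))
        self.write({'ok': bool(flag), 'msg': msg})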
Example #56
0
 def add_message(self, user_id, m_type, content, status=1):
     sql = "INSERT INTO t_messages (`userId`, `type`, `content`, `status`) VALUE (%(userId)s, %(type)s, %(content)s, %(status)s)"
     with (yield pool.Connection()) as conn:
         with conn.cursor() as cursor:
             try:
                 yield cursor.execute(
                     sql,
                     dict(userId=user_id,
                          type=m_type,
                          content=json.dumps(content, ensure_ascii=False),
                          status=status))
             except pymysql.Error as e:
                 yield conn.rollback()
                 log.error('Failed to add feed message #{}'.format(e))
                 flag, msg = False, 'Failed to add feed message #{}'.format(e)
             else:
                 yield conn.commit()
                 log.info('Feed message added successfully')
                 flag, msg = cursor.lastrowid, 'Feed message added successfully'
     return flag, msg
Example #57
0
 def get_email_for_user(self, userid):
     tries = 2
     while tries:
         try:
             userinfo = self._get_userinfo(userid)
             app_log.debug('Userinfo for %s: %s', userid, userinfo)
             if not userinfo.get('email', '').strip():
                 raise Exception('No email configured for user %s' % userid)
             return Address(display_name=userinfo.get('display_name', ''),
                            addr_spec=userinfo['email'])
         except InvalidGrantError as e:
             # log this error
             app_log.error("Invalid Grant Error %s", e)
             self._fetch_token()
             tries -= 1
         except TokenExpiredError as e:
             # our refreshtoken is gone :(
             app_log.error("Token Expired Error %s", e)
             self._fetch_token()
             tries -= 1
Example #58
0
 def add_project(self, name):
     pj = yield self.get_project(name=name)
     if not pj:
         sql = "INSERT INTO t_projects (`name`) VALUE (%(name)s)"
         with (yield pool.Connection()) as conn:
             with conn.cursor() as cursor:
                 try:
                     yield cursor.execute(sql, dict(name=name))
                 except pymysql.Error as e:
                     yield conn.rollback()
                     log.error('Failed to add project #{}'.format(e))
                     flag, msg = False, 'Failed to add project #{}'.format(e)
                 else:
                     yield conn.commit()
                     log.info('Project added successfully')
                     flag, msg = cursor.lastrowid, 'Project added successfully'
         return flag, msg
     else:
         log.error('Project {} already exists'.format(name))
         return False, 'Project {} already exists'.format(name)
Example #59
0
def future_set_exception_unless_cancelled(
        future: "Union[futures.Future[_T], Future[_T]]",
        exc: BaseException) -> None:
    """Set the given ``exc`` as the `Future`'s exception.

    If the Future is already canceled, logs the exception instead. If
    this logging is not desired, the caller should explicitly check
    the state of the Future and call ``Future.set_exception`` instead of
    this wrapper.

    Avoids ``asyncio.InvalidStateError`` when calling ``set_exception()`` on
    a cancelled `asyncio.Future`.

    .. versionadded:: 6.0

    """
    if not future.cancelled():
        future.set_exception(exc)
    else:
        app_log.error("Exception after Future was cancelled", exc_info=exc)
Example #60
0
    def get(self, provider_prefix, spec):
        try:
            provider = self.get_provider(provider_prefix, spec=spec)
        except web.HTTPError:
            raise
        except Exception as e:
            app_log.error("Failed to construct provider for %s/%s")
            # FIXME: 400 assumes it's the user's fault (?)
            # maybe we should catch a special InvalidSpecError here
            raise web.HTTPError(400, str(e))

        self.render_template(
            "index.html",
            url=provider.get_repo_url(),
            ref=provider.unresolved_ref,
            filepath=self.get_argument('filepath', None),
            urlpath=self.get_argument('urlpath', None),
            submit=True,
            google_analytics_code=self.settings['google_analytics_code'],
        )