Example #1
def get_env(config_uri, base_url):
    """
    Return a preconfigured paste environment object. Sets up the WSGI
    application and ensures that webassets knows to load files from
    ``h:static`` regardless of the ``webassets.base_dir`` setting.
    """
    request = Request.blank('', base_url=base_url)
    env = paster.bootstrap(config_uri, request)
    request.root = env['root']

    request.sentry = raven.Client(release=raven.fetch_package_version('h'))

    # Ensure that the webassets URL is absolute
    request.webassets_env.url = urlparse.urljoin(base_url,
                                                 request.webassets_env.url)

    # Disable webassets caching and manifest generation
    request.webassets_env.cache = False
    request.webassets_env.manifest = False

    # By default, webassets will use its base_dir setting as its search path.
    # When building extensions, we change base_dir so as to build assets
    # directly into the extension directories. As a result, we have to add
    # back the correct search path.
    request.webassets_env.append_path(resolve('h:static').abspath(),
                                      request.webassets_env.url)

    return env
Example #2
def __init__(self):
    """Create the Eliot Observer"""
    if os.environ.get("SENTRY_DSN"):
        self.raven_client = raven.Client(release=raven.fetch_package_version("autopush"))
    else:
        self.raven_client = None
    self.logger = Logger()
Example #3
def __init__(self, logger_name, log_level="debug", log_format="json",
             log_output="stdout", sentry_dsn=None,
             firehose_delivery_stream=None):
    self.logger_name = "-".join([
        logger_name,
        pkg_resources.get_distribution("autopush").version
    ])
    self._filename = None
    self.log_level = LogLevel.lookupByName(log_level)
    if log_output == "stdout":
        self._output = sys.stdout
    elif log_output == "none":
        self._output = None
    else:
        self._filename = log_output
        self._output = "file"
    if log_format == "json":
        self.format_event = self.json_format
    else:
        self.format_event = formatEventAsClassicLogText
    if sentry_dsn:
        self.raven_client = raven.Client(
            release=raven.fetch_package_version("autopush"))
    else:
        self.raven_client = None
    if firehose_delivery_stream:
        self.firehose = FirehoseProcessor(
            stream_name=firehose_delivery_stream)
    else:
        self.firehose = None
Example #4
def setup_sentry(name, channel, dsn):
    """
    Set up Sentry error reporting using Taskcluster secrets
    """

    # Detect environment
    task_id = os.environ.get("TASK_ID")
    if task_id is not None:
        site = "taskcluster"
    elif "DYNO" in os.environ:
        site = "heroku"
    else:
        site = "unknown"

    sentry_client = raven.Client(
        dsn=dsn,
        site=site,
        name=name,
        environment=channel,
        release=raven.fetch_package_version(f"code-review-{name}"),
    )

    if task_id is not None:
        # Add a Taskcluster task id when available
        # It will be shown in the Additional Data section on the dashboard
        sentry_client.context.merge({"extra": {"task_id": task_id}})

    sentry_handler = raven.handlers.logbook.SentryHandler(
        sentry_client, level=logbook.WARNING, bubble=True
    )
    sentry_handler.push_application()
Example #5
def __init__(self,
             logger_name,
             log_level="debug",
             log_format="json",
             log_output="stdout",
             sentry_dsn=None,
             firehose_delivery_stream=None):
    self.logger_name = "-".join(
        [logger_name,
         pkg_resources.get_distribution("autopush").version])
    self._filename = None
    self.log_level = LogLevel.lookupByName(log_level)
    if log_output == "stdout":
        self._output = sys.stdout
    elif log_output == "none":
        self._output = None
    else:
        self._filename = log_output
        self._output = "file"
    if log_format == "json":
        self.format_event = self.json_format
    else:
        self.format_event = formatEventAsClassicLogText
    if sentry_dsn:
        self.raven_client = raven.Client(
            release=raven.fetch_package_version("autopush"))
    else:
        self.raven_client = None
    if firehose_delivery_stream:
        self.firehose = FirehoseProcessor(
            stream_name=firehose_delivery_stream)
    else:
        self.firehose = None
Example #6
def __init__(self):
    """Create the Eliot Observer"""
    if os.environ.get("SENTRY_DSN"):
        self.raven_client = raven.Client(
            release=raven.fetch_package_version("autopush"))
    else:
        self.raven_client = None
    self.logger = Logger()
Example #7
def get_client(request):
    """
    Get a Sentry client configured with context data for the current request.
    """
    client = raven.Client(release=raven.fetch_package_version('h'),
                          transport=GeventedHTTPTransport)
    client.http_context(http_context_data(request))
    client.user_context(user_context_data(request))
    return client
Example #8
    def __init__(self):
        super(AboutDialogInfo, self).__init__()

        self.Copyright = u"Copyright © 2003 - 2018 Broad Institute, Inc." \
                         u"\nAll rights reserved."

        self.Name = "CellProfiler"

        self.Version = raven.fetch_package_version("cellprofiler")
Example #9
    def __init__(self):
        super(AboutDialogInfo, self).__init__()

        self.Copyright = u"Copyright © 2003 - 2017 Broad Institute, Inc." \
                         u"\nAll rights reserved."

        self.Name = "CellProfiler"

        self.Version = raven.fetch_package_version("cellprofiler")
Example #10
    def __init__(self):
        super(AboutDialogInfo, self).__init__()

        self.SetCopyright(
            u"Copyright © 2003 - 2018 Broad Institute, Inc.\nAll rights reserved."
        )

        self.SetName("CellProfiler")

        self.SetVersion(raven.fetch_package_version("cellprofiler"))
Example #11
def get_client(settings):
    """
    Get a Sentry client configured with context data for the current request.
    """
    # If the `raven.transport` setting is set to 'gevent', then we use the
    # raven-supplied gevent compatible transport.
    transport_name = settings.get('raven.transport')
    transport = GeventedHTTPTransport if transport_name == 'gevent' else None

    client = raven.Client(release=raven.fetch_package_version('h'),
                          transport=transport)
    return client
Example #12
def get_current_release(app):
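    # Prefer the installed 'solar' package version; fall back to the git SHA
    # of the application checkout when the distribution is not installed.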
    try:
        current_release = fetch_package_version('solar')
    except pkg_resources.DistributionNotFound:
        pass
    else:
        return current_release

    try:
        current_release = fetch_git_sha(os.path.dirname(app.instance_path))
    except InvalidGitRepository:
        pass
    else:
        return current_release
Example #13
    def sentrytest(self):
        """return true if we managed to send a sample raven exception"""
        if not (self.config.sentry and self.config.sentry.dsn):
            raise SentryConfigurationError('sentry dsn not configured')

        try:
            version = raven.fetch_package_version('crontabber')
        except Exception:
            version = None
            self.config.logger.warning(
                'Unable to extract version of crontabber', exc_info=True)
        client = raven.Client(dsn=self.config.sentry.dsn, release=version)
        identifier = client.captureMessage('Sentry test sent from crontabber')
        self.config.logger.info('Sentry successful identifier: %s', identifier)
        return True
Example #14
async def gw_init(app):
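    # Wire up routes, monitoring (Datadog and Sentry) and the shared
    # database/Redis pools for the gateway application.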
    app.on_response_prepare.append(on_prepare)
    app.router.add_route('GET', r'/v{version:\d+}', hello)
    app['status'] = GatewayStatus.STARTING
    app['datadog'] = DummyDatadog()
    app['sentry'] = DummySentry()
    if datadog_available:
        if app.config.datadog_api_key is None:
            log.warning('datadog logging disabled (missing API key)')
        else:
            datadog.initialize(api_key=app.config.datadog_api_key,
                               app_key=app.config.datadog_app_key)
            app['datadog'] = datadog
            log.info('datadog logging enabled')
    if raven_available:
        if app.config.raven_uri is None:
            log.info(
                'skipping Sentry initialization due to missing DSN URI...')
        else:
            app['sentry'] = raven.Client(
                app.config.raven_uri,
                release=raven.fetch_package_version('backend.ai-manager'))
            log.info('sentry logging enabled')

    app['dbpool'] = await create_engine(
        host=app.config.db_addr[0],
        port=app.config.db_addr[1],
        user=app.config.db_user,
        password=app.config.db_password,
        dbname=app.config.db_name,
        echo=bool(app.config.verbose),
        minsize=4,
        maxsize=16,
        timeout=30,
        pool_recycle=30,
    )
    app['redis_stat_pool'] = await aioredis.create_redis_pool(
        app.config.redis_addr.as_sockaddr(),
        timeout=3.0,
        encoding='utf8',
        db=REDIS_STAT_DB)
    app.middlewares.append(exception_middleware_factory)
    app.middlewares.append(api_middleware_factory)
Example #15
    def sentrytest(self):
        """return true if we managed to send a sample raven exception"""
        if not (self.config.sentry and self.config.sentry.dsn):
            raise SentryConfigurationError('sentry dsn not configured')

        try:
            version = raven.fetch_package_version('crontabber')
        except Exception:
            version = None
            self.config.logger.warning(
                'Unable to extract version of crontabber',
                exc_info=True
            )
        client = raven.Client(
            dsn=self.config.sentry.dsn,
            release=version
        )
        identifier = client.captureMessage(
            'Sentry test sent from crontabber'
        )
        self.config.logger.info(
            'Sentry successful identifier: %s', identifier
        )
        return True
Example #16
ALLOWED_DOMAINS: Set = set(
    dm.strip() for dm in os.getenv('ALLOWED_DOMAINS', '').split(',')
    if dm.strip())
CACHE_LIVE_TIME: int = int(os.getenv('CACHE_LIVE_TIME', 3600))
SENTRY_DSN: Optional[str] = os.getenv('SENTRY_DSN')
_ENABLE_CB = is_yesish(os.getenv('ENABLE_CIRCUIT_BREAKER', '0'))
_CB_FAIL_MAX: int = int(os.getenv('CIRCUIT_BREAKER_FAIL_MAX', 5))
_CB_RESET_TIMEOUT: int = int(os.getenv('CIRCUIT_BREAKER_RESET_TIMEOUT', 60))
_BREAKERS = defaultdict(lambda: Failsafe(circuit_breaker=CircuitBreaker(
    maximum_failures=_CB_FAIL_MAX, reset_timeout_seconds=_CB_RESET_TIMEOUT)))

if SENTRY_DSN:
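    # Report errors asynchronously via the aiohttp transport, tagging events
    # with the installed 'prerender' package version.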
    sentry = raven.Client(
        SENTRY_DSN,
        transport=AioHttpTransport,
        release=raven.fetch_package_version('prerender'),
        site='Prerender',
    )
else:
    sentry = None


def _save_to_cache(key: str, data: bytes, format: str = 'html') -> None:
    try:
        cache.set(key, data, CACHE_LIVE_TIME, format)
    except Exception:
        logger.exception('Error writing cache')
        if sentry:
            sentry.captureException()

Example #17
async def gw_init(app):
    # should be done in create_app() in other modules.
    app.router.add_route('GET', r'', hello)
    app.on_response_prepare.append(on_prepare)

    # legacy redirects
    app.router.add_route('GET', r'/v{version:\d+}/authorize',
                         lambda request: web.HTTPFound('/v3/auth/test'))

    # populate public interfaces
    app['config_server'] = ConfigServer(app['config'].etcd_addr,
                                        app['config'].namespace)

    app['status'] = GatewayStatus.STARTING
    app['datadog'] = DummyDatadog()
    app['sentry'] = DummySentry()
    app['datadog.enabled'] = False
    app['sentry.enabled'] = False
    if datadog_available:
        if app['config'].datadog_api_key is None:
            log.warning('Datadog logging is disabled (missing API key).')
        else:
            datadog.initialize(api_key=app['config'].datadog_api_key,
                               app_key=app['config'].datadog_app_key)
            app['datadog'] = datadog
            app['datadog.enabled'] = True
            log.info('Datadog logging is enabled.')
    if raven_available:
        if app['config'].raven_uri is None:
            log.warning(
                'Sentry error reporting is disabled (missing DSN URI).')
        else:
            app['sentry'] = raven.Client(
                app['config'].raven_uri,
                release=raven.fetch_package_version('backend.ai-manager'))
            app['sentry.enabled'] = True
            log.info('Sentry error reporting is enabled.')

    app['dbpool'] = await create_engine(
        host=app['config'].db_addr[0],
        port=app['config'].db_addr[1],
        user=app['config'].db_user,
        password=app['config'].db_password,
        dbname=app['config'].db_name,
        echo=bool(app['config'].verbose),
        # TODO: check the throughput impacts of DB/redis pool sizes
        minsize=4,
        maxsize=16,
        timeout=30,
        pool_recycle=30,
    )
    app['redis_live'] = await aioredis.create_redis(
        app['config'].redis_addr.as_sockaddr(),
        timeout=3.0,
        encoding='utf8',
        db=REDIS_LIVE_DB)
    app['redis_stat'] = await aioredis.create_redis(
        app['config'].redis_addr.as_sockaddr(),
        timeout=3.0,
        encoding='utf8',
        db=REDIS_STAT_DB)
    app['redis_image'] = await aioredis.create_redis(
        app['config'].redis_addr.as_sockaddr(),
        timeout=3.0,
        encoding='utf8',
        db=REDIS_IMAGE_DB)

    loop = asyncio.get_event_loop()
    dispatcher = EventDispatcher(app)
    app['event_dispatcher'] = dispatcher
    app['event_subscriber'] = loop.create_task(event_subscriber(dispatcher))

    app['registry'] = AgentRegistry(app['config_server'], app['dbpool'],
                                    app['redis_stat'], app['redis_live'],
                                    app['redis_image'])
    await app['registry'].init()
Example #18
AUTO_MERGE_APPROVAL_COUNT = int(os.environ.get('AUTO_MERGE_APPROVAL_COUNT', 3))

BITBUCKET_OAUTH_KEY = os.environ.get('BITBUCKET_OAUTH_KEY', '')
BITBUCKET_OAUTH_SECRET = os.environ.get('BITBUCKET_OAUTH_SECRET', '')

BITBUCKET_USERNAME = os.environ.get('BITBUCKET_USERNAME', '')
BITBUCKET_PASSWORD = os.environ.get('BITBUCKET_PASSWORD', '')

BADWOLF_DATA_DIR = os.environ.get('BADWOLF_DATA_DIR', '/var/lib/badwolf')
BADWOLF_LOG_DIR = os.environ.get('BADWOLF_LOG_DIR',
                                 os.path.join(BADWOLF_DATA_DIR, 'log'))
BADWOLF_REPO_DIR = os.environ.get('BADWOLF_REPO_DIR',
                                  os.path.join(BADWOLF_DATA_DIR, 'repos'))
if DEBUG:
    if platform.system() == 'Darwin':
        # On macOS, tempfile.gettempdir function doesn't return '/tmp'
        # But Docker for Mac can not mount the path returned by tempfile.gettempdir
        # by default, so let's hardcode it to '/tmp'
        BADWOLF_REPO_DIR = '/tmp/badwolf'  # nosec
    else:
        BADWOLF_REPO_DIR = os.path.join(tempfile.gettempdir(), 'badwolf')

# Sentry Release
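# Use the version of the installed 'badwolf' distribution; if the lookup
# fails, SENTRY_RELEASE is simply left undefined.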
try:
    SENTRY_RELEASE = raven.fetch_package_version('badwolf')
except Exception:  # pragma: no cover
    pass

# deansi color override
deansi.variations[0] = ('black', '#333', 'gray')
Example #19
    tags['os'] = 'OSX_{}'.format(platform.mac_ver()[0])
elif sys.platform.startswith('win32'):
    tags['os'] = 'Windows_{}'.format(platform.win32_ver()[1])
else:
    tags['os'] = '{}'.format(platform.linux_distribution()[0])

try:
    tags['gpu'] = gpulist()
except CUDAbinException:
    tags['gpu'] = 'no_cudabin'
    logger.error("CUDAbinException: Could not get gpulist")

tags['pyqt'] = QtCore.QT_VERSION_STR
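# Record the installed version of each related package as a tag, silently
# skipping any package whose version cannot be determined.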
for p in ('numpy', 'pyopencl', 'pyopengl', 'spimagine', 'gputools', 'llspy'):
    try:
        tags[p] = fetch_package_version(p)
    except Exception:
        pass

ip = ''
try:
    ip = re.search('"([0-9.]*)"',
                   str(urlopen("http://ip.jsontest.com/").read())).group(1)
except Exception:
    pass

client = Client(
    'https://*****:*****@sentry.io/221111',
    release=llspy.__version__,
    include_paths=['llspy', 'spimagine', 'gputools'],
    environment=env,
Example #20
SENTRY_DSN: Optional[str] = os.getenv('SENTRY_DSN')
_ENABLE_CB = is_yesish(os.getenv('ENABLE_CIRCUIT_BREAKER', '0'))
_CB_FAIL_MAX: int = int(os.getenv('CIRCUIT_BREAKER_FAIL_MAX', 5))
_CB_RESET_TIMEOUT: int = int(os.getenv('CIRCUIT_BREAKER_RESET_TIMEOUT', 60))
_BREAKERS = defaultdict(
    lambda: Failsafe(circuit_breaker=CircuitBreaker(
        maximum_failures=_CB_FAIL_MAX,
        reset_timeout_seconds=_CB_RESET_TIMEOUT
    ))
)

if SENTRY_DSN:
    sentry = raven.Client(
        SENTRY_DSN,
        transport=AioHttpTransport,
        release=raven.fetch_package_version('prerender'),
        site='Prerender',
    )
else:
    sentry = None


def _save_to_cache(key: str, data: bytes, format: str = 'html') -> None:
    try:
        cache.set(key, data, CACHE_LIVE_TIME, format)
    except Exception:
        logger.exception('Error writing cache')
        if sentry:
            sentry.captureException()

Example #21
import asyncio
import logging.config

import click
import prometheus_client
import raven

from . import _tls as tls
from ._click import AsyncCommand
from ._server import Server
from .bigquery import BigQueryClient
from .core import Linehaul


__version__ = raven.fetch_package_version("linehaul")


@click.command(
    cls=AsyncCommand,
    context_settings={"auto_envvar_prefix": "LINEHAUL"},
)
@click.option("--bind", default="0.0.0.0")
@click.option("--port", type=int, default=512)
@click.option("--token")
@click.option("--account")
@click.option("--key", type=click.File("r"))
@click.option("--reuse-port/--no-reuse-port", default=True)
@click.option(
    "--tls-ciphers",
    default="ECDHE+CHACHA20:ECDH+AES128GCM:ECDH+AES128:!SHA:!aNULL:!eNULL",