예제 #1
0
def create_app(environment):
    """Application factory: build a Flask app configured for *environment*."""
    flask_app = Flask(__name__)
    flask_app.config.from_object(get_config(environment))
    flask_app.register_blueprint(api)
    return flask_app
예제 #2
0
def mock_config():
    """get config, used for testing default config"""
    cfg = config.get_config()
    # Fail loudly when the environment has not been configured at all.
    if cfg is None:
        raise EnvironmentError("_config environment is not set.")
    return cfg
예제 #3
0
    def test_get_config(self):
        """Exercise get_config: default values, overrides, and missing required vars."""
        variables = (
            ("CONFIG_SETTING", REQUIRED, ""),
            ("CONFIG_OPTIONAL", "default", ""),
        )

        # Required value supplied; optional falls back to its declared default.
        result = get_config(variables, dict(CONFIG_SETTING="hello"))
        self.assertEqual(result["CONFIG_SETTING"], "hello")
        self.assertEqual(result["CONFIG_OPTIONAL"], "default")

        # Optional value explicitly present in the source mapping wins.
        result = get_config(
            variables, dict(CONFIG_SETTING="hello", CONFIG_OPTIONAL="override")
        )
        self.assertEqual(result["CONFIG_SETTING"], "hello")
        self.assertEqual(result["CONFIG_OPTIONAL"], "override")

        # A missing REQUIRED variable must raise ConfigException.
        with self.assertRaises(ConfigException):
            get_config((("CONFIG_SETTING", REQUIRED, ""),), dict())
예제 #4
0
async def init_app():
    """
    Initializes web application.
    Gets config file, init routes...
    """
    application = web.Application(logger=_logger)
    application['config'] = config.get_config()
    # Workers must be running before routes are wired, since the route
    # handlers receive the executor.
    executor_pool = await worker.init_workers(application)
    init_routes(application, executor_pool)
    return application
예제 #5
0
    def test_get_config(self):
        """Verify defaults, explicit overrides, and the required-missing error path."""
        variables = (
            ('CONFIG_SETTING', REQUIRED, ''),
            ('CONFIG_OPTIONAL', 'default', ''),
        )

        # (source mapping, expected value of CONFIG_OPTIONAL)
        cases = [
            (dict(CONFIG_SETTING='hello'), 'default'),
            (dict(CONFIG_SETTING='hello', CONFIG_OPTIONAL='override'), 'override'),
        ]
        for source, expected_optional in cases:
            result = get_config(variables, source)
            self.assertEqual(result['CONFIG_SETTING'], 'hello')
            self.assertEqual(result['CONFIG_OPTIONAL'], expected_optional)

        # Omitting a REQUIRED variable must raise.
        with self.assertRaises(ConfigException):
            get_config((('CONFIG_SETTING', REQUIRED, ''), ), dict())
def test_virus_scan_handler(mocker, data_builder, as_public, file_form):
    """Exercise the virus-scan callback endpoint: signature auth, state updates,
    payload validation, and the already-scanned rejection path."""
    # prepare
    mock_set_virus_scan_state = mocker.patch(
        'api.callbacks.virus_scan_handler.liststorage.FileStorage.set_virus_scan_state'
    )
    mock_exec_op = mocker.patch(
        'api.callbacks.virus_scan_handler.liststorage.FileStorage.exec_op',
        return_value={'virus_scan': {
            'state': 'quarantined'
        }})
    acquisition_id = '000000000000000000000000'
    endpoint = 'callbacks/virus-scan/acquisitions/{}/files/test.csv'.format(
        acquisition_id)
    url = urljoin(config.get_config()['site']['api_url'].rstrip('/') + '/',
                  endpoint)

    # permission denied without signature
    resp = as_public.post('/' + endpoint, json={'state': 'clean'})
    assert not resp.ok
    assert resp.status_code == 403

    # Grab only the query string of a signed URL; it carries the signature.
    signed_query = urlparse.urlparse(
        signed_urls.generate_signed_url(url, method='POST')).query

    def post_state(state):
        """POST the given scan state through the signed endpoint."""
        return as_public.post('/{}?{}'.format(endpoint, signed_query),
                              json={'state': state})

    # valid request, clean
    assert post_state('clean').ok
    mock_set_virus_scan_state.assert_called_with(
        _id='000000000000000000000000',
        query_params={'name': 'test.csv'},
        state='clean')
    mock_set_virus_scan_state.reset_mock()

    # valid request, virus
    assert post_state('virus').ok
    mock_set_virus_scan_state.assert_called_with(
        _id='000000000000000000000000',
        query_params={'name': 'test.csv'},
        state='virus')

    # invalid payload
    assert not post_state('wrong state').ok

    # state already set
    mock_exec_op.return_value = {'virus_scan': {'state': 'clean'}}
    assert not post_state('virus').ok
예제 #7
0
def create_app():
    """Application factory: configure Flask, bind extensions, register routes.

    Returns:
        Flask: the fully initialized application instance.
    """
    app = Flask(__name__)
    app.config.from_object(get_config())

    # Bind extensions to this app instance (app-factory pattern).
    db.init_app(app)
    mail.init_app(app)
    # NOTE(review): CORS is wide open ("*" origins) — confirm intended.
    cors.init_app(app, resources={"*": {"origins": "*"}})

    # Deferred import — presumably avoids a circular import at module load; confirm.
    from api.routes import university_api

    app.register_blueprint(university_api, url_prefix=os.environ.get("API_PREFIX", "/api"))

    return app
예제 #8
0
"""
@author:xda
@file:Msg.py
@time:2020/12/15
"""
import re

from api import receive
from api import reply
from api.model_process import accessIncrement
from api.config import get_config
from abc import abstractmethod, ABCMeta
from datahub import cb_info
from datahub.cb_info import CBInfo

# Module-wide settings, loaded once at import time from api.config.
CONFIG = get_config()
token = CONFIG['token']  # access token from config — presumably for the messaging API; confirm
MAX_COUNT = CONFIG['access_max']  # maximum access count threshold — TODO confirm units/semantics
IGNORE_RESPONSE = CONFIG['ignore']  # config value for ignored messages — TODO confirm semantics
LIMIT_RESPONSE = CONFIG['limit_response']  # response used when the access limit applies — TODO confirm


class MsgBase:
    """Base class for incoming messages: keeps the raw payload and its parsed form."""

    def __init__(self, msg):
        # Raw message payload; parsed eagerly on construction via parse().
        self.msg = msg
        # Parsed representation — produced by api.receive.parse_xml, so msg is
        # presumably an XML string; confirm against callers.
        self.recMsg = self.parse(self.msg)

    def parse(self, msg):
        """Parse the raw message with api.receive.parse_xml and return the result."""
        return receive.parse_xml(msg)
예제 #9
0
파일: collect.py 프로젝트: ehlertjd/core-1
def collect_db_metrics():
    """Collect metrics from mongodb, including version and job states.

    Reads version info, raw DB/collection stats, job-state counts, access-log
    events, gear versions, and device status from MongoDB, and publishes each
    as a gauge on the module-level ``values`` metrics registry. Any failure is
    logged and swallowed so metrics collection cannot crash the caller.
    """
    try:
        # Get version info
        epoch = datetime(1970, 1, 1)  # reference point for converting datetimes to epoch seconds
        version_info = config.get_version()
        if version_info:
            values.DB_VERSION.set(version_info.get('database', 0))

            release = version_info.get('release', 'UNKNOWN')
            values.RELEASE_VERSION.labels(release).set(1)

            flywheel_version = version_info.get('flywheel_release', 'UNKNOWN')
            values.FLYWHEEL_VERSION.labels(flywheel_version).set(1)

        # Get raw DB stats
        db_stats = config.db.command('dbstats')
        values.DB_DATA_SIZE.set(db_stats['dataSize'])
        values.DB_STORAGE_SIZE.set(db_stats['storageSize'])
        values.DB_OBJECTS.set(db_stats['objects'])

        # Get jobs info: one gauge sample per known job state
        for state in JOB_STATES:
            count = config.db.jobs.count({'state': state})
            values.JOBS_BY_STATE.labels(state).set(count)

        # Find the oldest pending job; NaN signals "no pending jobs"
        oldest_jobs = list(config.db.jobs.find({'state': 'pending', 'created': {'$exists': 1}},
            {'created': 1}, sort=[('created', 1)], limit=1))
        if oldest_jobs:
            oldest_job_timestamp = (oldest_jobs[0]['created'] - epoch).total_seconds()
        else:
            oldest_job_timestamp = float('NaN')
        values.OLDEST_PENDING_JOB.set(oldest_job_timestamp)

        # Get raw collection stats for every collection, including per-index sizes
        for collection_name in config.db.list_collection_names():
            stats = config.db.command('collstats', collection_name)

            values.COLLECTION_COUNT.labels(collection_name).set(stats['count'])
            values.COLLECTION_SIZE.labels(collection_name).set(stats['size'])
            values.COLLECTION_STORAGE_SIZE.labels(collection_name).set(stats['storageSize'])
            for index_name, index_size in stats.get('indexSizes', {}).items():
                values.COLLECTION_INDEX_SIZE.labels(collection_name, index_name).set(index_size)
            values.COLLECTION_TOTAL_INDEX_SIZE.labels(collection_name).set(stats['totalIndexSize'])

        # Get access logs of type user login; the negative lookahead excludes
        # internal @flywheel.io accounts
        login_count = config.log_db.access_log.find({'access_type': 'user_login', 'origin.id': {'$regex': '@(?!flywheel\\.io)'}}).count()
        values.USER_LOGIN_COUNT.set(login_count)

        ### Last Event Times Collection
        # Get the last user_login (again excluding @flywheel.io accounts)
        last_event = config.log_db.access_log.find_one({'access_type': 'user_login', 'origin.id': {'$regex': '@(?!flywheel\\.io)'}},
                                                       sort=[('timestamp', -1)])
        if last_event:
            time_since = last_event['timestamp'] - epoch
            values.LAST_EVENT_TIME.labels('user_login').set(time_since.total_seconds())

        # Get the last session_creation
        last_event = config.db.sessions.find_one({}, sort=[('created', -1)])
        if last_event:
            time_since = last_event['created'] - epoch
            values.LAST_EVENT_TIME.labels('session_created').set(time_since.total_seconds())

        # Get the last job_queued by system and user
        last_event = config.db.jobs.find_one({'origin.type': 'system'}, sort=[('created', -1)])
        if last_event:
            time_since = last_event['created'] - epoch
            values.LAST_EVENT_TIME.labels('job_queued_by_system').set(time_since.total_seconds())

        last_event = config.db.jobs.find_one({'origin.type': 'user'}, sort=[('created', -1)])
        if last_event:
            time_since = last_event['created'] - epoch
            values.LAST_EVENT_TIME.labels('job_queued_by_user').set(time_since.total_seconds())

        # Get gear versions; job_count_by_gear maps gear_id -> number of jobs run
        gear_count = 0
        job_count_by_gear = { d['_id']: d['count'] for d in config.db.jobs.aggregate([
            {
                '$group': {
                    '_id': '$gear_id',
                    'count': {'$sum':1}
                }
            }
        ])}

        for gear_doc in config.db.gears.find():
            gear = gear_doc.get('gear', {})
            name = gear.get('name', 'UNKNOWN')
            version = gear.get('version', 'UNKNOWN')
            created = str(gear_doc.get('created', 'UNKNOWN'))
            count = job_count_by_gear.get(str(gear_doc['_id']), 0)
            values.GEAR_VERSIONS.labels(name, version, created).set(count)
            gear_count = gear_count + 1
        values.COLLECTION_COUNT.labels('gears').set(gear_count)

        # Auth provider types: 1 if the provider key is configured, else 0
        auth_config = config.get_config()['auth']
        for key in AuthProviders.keys():
            values.AUTH_PROVIDER_TYPES.labels(key).set(int(key in auth_config))

        # Get devices
        device_count = 0
        device_storage = containerstorage.ContainerStorage('devices', use_object_id=True)
        devices = device_storage.get_all_el(None, None, None)
        device_statuses = get_device_statuses(devices)
        status_counts = {}
        for device in devices:
            device_id = str(device['_id'])
            device_type = device.get('type') or device.get('method') or 'UNKNOWN'
            device_name = device.get('name', 'UNKNOWN')
            last_seen = device.get('last_seen')
            if last_seen:
                # NOTE(review): naive datetime.now() is compared to stored last_seen —
                # presumably both are UTC-naive; confirm.
                since_last_seen = (datetime.now() - last_seen).total_seconds()
            else:
                since_last_seen = -1  # -1 marks "never seen"
            interval = device.get('interval', -1)

            # Set per-device gauges, labeled (type, name, id)
            device_label = [device_type, device_name, device_id]
            values.DEVICE_TIME_SINCE_LAST_SEEN.labels(*device_label).set(since_last_seen)
            values.DEVICE_INTERVAL.labels(*device_label).set(interval)

            # Increment count by type
            device_status = device_statuses[device_id]['status']
            status_key = (device_type, device_status)
            current_count = status_counts.setdefault(status_key, 0)
            status_counts[status_key] = current_count + 1

            device_count = device_count + 1

        # Device count
        values.COLLECTION_COUNT.labels('devices').set(device_count)

        # Status count
        for label, count in status_counts.items():
            values.DEVICE_STATUS_COUNT.labels(*label).set(count)

    # Intentionally broad: any failure is logged rather than propagated.
    except: # pylint: disable=bare-except
        log.critical('Error collecting db metrics', exc_info=True)
예제 #10
0
def test_upload_with_virus_scan_enabled(mocker, as_public, as_user, as_drone, as_admin, data_builder, file_form, api_db):
    """End-to-end test of virus-scan quarantine: trusted uploads bypass scanning,
    user uploads are quarantined until a webhook callback marks them clean."""
    # setup
    mock_get_feature = mocker.patch('api.placer.config.get_feature', return_value={'virus_scan': True})
    mock_config = config.get_config()
    mock_config['webhooks']['virus_scan'] = None
    # Fix: this mock previously rebound (shadowed) mock_get_feature, making the
    # later return_value update below look like it targeted the feature mock.
    mock_get_config = mocker.patch('api.placer.config.get_config', return_value=mock_config)
    mock_webhook_post = mocker.patch('api.webhooks.base.Session.post')
    orig_find = config.db['acquisitions'].find
    def wrap_find(*args, **kwargs):
        # Drop kwargs so the spy delegates only the filter argument.
        return orig_find(args[0])
    mocker.patch.object(config.db['acquisitions'], 'find', wraps=wrap_find)
    project = data_builder.create_project()
    session = data_builder.create_session()
    acquisition = data_builder.create_acquisition(session=session)

    gear_config = {'param': {'type': 'string', 'pattern': '^default|custom$', 'default': 'default'}}
    gear = data_builder.create_gear(gear={'version': '0.0.1', 'config': gear_config})

    # create rule
    rule_json = {
        'gear_id': gear,
        'name': 'csv-job-trigger-rule',
        'any': [],
        'not': [],
        'all': [
            {'type': 'file.type', 'value': 'tabular data'},
        ]
    }

    r = as_admin.post('/projects/' + project + '/rules', json=rule_json)
    assert r.ok
    rule = r.json['_id']

    # upload file as drone
    file_name = 'test.csv'
    r = as_drone.post('/acquisitions/' + acquisition + '/files', POST=file_form(file_name))
    assert r.ok
    # job was created via rule since the file was uploaded by a trusted origin
    gear_jobs = [job for job in api_db.jobs.find({'gear_id': gear})]
    assert len(gear_jobs) == 1
    # file uploaded by drone won't be quarantined
    r = as_drone.get('/acquisitions/' + acquisition + '/files/test.csv')
    assert r.ok

    uid = as_user.get('/users/self').json['_id']
    r = as_drone.post('/projects/' + project + '/permissions', json={'_id': uid, 'access': 'admin'})
    assert r.ok

    # user uploads fails if not webhook configure
    r = as_user.post('/acquisitions/' + acquisition + '/files', POST=file_form(
        file_name, meta={'name': file_name, 'type': 'csv'}))
    assert not r.ok
    assert r.status_code == 500

    # set webhook (mock_config is the same dict the patched get_config returns,
    # so the in-place mutation is enough; re-setting return_value is belt-and-braces)
    mock_config['webhooks']['virus_scan'] = 'http://localhost'
    mock_get_config.return_value = mock_config

    # upload file as user
    r = as_user.post('/acquisitions/' + acquisition + '/files', POST=file_form((file_name, 'some;content')))
    assert r.ok

    # job was not created via rule since the file is quarantined
    gear_jobs = [job for job in api_db.jobs.find({'gear_id': gear})]
    assert len(gear_jobs) == 1

    # user uploaded file is quarantined
    r = as_user.get('/acquisitions/' + acquisition + '/files/test.csv')
    assert not r.ok
    assert r.status_code == 400

    _, kwargs = mock_webhook_post.call_args_list[0]
    webhook_payload = json.loads(kwargs['data'])
    # can download the file using the signed url
    parsed_url = urlparse.urlparse(webhook_payload['file_download_url'])
    download_endpoint = parsed_url.path.replace('/api', '')
    r = as_public.get('{}?{}'.format(download_endpoint, parsed_url.query))
    assert r.ok
    assert r.body == 'some;content'
    # can use the signed response url to send back the virus scan result
    parsed_url = urlparse.urlparse(webhook_payload['response_url'])
    response_endpoint = parsed_url.path.replace('/api', '')
    # mark the file as clean using
    r = as_public.post('{}?{}'.format(response_endpoint, parsed_url.query), json={'state': 'clean'})
    assert r.ok

    # now the file is accessible
    r = as_user.get('/acquisitions/' + acquisition + '/files/test.csv')
    assert r.ok

    # job was created via rule since the file is marked as clean
    gear_jobs = [job for job in api_db.jobs.find({'gear_id': gear})]
    assert len(gear_jobs) == 2
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from starlette_exporter import PrometheusMiddleware, handle_metrics

from api.config import get_config
from api.db import db
from api.public import api as public_api
from api.utils.logger import logger_config

# Module-level logger for this entry point.
logger = logger_config(__name__)

# Settings are read once at import time via the project's get_config().
settings = get_config()
app = FastAPI(
    title=settings.PROJECT_NAME,
    version=settings.VERSION,
    description=settings.DESCRIPTION,
    docs_url="/",  # serve the interactive API docs at the root path
    # openapi_tags=tags_metadata, # to provide custom information on the swagger
)

app.include_router(public_api)

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is the
# most permissive CORS posture — confirm this is intended for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# prometheus metrics: https://prometheus.io/