Example No. 1
    def validate(cls, validator_context):
        config = validator_context.config

        if not "DEFAULT_TAG_EXPIRATION" in config:
            # Old style config
            return

        try:
            convert_to_timedelta(
                config["DEFAULT_TAG_EXPIRATION"]).total_seconds()
        except ValueError as ve:
            raise ConfigValidationException("Invalid default expiration: %s" %
                                            ve.message)

        if config["DEFAULT_TAG_EXPIRATION"] not in config.get(
                "TAG_EXPIRATION_OPTIONS", []):
            raise ConfigValidationException(
                "Default expiration must be in expiration options set")

        for ts in config.get("TAG_EXPIRATION_OPTIONS", []):
            try:
                convert_to_timedelta(ts)
            except ValueError:
                raise ConfigValidationException(
                    "Invalid tag expiration option: %s" % ts)
Example No. 2
def change_tag_expiration(tag_id, expiration_datetime):
    """
    Changes the expiration of the specified tag to the given expiration datetime.

    If the expiration datetime is None, then the tag is marked as not expiring. Returns a tuple of
    the previous expiration timestamp in seconds (if any), and whether the operation succeeded.
    """
    try:
        tag = Tag.get(id=tag_id)
    except Tag.DoesNotExist:
        return (None, False)

    new_end_ms = None
    min_expire_sec = convert_to_timedelta(
        config.app_config.get("LABELED_EXPIRATION_MINIMUM", "1h"))
    max_expire_sec = convert_to_timedelta(
        config.app_config.get("LABELED_EXPIRATION_MAXIMUM", "104w"))

    if expiration_datetime is not None:
        lifetime_start_ts = int(tag.lifetime_start_ms // 1000)

        offset = timegm(expiration_datetime.utctimetuple()) - lifetime_start_ts
        offset = min(max(offset, min_expire_sec.total_seconds()),
                     max_expire_sec.total_seconds())
        new_end_ms = tag.lifetime_start_ms + (offset * 1000)

    if new_end_ms == tag.lifetime_end_ms:
        return (None, True)

    return set_tag_end_ms(tag, new_end_ms)
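The offset computed above is clamped between the configured minimum and maximum labeled expirations before being applied. A self-contained sketch of just that arithmetic, with the default "1h"/"104w" bounds written as plain timedeltas (an illustrative stand-in, not the model code):

from calendar import timegm
from datetime import datetime, timedelta

def clamp_expiration_offset(expiration_datetime, lifetime_start_ts,
                            min_expire=timedelta(hours=1),
                            max_expire=timedelta(weeks=104)):
    # Seconds between the tag's start time and the requested expiration,
    # bounded to the [minimum, maximum] window.
    offset = timegm(expiration_datetime.utctimetuple()) - lifetime_start_ts
    return min(max(offset, min_expire.total_seconds()), max_expire.total_seconds())

# Asking for an expiration 10 minutes after the start is raised to the 1 hour floor.
start_ts = timegm(datetime(2024, 1, 1).utctimetuple())
assert clamp_expiration_offset(datetime(2024, 1, 1, 0, 10), start_ts) == 3600.0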
Example No. 3
def test_change_tag_expiration(expiration_offset, expected_offset, initialized_db):
  repository = create_repository('devtable', 'somenewrepo', None)
  image1 = find_create_or_link_image('foobarimage1', repository, None, {}, 'local_us')

  manifest = Manifest.get()
  footag = create_or_update_tag_for_repo(repository, 'foo', image1.docker_image_id,
                                         oci_manifest=manifest)

  expiration_date = None
  if expiration_offset is not None:
    expiration_date = datetime.utcnow() + convert_to_timedelta(expiration_offset)

  assert change_tag_expiration(footag, expiration_date)

  # Lookup the tag again.
  footag_updated = get_active_tag('devtable', 'somenewrepo', 'foo')
  oci_tag = _get_oci_tag(footag_updated)

  if expected_offset is None:
    assert footag_updated.lifetime_end_ts is None
    assert oci_tag.lifetime_end_ms is None
  else:
    start_date = datetime.utcfromtimestamp(footag_updated.lifetime_start_ts)
    end_date = datetime.utcfromtimestamp(footag_updated.lifetime_end_ts)
    expected_end_date = start_date + convert_to_timedelta(expected_offset)
    assert (expected_end_date - end_date).total_seconds() < 5 # variance in test

    assert oci_tag.lifetime_end_ms == (footag_updated.lifetime_end_ts * 1000)
Example No. 4
def test_validation_code(token_lifetime, time_since, initialized_db):
  user = create_user_noverify('foobar', '*****@*****.**', email_required=False)
  created = datetime.now() - convert_to_timedelta(time_since)
  verification_code, unhashed = Credential.generate()
  confirmation = EmailConfirmation.create(user=user, pw_reset=True,
                                          created=created, verification_code=verification_code)
  encoded = encode_public_private_token(confirmation.code, unhashed)

  with patch('data.model.config.app_config', {'USER_RECOVERY_TOKEN_LIFETIME': token_lifetime}):
    result = validate_reset_code(encoded)
    expect_success = convert_to_timedelta(token_lifetime) >= convert_to_timedelta(time_since)
    assert expect_success == (result is not None)
Example No. 5
    def retrieve(self, cache_key, loader, should_cache=is_not_none):
        not_found = [None]
        logger.debug("Checking cache for key %s", cache_key.key)
        result = self.cache.get(cache_key.key, default_value=not_found)
        if result != not_found:
            logger.debug("Found result in cache for key %s: %s", cache_key.key,
                         result)
            return json.loads(result)

        logger.debug("Found no result in cache for key %s; calling loader",
                     cache_key.key)
        result = loader()
        logger.debug("Got loaded result for key %s: %s", cache_key.key, result)
        if should_cache(result):
            logger.debug(
                "Caching loaded result for key %s with expiration %s: %s",
                cache_key.key,
                cache_key.expiration,
                result,
            )
            expires = convert_to_timedelta(
                cache_key.expiration) + datetime.now()
            self.cache.set(cache_key.key, json.dumps(result), expires=expires)
            logger.debug(
                "Cached loaded result for key %s with expiration %s: %s",
                cache_key.key,
                cache_key.expiration,
                result,
            )
        else:
            logger.debug("Not caching loaded result for key %s: %s",
                         cache_key.key, result)

        return result
Example No. 6
def validate_reset_code(token):
    # TODO(remove-unenc): Remove allow_public_only once migrated.
    allow_public_only = ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS)
    result = decode_public_private_token(token, allow_public_only=allow_public_only)
    if not result:
        return None

    # Find the reset code.
    try:
        code = EmailConfirmation.get(
            EmailConfirmation.code == result.public_code, EmailConfirmation.pw_reset == True
        )
    except EmailConfirmation.DoesNotExist:
        return None

    if result.private_token and not code.verification_code.matches(result.private_token):
        return None

    # Make sure the code is not expired.
    max_lifetime_duration = convert_to_timedelta(config.app_config["USER_RECOVERY_TOKEN_LIFETIME"])
    if code.created + max_lifetime_duration < datetime.now():
        code.delete_instance()
        return None

    # Verify the user and return the code.
    user = code.user

    with db_transaction():
        if not user.verified:
            user.verified = True
            user.save()

        code.delete_instance()

    return user
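The staleness check above is plain datetime arithmetic: a reset code expires once its creation time plus the configured lifetime falls in the past. In isolation, with an assumed 30 minute lifetime:

from datetime import datetime, timedelta

max_lifetime_duration = timedelta(minutes=30)   # e.g. what convert_to_timedelta("30m") would yield
created = datetime.now() - timedelta(hours=1)   # a code created an hour ago
assert created + max_lifetime_duration < datetime.now()   # stale, so it would be deleted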
Example No. 7
def validate_reset_code(token):
    result = decode_public_private_token(token)
    if not result:
        return None

    # Find the reset code.
    try:
        code = EmailConfirmation.get(
            EmailConfirmation.code == result.public_code,
            EmailConfirmation.pw_reset == True)
    except EmailConfirmation.DoesNotExist:
        return None

    if result.private_token and not code.verification_code.matches(
            result.private_token):
        return None

    # Make sure the code is not expired.
    max_lifetime_duration = convert_to_timedelta(
        config.app_config["USER_RECOVERY_TOKEN_LIFETIME"])
    if code.created + max_lifetime_duration < datetime.now():
        code.delete_instance()
        return None

    # Verify the user and return the code.
    user = code.user

    with db_transaction():
        if not user.verified:
            user.verified = True
            user.save()

        code.delete_instance()

    return user
Example No. 8
def common_login(user_uuid, permanent_session=True):
    """
    Performs login of the given user, with optional non-permanence on the session.

    Returns a tuple with (success, headers to set on success).
    """
    user = model.get_user(user_uuid)
    if user is None:
        return (False, None)

    if login_user(LoginWrappedDBUser(user_uuid)):
        logger.debug("Successfully signed in as user %s with uuid %s",
                     user.username, user_uuid)
        new_identity = QuayDeferredPermissionUser.for_id(user_uuid)
        identity_changed.send(app, identity=new_identity)
        session["login_time"] = datetime.datetime.now()

        if permanent_session and features.PERMANENT_SESSIONS:
            session_timeout_str = app.config.get("SESSION_TIMEOUT", "31d")
            session.permanent = True
            session.permanent_session_lifetime = convert_to_timedelta(
                session_timeout_str)

        # Force a new CSRF token.
        headers = {}
        headers[QUAY_CSRF_UPDATED_HEADER_NAME] = generate_csrf_token(
            force=True)
        return (True, headers)

    logger.debug("User could not be logged in, inactive?")
    return (False, None)
Example No. 9
def test_expiring_soon(initialized_db):
  user = model.user.get_user('devtable')

  # Create some tokens.
  create_token(user, 'Some token')
  exp_token = create_token(user, 'Some expiring token', datetime.now() + convert_to_timedelta('1d'))
  create_token(user, 'Some other token', expiration=datetime.now() + convert_to_timedelta('2d'))

  # Get the token expiring soon.
  expiring_soon = get_expiring_tokens(user, convert_to_timedelta('25h'))
  assert expiring_soon
  assert len(expiring_soon) == 1
  assert expiring_soon[0].id == exp_token.id

  expiring_soon = get_expiring_tokens(user, convert_to_timedelta('49h'))
  assert expiring_soon
  assert len(expiring_soon) == 2
Example No. 10
    def __init__(self):
        super(ExpiredAppSpecificTokenWorker, self).__init__()

        expiration_window = app.config.get("EXPIRED_APP_SPECIFIC_TOKEN_GC", "1d")
        self.expiration_window = convert_to_timedelta(expiration_window)

        logger.debug("Found expiration window: %s", expiration_window)
        self.add_operation(self._gc_expired_tokens, POLL_PERIOD_SECONDS)
Example No. 11
def test_expiring_soon(initialized_db):
    user = model.user.get_user("devtable")

    # Create some tokens.
    create_token(user, "Some token")
    exp_token = create_token(user, "Some expiring token",
                             datetime.now() + convert_to_timedelta("1d"))
    create_token(user,
                 "Some other token",
                 expiration=datetime.now() + convert_to_timedelta("2d"))

    # Get the token expiring soon.
    expiring_soon = get_expiring_tokens(user, convert_to_timedelta("25h"))
    assert expiring_soon
    assert len(expiring_soon) == 1
    assert expiring_soon[0].id == exp_token.id

    expiring_soon = get_expiring_tokens(user, convert_to_timedelta("49h"))
    assert expiring_soon
    assert len(expiring_soon) == 2
Example No. 12
def test_gc(expiration, initialized_db):
  user = model.user.get_user('devtable')

  expiration_date = None
  is_expired = False
  if expiration:
    if expiration[0] == '-':
      is_expired = True
      expiration_date = datetime.now() - convert_to_timedelta(expiration[1:])
    else:
      expiration_date = datetime.now() + convert_to_timedelta(expiration)

  # Create a token.
  token = create_token(user, 'Some token', expiration=expiration_date)

  # GC tokens.
  gc_expired_tokens(timedelta(seconds=0))

  # Ensure the token was GCed if expired and not if it wasn't.
  assert (access_valid_token(get_full_token_string(token)) is None) == is_expired
Example No. 13
File: tag.py Project: zhill/quay
def change_tag_expiration(tag, expiration_date):
    """ Changes the expiration of the given tag to the given expiration datetime. If
      the expiration datetime is None, then the tag is marked as not expiring.
  """
    end_ts = None
    min_expire_sec = convert_to_timedelta(
        config.app_config.get("LABELED_EXPIRATION_MINIMUM", "1h"))
    max_expire_sec = convert_to_timedelta(
        config.app_config.get("LABELED_EXPIRATION_MAXIMUM", "104w"))

    if expiration_date is not None:
        offset = timegm(expiration_date.utctimetuple()) - tag.lifetime_start_ts
        offset = min(max(offset, min_expire_sec.total_seconds()),
                     max_expire_sec.total_seconds())
        end_ts = tag.lifetime_start_ts + offset

    if end_ts == tag.lifetime_end_ts:
        return (None, True)

    return set_tag_end_ts(tag, end_ts)
Example No. 14
def test_create_access_token(expiration, default_expiration, initialized_db, app_config):
  user = model.user.get_user('devtable')
  expiration_date = datetime.now() + convert_to_timedelta(expiration) if expiration else None
  with patch.dict(_config.app_config, {}, clear=True):
    app_config['APP_SPECIFIC_TOKEN_EXPIRATION'] = default_expiration
    if expiration:
      exp_token = create_token(user, 'Some token', expiration=expiration_date)
      assert exp_token.expiration == expiration_date
    else:
      exp_token = create_token(user, 'Some token')
      assert (exp_token.expiration is None) == (default_expiration is None)
Example No. 15
    def retrieve(self, cache_key, loader, should_cache=is_not_none):
        # TODO: We might want to have different behavior based on `cache_key` (using "sets" for `/tags/list`, single value for others...)
        not_found = None
        if self.client is not None:
            logger.debug("Checking cache for key %s", cache_key.key)
            try:
                cached_result = self.client.get(cache_key.key)
                if cached_result != not_found:
                    cache_count.labels("hit").inc()
                    logger.debug("Found result in cache for key %s",
                                 cache_key.key)

                    return json.loads(cached_result)
                else:
                    cache_count.labels("miss").inc()
            except RedisError:
                logger.warning("Got exception when trying to retrieve key %s",
                               cache_key.key)

        logger.debug("Found no result in cache for key %s; calling loader",
                     cache_key.key)
        result = loader()
        logger.debug("Got loaded result for key %s: %s", cache_key.key, result)
        if self.client is not None and should_cache(result):
            try:
                logger.debug(
                    "Caching loaded result for key %s with expiration %s: %s",
                    cache_key.key,
                    cache_key.expiration,
                    result,
                )
                expires = (convert_to_timedelta(cache_key.expiration)
                           if cache_key.expiration else None)
                self.client.set(
                    cache_key.key,
                    json.dumps(result),
                    ex=int(expires.total_seconds()) if expires else None,
                    nx=True,
                )
                logger.debug(
                    "Cached loaded result for key %s with expiration %s: %s",
                    cache_key.key,
                    cache_key.expiration,
                    result,
                )
            except Exception:
                logger.warning("Got exception when trying to set key %s to %s",
                               cache_key.key, result)
        else:
            logger.debug("Not caching loaded result for key %s: %s",
                         cache_key.key, result)

        return result
Example No. 16
    def retrieve(self, cache_key, loader, should_cache=is_not_none):
        not_found = [None]
        with memcache_client(self._get_client) as client:
            if client is not None:
                logger.debug("Checking cache for key %s", cache_key.key)
                try:
                    result = client.get(cache_key.key, default=not_found)
                    if result != not_found:
                        logger.debug("Found result in cache for key %s: %s",
                                     cache_key.key, result)
                        return result
                except Exception:
                    logger.exception(
                        "Got exception when trying to retrieve key %s",
                        cache_key.key)

            logger.debug("Found no result in cache for key %s; calling loader",
                         cache_key.key)
            result = loader()
            logger.debug("Got loaded result for key %s: %s", cache_key.key,
                         result)
            if client is not None and should_cache(result):
                try:
                    logger.debug(
                        "Caching loaded result for key %s with expiration %s: %s",
                        cache_key.key,
                        cache_key.expiration,
                        result,
                    )
                    expires = (convert_to_timedelta(cache_key.expiration)
                               if cache_key.expiration else None)
                    client.set(
                        cache_key.key,
                        result,
                        expire=int(expires.total_seconds())
                        if expires else None,
                    )
                    logger.debug(
                        "Cached loaded result for key %s with expiration %s: %s",
                        cache_key.key,
                        cache_key.expiration,
                        result,
                    )
                except Exception:
                    logger.exception(
                        "Got exception when trying to set key %s to %s",
                        cache_key.key, result)
            else:
                logger.debug("Not caching loaded result for key %s: %s",
                             cache_key.key, result)

            return result
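Unlike Example No. 5, which computes an absolute expiry datetime, the Redis and memcached retrieve() variants pass a relative TTL in whole seconds. The conversion is just:

from datetime import timedelta

expiration = timedelta(minutes=5)               # e.g. convert_to_timedelta("300s")
ttl_seconds = int(expiration.total_seconds())   # 300, handed to ex=/expire= above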
Example No. 17
def _expires_after(label_dict, manifest, model):
    """ Sets the expiration of a manifest based on the quay.expires-in label. """
    try:
        timedelta = convert_to_timedelta(label_dict['value'])
    except ValueError:
        logger.exception('Could not convert %s to timedeltastring',
                         label_dict['value'])
        return

    total_seconds = timedelta.total_seconds()
    logger.debug('Labeling manifest %s with expiration of %s', manifest,
                 total_seconds)
    model.set_tags_expiration_for_manifest(manifest, total_seconds)
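The label_dict consumed here only needs the raw label value under its "value" key; a quay.expires-in=2d label, for example, maps to 172800 seconds. An illustrative dict of that shape (the manifest and model objects are omitted):

from datetime import timedelta

label_dict = {"key": "quay.expires-in", "value": "2d"}
# Assuming convert_to_timedelta("2d") == timedelta(days=2):
total_seconds = timedelta(days=2).total_seconds()   # 172800.0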
Example No. 18
    def get(self, parsed_args):
        """ Lists the app specific tokens for the user. """
        expiring = parsed_args["expiring"]
        if expiring:
            expiration = app.config.get("APP_SPECIFIC_TOKEN_EXPIRATION")
            token_expiration = convert_to_timedelta(expiration or _DEFAULT_TOKEN_EXPIRATION_WINDOW)
            seconds = math.ceil(token_expiration.total_seconds() * 0.1) or 1
            soon = timedelta(seconds=seconds)
            tokens = model.appspecifictoken.get_expiring_tokens(get_authenticated_user(), soon)
        else:
            tokens = model.appspecifictoken.list_tokens(get_authenticated_user())

        return {
            "tokens": [token_view(token, include_code=False) for token in tokens],
            "only_expiring": expiring,
        }
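Here a token counts as "expiring soon" when it expires within roughly 10% of the configured expiration window, with a floor of one second. A worked instance of that arithmetic, using a hypothetical four-week window:

import math
from datetime import timedelta

token_expiration = timedelta(weeks=4)                        # e.g. convert_to_timedelta("4w")
seconds = math.ceil(token_expiration.total_seconds() * 0.1) or 1
soon = timedelta(seconds=seconds)                            # 241920 seconds, about 2.8 days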
Example No. 19
def common_login(user_uuid, permanent_session=True):
    """ Performs login of the given user, with optional non-permanence on the session.
      Returns a tuple with (success, headers to set on success).
  """
    user = model.get_user(user_uuid)
    if user is None:
        return (False, None)

    if login_user(LoginWrappedDBUser(user_uuid)):
        logger.debug('Successfully signed in as user %s with uuid %s',
                     user.username, user_uuid)
        new_identity = QuayDeferredPermissionUser.for_id(user_uuid)
        identity_changed.send(app, identity=new_identity)
        session['login_time'] = datetime.datetime.now()

        if permanent_session and features.PERMANENT_SESSIONS:
            session_timeout_str = app.config.get('SESSION_TIMEOUT', '31d')
            session.permanent = True
            session.permanent_session_lifetime = convert_to_timedelta(
                session_timeout_str)

        # Inform our user analytics that we have a new "lead"
        create_lead_future = user_analytics.create_lead(
            user.email,
            user.username,
            user.given_name,
            user.family_name,
            user.company,
            user.location,
        )

        create_lead_future.add_done_callback(
            build_error_callback('Create lead failed'))

        # Force a new CSRF token.
        headers = {}
        headers[QUAY_CSRF_UPDATED_HEADER_NAME] = generate_csrf_token(
            force=True)
        return (True, headers)

    logger.debug('User could not be logged in, inactive?')
    return (False, None)
Example No. 20
from endpoints.decorators import check_anon_protection, require_xhr_from_browser, check_readonly
from util.metrics.prometheus import timed_blueprint
from util.names import parse_namespace_repository
from util.pagination import encrypt_page_token, decrypt_page_token
from util.request import get_request_ip
from util.timedeltastring import convert_to_timedelta

from __init__models_pre_oci import pre_oci_model as model


logger = logging.getLogger(__name__)
api_bp = timed_blueprint(Blueprint("api", __name__))


CROSS_DOMAIN_HEADERS = ["Authorization", "Content-Type", "X-Requested-With"]
FRESH_LOGIN_TIMEOUT = convert_to_timedelta(app.config.get("FRESH_LOGIN_TIMEOUT", "10m"))


class ApiExceptionHandlingApi(Api):
    @crossdomain(origin="*", headers=CROSS_DOMAIN_HEADERS)
    def handle_error(self, error):
        return super(ApiExceptionHandlingApi, self).handle_error(error)


api = ApiExceptionHandlingApi()
api.init_app(api_bp)
api.decorators = [
    csrf_protect(),
    crossdomain(origin="*", headers=CROSS_DOMAIN_HEADERS),
    process_oauth,
    require_xhr_from_browser,
Example No. 21
    def retarget_tag(
        self,
        repository_ref,
        tag_name,
        manifest_or_legacy_image,
        storage,
        legacy_manifest_key,
        is_reversion=False,
    ):
        """
        Creates, updates or moves a tag to a new entry in history, pointing to the manifest or
        legacy image specified.

        If is_reversion is set to True, this operation is considered a reversion over a previous tag
        move operation. Returns the updated Tag or None on error.
        """
        with db_disallow_replica_use():
            assert legacy_manifest_key is not None
            manifest = manifest_or_legacy_image.as_manifest()
            manifest_id = manifest._db_id

            # If the manifest is a schema 1 manifest and its tag name does not match that
            # specified, then we need to create a new manifest, but with that tag name.
            if manifest.media_type in DOCKER_SCHEMA1_CONTENT_TYPES:
                try:
                    parsed = manifest.get_parsed_manifest()
                except ManifestException:
                    logger.exception(
                        "Could not parse manifest `%s` in retarget_tag",
                        manifest._db_id,
                    )
                    return None

                if parsed.tag != tag_name:
                    logger.debug(
                        "Rewriting manifest `%s` for tag named `%s`",
                        manifest._db_id,
                        tag_name,
                    )

                    repository_id = repository_ref._db_id
                    updated = parsed.with_tag_name(tag_name, legacy_manifest_key)
                    assert updated.is_signed

                    created = oci.manifest.get_or_create_manifest(repository_id, updated, storage)
                    if created is None:
                        return None

                    manifest_id = created.manifest.id

            label_dict = next(
                (
                    label.asdict()
                    for label in self.list_manifest_labels(
                        manifest,
                        key_prefix="quay",
                    )
                    if label.key == LABEL_EXPIRY_KEY
                ),
                None,
            )

            expiration_seconds = None

            if label_dict is not None:
                try:
                    expiration_td = convert_to_timedelta(label_dict["value"])
                    expiration_seconds = expiration_td.total_seconds()
                except ValueError:
                    pass

            tag = oci.tag.retarget_tag(
                tag_name,
                manifest_id,
                is_reversion=is_reversion,
                expiration_seconds=expiration_seconds,
            )

            return Tag.for_tag(tag, self._legacy_image_id_handler)
Example No. 22
def _convert_to_s(timespan_string):
    """
    Returns the given timespan string (e.g. `2w` or `45s`) into seconds.
    """
    return convert_to_timedelta(timespan_string).total_seconds()
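_convert_to_s is a thin wrapper around convert_to_timedelta, which lives in util.timedeltastring and is not shown in these examples. A minimal stand-in for it, assumed to accept the single-unit strings seen throughout ("45s", "30m", "1h", "2d", "104w"); the real helper may accept more formats:

from datetime import timedelta

_UNITS = {"s": "seconds", "m": "minutes", "h": "hours", "d": "days", "w": "weeks"}

def convert_to_timedelta(timespan_string):
    # Assumed format "<integer><unit>", e.g. "2w" or "45s"; raises ValueError otherwise.
    number, unit = timespan_string[:-1], timespan_string[-1:]
    if unit not in _UNITS or not number.isdigit():
        raise ValueError("Invalid timespan string: %s" % timespan_string)
    return timedelta(**{_UNITS[unit]: int(number)})

assert convert_to_timedelta("2w").total_seconds() == 1209600.0
assert convert_to_timedelta("45s").total_seconds() == 45.0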
Example No. 23
from data.userfiles import DelegateUserfiles
from util.locking import GlobalLock, LockNotAcquiredException
from util.log import logfile_path
from util.streamingjsonencoder import StreamingJSONEncoder
from util.timedeltastring import convert_to_timedelta
from workers.worker import Worker

logger = logging.getLogger(__name__)

JSON_MIMETYPE = "application/json"
MIN_LOGS_PER_ROTATION = 5000
MEMORY_TEMPFILE_SIZE = 12 * 1024 * 1024

WORKER_FREQUENCY = app.config.get("ACTION_LOG_ROTATION_FREQUENCY",
                                  60 * 60 * 12)
STALE_AFTER = convert_to_timedelta(
    app.config.get("ACTION_LOG_ROTATION_THRESHOLD", "30d"))
MINIMUM_LOGS_AGE_FOR_ARCHIVE = convert_to_timedelta(
    app.config.get("MINIMUM_LOGS_AGE_FOR_ARCHIVE", "7d"))
SAVE_PATH = app.config.get("ACTION_LOG_ARCHIVE_PATH")
SAVE_LOCATION = app.config.get("ACTION_LOG_ARCHIVE_LOCATION")


class LogRotateWorker(Worker):
    """
    Worker used to rotate old logs out of the database and into storage.
    """
    def __init__(self):
        super(LogRotateWorker, self).__init__()
        self.add_operation(self._archive_logs, WORKER_FREQUENCY)

    def _archive_logs(self):
Example No. 24
import time

import features

from app import app, authentication
from data.users.teamsync import sync_teams_to_groups
from workers.worker import Worker
from util.timedeltastring import convert_to_timedelta
from util.log import logfile_path


logger = logging.getLogger(__name__)


WORKER_FREQUENCY = app.config.get("TEAM_SYNC_WORKER_FREQUENCY", 60)
STALE_CUTOFF = convert_to_timedelta(app.config.get("TEAM_RESYNC_STALE_TIME", "30m"))


class TeamSynchronizationWorker(Worker):
    """ Worker which synchronizes teams with their backing groups in LDAP/Keystone/etc.
  """

    def __init__(self):
        super(TeamSynchronizationWorker, self).__init__()
        self.add_operation(self._sync_teams_to_groups, WORKER_FREQUENCY)

    def _sync_teams_to_groups(self):
        sync_teams_to_groups(authentication, STALE_CUTOFF)


def main():
Example No. 25
from data.userfiles import DelegateUserfiles
from util.locking import GlobalLock, LockNotAcquiredException
from util.log import logfile_path
from util.streamingjsonencoder import StreamingJSONEncoder
from util.timedeltastring import convert_to_timedelta
from workers.worker import Worker

logger = logging.getLogger(__name__)

JSON_MIMETYPE = 'application/json'
MIN_LOGS_PER_ROTATION = 5000
MEMORY_TEMPFILE_SIZE = 12 * 1024 * 1024

WORKER_FREQUENCY = app.config.get('ACTION_LOG_ROTATION_FREQUENCY',
                                  60 * 60 * 12)
STALE_AFTER = convert_to_timedelta(
    app.config.get('ACTION_LOG_ROTATION_THRESHOLD', '30d'))
MINIMUM_LOGS_AGE_FOR_ARCHIVE = convert_to_timedelta(
    app.config.get('MINIMUM_LOGS_AGE_FOR_ARCHIVE', '7d'))
SAVE_PATH = app.config.get('ACTION_LOG_ARCHIVE_PATH')
SAVE_LOCATION = app.config.get('ACTION_LOG_ARCHIVE_LOCATION')


class LogRotateWorker(Worker):
    """ Worker used to rotate old logs out the database and into storage. """
    def __init__(self):
        super(LogRotateWorker, self).__init__()
        self.add_operation(self._archive_logs, WORKER_FREQUENCY)

    def _archive_logs(self):
        cutoff_date = datetime.now() - STALE_AFTER
        try:
Example No. 26
import logging
import time

import features

from app import app, authentication
from data.users.teamsync import sync_teams_to_groups
from workers.worker import Worker
from util.timedeltastring import convert_to_timedelta
from util.log import logfile_path

logger = logging.getLogger(__name__)

WORKER_FREQUENCY = app.config.get('TEAM_SYNC_WORKER_FREQUENCY', 60)
STALE_CUTOFF = convert_to_timedelta(
    app.config.get('TEAM_RESYNC_STALE_TIME', '30m'))


class TeamSynchronizationWorker(Worker):
    """ Worker which synchronizes teams with their backing groups in LDAP/Keystone/etc.
  """
    def __init__(self):
        super(TeamSynchronizationWorker, self).__init__()
        self.add_operation(self._sync_teams_to_groups, WORKER_FREQUENCY)

    def _sync_teams_to_groups(self):
        sync_teams_to_groups(authentication, STALE_CUTOFF)


def main():
    logging.config.fileConfig(logfile_path(debug=False),
Example No. 27
    def create_manifest_and_retarget_tag(
        self, repository_ref, manifest_interface_instance, tag_name, storage, raise_on_error=False
    ):
        """
        Creates a manifest in a repository, adding all of the necessary data in the model.

        The `manifest_interface_instance` parameter must be an instance of the manifest
        interface as returned by the image/docker package.

        Note that all blobs referenced by the manifest must exist under the repository or this
        method will fail and return None.

        Returns a reference to the (created manifest, tag) or (None, None) on error, unless
        raise_on_error is set to True, in which case a CreateManifestException may also be
        raised.
        """
        with db_disallow_replica_use():
            # Get or create the manifest itself.
            created_manifest = oci.manifest.get_or_create_manifest(
                repository_ref._db_id,
                manifest_interface_instance,
                storage,
                for_tagging=True,
                raise_on_error=raise_on_error,
            )
            if created_manifest is None:
                return (None, None)

            wrapped_manifest = Manifest.for_manifest(
                created_manifest.manifest, self._legacy_image_id_handler
            )

            # Optional expiration label
            # NOTE: Since there is currently only one special label on a manifest that has an effect on its tags (expiration),
            #       it is just simpler to set that value at tag creation time (plus it saves an additional query).
            #       If we were to define more of these "special" labels in the future, we should use the handlers from
            #       data/registry_model/label_handlers.py
            if not created_manifest.newly_created:
                label_dict = next(
                    (
                        label.asdict()
                        for label in self.list_manifest_labels(
                            wrapped_manifest,
                            key_prefix="quay",
                        )
                        if label.key == LABEL_EXPIRY_KEY
                    ),
                    None,
                )
            else:
                label_dict = next(
                    (
                        dict(key=label_key, value=label_value)
                        for label_key, label_value in created_manifest.labels_to_apply.items()
                        if label_key == LABEL_EXPIRY_KEY
                    ),
                    None,
                )

            expiration_seconds = None

            if label_dict is not None:
                try:
                    expiration_td = convert_to_timedelta(label_dict["value"])
                    expiration_seconds = expiration_td.total_seconds()
                except ValueError:
                    pass

            # Re-target the tag to it.
            tag = oci.tag.retarget_tag(
                tag_name,
                created_manifest.manifest,
                raise_on_error=raise_on_error,
                expiration_seconds=expiration_seconds,
            )
            if tag is None:
                return (None, None)

            return (
                wrapped_manifest,
                Tag.for_tag(
                    tag, self._legacy_image_id_handler, manifest_row=created_manifest.manifest
                ),
            )
Example No. 28
def _default_expiration_duration():
    expiration_str = config.app_config.get("APP_SPECIFIC_TOKEN_EXPIRATION")
    return convert_to_timedelta(expiration_str) if expiration_str else None
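The helper returns None when APP_SPECIFIC_TOKEN_EXPIRATION is unset, and the token-creation path shown in Example No. 14 treats that as "no expiration". A small self-contained illustration of the two branches, with a plain dict standing in for config.app_config and a toy parser standing in for convert_to_timedelta:

from datetime import timedelta

def default_expiration_duration(app_config, parse=lambda s: timedelta(weeks=int(s[:-1]))):
    # Same shape as the helper above; `parse` only handles "<n>w" strings here,
    # which is an assumption made for this example.
    expiration_str = app_config.get("APP_SPECIFIC_TOKEN_EXPIRATION")
    return parse(expiration_str) if expiration_str else None

assert default_expiration_duration({}) is None
assert default_expiration_duration({"APP_SPECIFIC_TOKEN_EXPIRATION": "4w"}) == timedelta(weeks=4)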