Example #1
    def test_default(self):
        cache = Cache(4)
        cache.add(1)
        cache.add('q')
        cache.add(0.99)
        cache.add('q')
        cache.add(0.988)

        self.assertEqual(cache.length, 4)

        cache.remove('q')
        self.assertEqual(cache.length, 2)

        cache = Cache(3)
        cache.add(1)
        cache.add('q')
        cache.add('a')
        cache.add('b')
        cache.add('c')
        self.assertEqual(cache.length, 3)

        values = cache.cache
        self.assertEqual(values[0], 'a')
        self.assertEqual(values[1], 'b')
        self.assertEqual(values[2], 'c')

        self.assertFalse(cache.exists(2))
        self.assertTrue(cache.exists('c'))
Example #2
    def test_size_zero_and_less(self):
        cache = Cache(0)
        cache.add(2)
        cache.add(3)
        cache.add(4)
        cache.add(5)
        self.assertFalse(cache.exists(2))

        cache = Cache(-8)
        cache.add(2)
        cache.add(3)
        cache.add(4)
        cache.add(5)
        self.assertFalse(cache.exists(2))
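The Cache class exercised by these two tests is not shown; a minimal sketch that would satisfy them (a bounded FIFO buffer that keeps duplicates, evicts the oldest entry when full, removes every occurrence on remove(), and stores nothing for non-positive sizes) could look like this. The class below is an assumption, not the original implementation:

class Cache:
    # Assumed minimal implementation: bounded FIFO buffer that keeps
    # duplicates and evicts the oldest entry once the size limit is reached.
    def __init__(self, size):
        self.size = max(size, 0)   # non-positive sizes store nothing
        self.cache = []

    @property
    def length(self):
        return len(self.cache)

    def add(self, value):
        if self.size <= 0:
            return
        if len(self.cache) >= self.size:
            self.cache.pop(0)      # drop the oldest entry
        self.cache.append(value)

    def remove(self, value):
        # remove every occurrence, matching the length drop from 4 to 2 above
        self.cache = [v for v in self.cache if v != value]

    def exists(self, value):
        return value in self.cache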
Example #3
def save_batch(batch_number, path, correct_size, data, targets,
               num_classes):
    # transform the data into a form that can be fed directly to the network
    data_array = DataUtils.get_array(correct_size, data)
    target_array = DataUtils.get_target_array(num_classes, targets)
    cache = Cache(data_array, target_array)
    # serialize and write to disk
    DataUtils.pickle_data(cache, path,
                          'cache' + str(batch_number) + '.pkl')
    return cache
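A hypothetical driver loop for save_batch, in which raw_batches, the output path, the image size and the class count are all assumptions:

# Hypothetical usage sketch; none of these values appear in the original.
for batch_number, (data, targets) in enumerate(raw_batches):
    save_batch(batch_number, 'cache_dir', (32, 32, 3), data, targets,
               num_classes=10)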
Example #4
    def onStart(self):
        if not self.config['offline_mode']:
            self.login()
            self.session.get_article = self.get_article

        self.cache = Cache(APPNAME, AUTHOR)
        if self.config['clear_cache']:
            self.cache.clear()

        if not self.config['offline_mode']:
            self.cache.fetch_new(self.session, APPNAME, AUTHOR)

        if self.config['update_only']:
            sys.exit(0)

        self.addForm('MAIN', ArticlePicker, name='Article Picker')
        self.addForm('READER', ArticleReader, name='Article Reader')
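Here Cache(APPNAME, AUTHOR) presumably resolves to a per-user, per-application cache directory; a common way to derive such a path, assuming the appdirs package (not part of the original), is:

from appdirs import user_cache_dir

# e.g. ~/.cache/<APPNAME> on Linux, a per-author folder under %LOCALAPPDATA% on Windows
cache_dir = user_cache_dir(APPNAME, AUTHOR)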
Example #5
def get_tz(location):
    cache = Cache()
    if location is None:
        return None
    timezone = cache.timezone_cached(location)
    if timezone:
        return timezone
    try:
        place, (lat, lng) = geo.geocode(location, timeout=10)
    except GeocoderTimedOut as e:
        print("Error: geocode failed on input %s with message %s" % (location, e.message))
        return None
    except TypeError:
        return None
    tz = geo.timezone((lat, lng))
    timezone = tz.zone
    cache.save_to_cache(location, timezone)
    return timezone
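A hypothetical call, with the location string as an assumption:

tz_name = get_tz("Berlin, Germany")
print(tz_name)  # an IANA zone name such as "Europe/Berlin", or None on failure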
Example #6
def createApp():
    SETTINGS = {
        "static_path": config.STATIC_PATH,
        "template_path": config.TEMPLATE_PATH,
        "cookie_secret": config.COOKIE_SECRET,
        "login_url": "/login",
        #"xsrf_cookies": config[default].XSRF_COOKIES,
        "debug": config.DEBUG,
        "gzip": config.GZIP,
    }

    app = tornado.web.Application(handlers=HANDLERS, **SETTINGS)
    app.config = config
    app.remote = RpycController(app.config)
    app.db = SQLClient(config)
    app.cache = Cache(config)
    app.data = Data(app.db, app.cache, app.remote)
    app.static = static(app.config)
    MinitrillLog.log_init(app)
    return app
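A typical way to start the application returned by createApp(); the port number is an assumption:

import tornado.ioloop

if __name__ == '__main__':
    app = createApp()
    app.listen(8888)
    tornado.ioloop.IOLoop.current().start()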
Example #7
async def create_cache(cache_key, content, minutes=30):

    pickle_protocol = pickle.HIGHEST_PROTOCOL

    cache = Cache.filter(cache_key=cache_key)
    cache_exists = await cache.exists()

    if not cache_exists:

        expires = datetime.utcnow() + timedelta(minutes=minutes)
        pickled = pickle.dumps(content, pickle_protocol)
        b64encoded = base64.b64encode(pickled).decode('latin1')

        try:
            cache = Cache(cache_key=cache_key,
                          content=b64encoded,
                          expires=expires)

            await cache.save()
            return True
        except Exception:
            return False

    return False
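A hypothetical read-side counterpart, assuming the same async ORM-style Cache model and reversing the base64/pickle encoding used above:

async def get_cache(cache_key):
    row = await Cache.filter(cache_key=cache_key).first()
    if row is None or row.expires < datetime.utcnow():
        return None
    # undo the base64 + pickle encoding applied in create_cache()
    return pickle.loads(base64.b64decode(row.content))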
Example #8
def addto_cache(source, target, text, translated, msg_id, match):
    cache_row = Cache(source, target, text, translated, msg_id, match)
    db.session.add(cache_row)
    db.session.commit()
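A hypothetical lookup counterpart, assuming Cache is a Flask-SQLAlchemy model (which the db.session usage above suggests but does not confirm):

def get_from_cache(source, target, text):
    # return the cached row if this exact (source, target, text) triple was stored before
    return Cache.query.filter_by(source=source, target=target, text=text).first()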
Example #9
def save_profile_cache(screen_name, cache_data):
    clear_profile_cache(screen_name)
    cache = Cache(name='profile:' + screen_name.lower(),
                  value=encoder.encode(cache_data))
    cache.put()
Example #10
from settings import settings

import time

from models import Runner, Cache
from utils import CSVReader, HttpClient

http_client = HttpClient(
    settings.API_ENDPOINTS,
    settings.STREAMELEMENT_ACCOUNT_ID,
    settings.STREAMELEMENT_JWT_TOKEN,
)
csv_reader = CSVReader(settings.FILENAME, settings.CSV_FORMAT)
cache = Cache(settings.CACHE_SIZE)

runner = Runner(
    csv_reader.get_contests(),
    cache,
    http_client,
    settings.CONTEST_MIN_BET,
    settings.CONTEST_MAX_BET,
    settings.CONTEST_DURATION_SECONDS,
)

nb_seconds_to_bet = round(settings.CONTEST_DURATION_SECONDS * 0.75)
nb_seconds_rest = settings.CONTEST_DURATION_SECONDS - nb_seconds_to_bet

runner.refund_contests()

while True:
    runner.next_contest()
Example #11
def parse_yaml_config(config_file_path, with_notary, with_trivy,
                      with_chartmuseum):
    '''
    :param config_file_path: path to the yaml config file
    :returns: dict of configs
    '''

    with open(config_file_path) as f:
        configs = yaml.safe_load(f)

    config_dict = {
        'portal_url': 'http://portal:8080',
        'registry_url': 'http://registry:5000',
        'registry_controller_url': 'http://registryctl:8080',
        'core_url': 'http://core:8080',
        'core_local_url': 'http://127.0.0.1:8080',
        'token_service_url': 'http://core:8080/service/token',
        'jobservice_url': 'http://jobservice:8080',
        'trivy_adapter_url': 'http://trivy-adapter:8080',
        'notary_url': 'http://notary-server:4443',
        'chart_repository_url': 'http://chartmuseum:9999'
    }

    config_dict['hostname'] = configs["hostname"]

    config_dict['protocol'] = 'http'
    http_config = configs.get('http') or {}
    config_dict['http_port'] = http_config.get('port', 80)

    https_config = configs.get('https')
    if https_config:
        config_dict['protocol'] = 'https'
        config_dict['https_port'] = https_config.get('port', 443)
        config_dict['cert_path'] = https_config["certificate"]
        config_dict['cert_key_path'] = https_config["private_key"]

    if configs.get('external_url'):
        config_dict['public_url'] = configs.get('external_url')
    else:
        if config_dict['protocol'] == 'https':
            if config_dict['https_port'] == 443:
                config_dict['public_url'] = '{protocol}://{hostname}'.format(
                    **config_dict)
            else:
                config_dict[
                    'public_url'] = '{protocol}://{hostname}:{https_port}'.format(
                        **config_dict)
        else:
            if config_dict['http_port'] == 80:
                config_dict['public_url'] = '{protocol}://{hostname}'.format(
                    **config_dict)
            else:
                config_dict[
                    'public_url'] = '{protocol}://{hostname}:{http_port}'.format(
                        **config_dict)

    # DB configs
    db_configs = configs.get('database')
    if db_configs:
        # harbor db
        config_dict['harbor_db_host'] = 'postgresql'
        config_dict['harbor_db_port'] = 5432
        config_dict['harbor_db_name'] = 'registry'
        config_dict['harbor_db_username'] = '******'
        config_dict['harbor_db_password'] = db_configs.get("password") or ''
        config_dict['harbor_db_sslmode'] = 'disable'
        config_dict['harbor_db_max_idle_conns'] = db_configs.get(
            "max_idle_conns") or default_db_max_idle_conns
        config_dict['harbor_db_max_open_conns'] = db_configs.get(
            "max_open_conns") or default_db_max_open_conns

        if with_notary:
            # notary signer
            config_dict['notary_signer_db_host'] = 'postgresql'
            config_dict['notary_signer_db_port'] = 5432
            config_dict['notary_signer_db_name'] = 'notarysigner'
            config_dict['notary_signer_db_username'] = '******'
            config_dict['notary_signer_db_password'] = '******'
            config_dict['notary_signer_db_sslmode'] = 'disable'
            # notary server
            config_dict['notary_server_db_host'] = 'postgresql'
            config_dict['notary_server_db_port'] = 5432
            config_dict['notary_server_db_name'] = 'notaryserver'
            config_dict['notary_server_db_username'] = '******'
            config_dict['notary_server_db_password'] = '******'
            config_dict['notary_server_db_sslmode'] = 'disable'

    # Data path volume
    config_dict['data_volume'] = configs['data_volume']

    # Initial Admin Password
    config_dict['harbor_admin_password'] = configs["harbor_admin_password"]

    # Registry storage configs
    storage_config = configs.get('storage_service') or {}

    config_dict['registry_custom_ca_bundle_path'] = storage_config.get(
        'ca_bundle') or ''

    if storage_config.get('filesystem'):
        config_dict['storage_provider_name'] = 'filesystem'
        config_dict['storage_provider_config'] = storage_config['filesystem']
    elif storage_config.get('azure'):
        config_dict['storage_provider_name'] = 'azure'
        config_dict['storage_provider_config'] = storage_config['azure']
    elif storage_config.get('gcs'):
        config_dict['storage_provider_name'] = 'gcs'
        config_dict['storage_provider_config'] = storage_config['gcs']
    elif storage_config.get('s3'):
        config_dict['storage_provider_name'] = 's3'
        config_dict['storage_provider_config'] = storage_config['s3']
    elif storage_config.get('swift'):
        config_dict['storage_provider_name'] = 'swift'
        config_dict['storage_provider_config'] = storage_config['swift']
    elif storage_config.get('oss'):
        config_dict['storage_provider_name'] = 'oss'
        config_dict['storage_provider_config'] = storage_config['oss']
    else:
        config_dict['storage_provider_name'] = 'filesystem'
        config_dict['storage_provider_config'] = {}

    if storage_config.get('redirect'):
        config_dict['storage_redirect_disabled'] = storage_config['redirect'][
            'disabled']

    # Global proxy configs
    proxy_config = configs.get('proxy') or {}
    proxy_components = proxy_config.get('components') or []
    no_proxy_config = proxy_config.get('no_proxy')
    all_no_proxy = INTERNAL_NO_PROXY_DN
    if no_proxy_config:
        all_no_proxy |= set(no_proxy_config.split(','))

    for proxy_component in proxy_components:
        config_dict[proxy_component +
                    '_http_proxy'] = proxy_config.get('http_proxy') or ''
        config_dict[proxy_component +
                    '_https_proxy'] = proxy_config.get('https_proxy') or ''
        config_dict[proxy_component + '_no_proxy'] = ','.join(all_no_proxy)

    # Trivy configs, optional
    trivy_configs = configs.get("trivy") or {}
    config_dict['trivy_github_token'] = trivy_configs.get("github_token") or ''
    config_dict['trivy_skip_update'] = trivy_configs.get(
        "skip_update") or False
    config_dict['trivy_offline_scan'] = trivy_configs.get(
        "offline_scan") or False
    config_dict['trivy_ignore_unfixed'] = trivy_configs.get(
        "ignore_unfixed") or False
    config_dict['trivy_insecure'] = trivy_configs.get("insecure") or False
    config_dict['trivy_timeout'] = trivy_configs.get("timeout") or '5m0s'

    # Chart configs
    chart_configs = configs.get("chart") or {}
    if chart_configs.get('absolute_url') == 'enabled':
        config_dict['chart_absolute_url'] = True
    else:
        config_dict['chart_absolute_url'] = False

    # jobservice config
    js_config = configs.get('jobservice') or {}
    config_dict['max_job_workers'] = js_config["max_job_workers"]
    config_dict['jobservice_secret'] = generate_random_string(16)

    # notification config
    notification_config = configs.get('notification') or {}
    config_dict['notification_webhook_job_max_retry'] = notification_config[
        "webhook_job_max_retry"]

    # Log configs
    allowed_levels = ['debug', 'info', 'warning', 'error', 'fatal']
    log_configs = configs.get('log') or {}

    log_level = log_configs['level']
    if log_level not in allowed_levels:
        raise Exception(
            'log level must be one of debug, info, warning, error, fatal')
    config_dict['log_level'] = log_level.lower()

    # parse local log related configs
    local_logs = log_configs.get('local') or {}
    if local_logs:
        config_dict['log_location'] = local_logs.get(
            'location') or '/var/log/harbor'
        config_dict['log_rotate_count'] = local_logs.get('rotate_count') or 50
        config_dict['log_rotate_size'] = local_logs.get(
            'rotate_size') or '200M'

    # parse external log endpoint related configs
    if log_configs.get('external_endpoint'):
        config_dict['log_external'] = True
        config_dict['log_ep_protocol'] = log_configs['external_endpoint'][
            'protocol']
        config_dict['log_ep_host'] = log_configs['external_endpoint']['host']
        config_dict['log_ep_port'] = log_configs['external_endpoint']['port']
    else:
        config_dict['log_external'] = False

    # external DB, optional, if external_db enabled, it will cover the database config
    external_db_configs = configs.get('external_database') or {}
    if external_db_configs:
        config_dict['external_database'] = True
        # harbor db
        config_dict['harbor_db_host'] = external_db_configs['harbor']['host']
        config_dict['harbor_db_port'] = external_db_configs['harbor']['port']
        config_dict['harbor_db_name'] = external_db_configs['harbor'][
            'db_name']
        config_dict['harbor_db_username'] = external_db_configs['harbor'][
            'username']
        config_dict['harbor_db_password'] = external_db_configs['harbor'][
            'password']
        config_dict['harbor_db_sslmode'] = external_db_configs['harbor'][
            'ssl_mode']
        config_dict['harbor_db_max_idle_conns'] = external_db_configs[
            'harbor'].get("max_idle_conns") or default_db_max_idle_conns
        config_dict['harbor_db_max_open_conns'] = external_db_configs[
            'harbor'].get("max_open_conns") or default_db_max_open_conns

        if with_notary:
            # notary signer
            config_dict['notary_signer_db_host'] = external_db_configs[
                'notary_signer']['host']
            config_dict['notary_signer_db_port'] = external_db_configs[
                'notary_signer']['port']
            config_dict['notary_signer_db_name'] = external_db_configs[
                'notary_signer']['db_name']
            config_dict['notary_signer_db_username'] = external_db_configs[
                'notary_signer']['username']
            config_dict['notary_signer_db_password'] = external_db_configs[
                'notary_signer']['password']
            config_dict['notary_signer_db_sslmode'] = external_db_configs[
                'notary_signer']['ssl_mode']
            # notary server
            config_dict['notary_server_db_host'] = external_db_configs[
                'notary_server']['host']
            config_dict['notary_server_db_port'] = external_db_configs[
                'notary_server']['port']
            config_dict['notary_server_db_name'] = external_db_configs[
                'notary_server']['db_name']
            config_dict['notary_server_db_username'] = external_db_configs[
                'notary_server']['username']
            config_dict['notary_server_db_password'] = external_db_configs[
                'notary_server']['password']
            config_dict['notary_server_db_sslmode'] = external_db_configs[
                'notary_server']['ssl_mode']
    else:
        config_dict['external_database'] = False

    # update redis configs
    config_dict.update(
        get_redis_configs(configs.get("external_redis", None), with_trivy))

    # auto generated secret string for core
    config_dict['core_secret'] = generate_random_string(16)

    # UAA configs
    config_dict['uaa'] = configs.get('uaa') or {}

    config_dict['registry_username'] = REGISTRY_USER_NAME
    config_dict['registry_password'] = generate_random_string(32)

    internal_tls_config = configs.get('internal_tls')
    # TLS related configs
    if internal_tls_config and internal_tls_config.get('enabled'):
        config_dict['internal_tls'] = InternalTLS(
            internal_tls_config['enabled'],
            False,
            internal_tls_config['dir'],
            configs['data_volume'],
            with_notary=with_notary,
            with_trivy=with_trivy,
            with_chartmuseum=with_chartmuseum,
            external_database=config_dict['external_database'])
    else:
        config_dict['internal_tls'] = InternalTLS()

    # metric configs
    metric_config = configs.get('metric')
    if metric_config:
        config_dict['metric'] = Metric(metric_config['enabled'],
                                       metric_config['port'],
                                       metric_config['path'])
    else:
        config_dict['metric'] = Metric()

    # trace configs
    trace_config = configs.get('trace')
    config_dict['trace'] = Trace(trace_config or {})

    if config_dict['internal_tls'].enabled:
        config_dict['portal_url'] = 'https://portal:8443'
        config_dict['registry_url'] = 'https://registry:5443'
        config_dict['registry_controller_url'] = 'https://registryctl:8443'
        config_dict['core_url'] = 'https://core:8443'
        config_dict['core_local_url'] = 'https://core:8443'
        config_dict['token_service_url'] = 'https://core:8443/service/token'
        config_dict['jobservice_url'] = 'https://jobservice:8443'
        config_dict['trivy_adapter_url'] = 'https://trivy-adapter:8443'
        # config_dict['notary_url'] = 'http://notary-server:4443'
        config_dict['chart_repository_url'] = 'https://chartmuseum:9443'

    # purge upload configs
    purge_upload_config = configs.get('upload_purging')
    config_dict['purge_upload'] = PurgeUpload(purge_upload_config or {})

    # cache configs
    cache_config = configs.get('cache')
    config_dict['cache'] = Cache(cache_config or {})

    return config_dict
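A hypothetical invocation of parse_yaml_config; the file path and feature flags are assumptions:

config = parse_yaml_config('harbor.yml',
                           with_notary=False,
                           with_trivy=True,
                           with_chartmuseum=False)
print(config['public_url'], config['log_level'])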