Example #1
def cache_full():
    cache_name = 'pytest_%s' % uuid.uuid4()
    temporary_cache = FileCache(cache_name)
    temporary_cache[FEATURES_URL] = MOCK_ALL_FEATURES
    temporary_cache.sync()
    yield temporary_cache
    temporary_cache.delete()
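
Examples like this one yield a value and clean up afterwards, so in their original test modules they are presumably registered as pytest fixtures. A minimal, self-contained sketch of the same pattern (the fixture and test names here are hypothetical):

import uuid

import pytest
from fcache.cache import FileCache


@pytest.fixture
def temp_cache():
    # Same pattern as above: a uniquely named cache that is deleted after the test.
    cache = FileCache('pytest_%s' % uuid.uuid4())
    yield cache
    cache.delete()


def test_roundtrip(temp_cache):
    temp_cache['key'] = 'value'
    temp_cache.sync()
    assert temp_cache['key'] == 'value'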
Example #2
def test_aggregate_and_send_metrics():
    responses.add(responses.POST, FULL_METRICS_URL, json={}, status=200)

    start_time = datetime.now(pytz.utc) - timedelta(seconds=60)
    cache = FileCache("TestCache")
    cache[METRIC_LAST_SENT_TIME] = start_time
    strategies = [RemoteAddress(parameters={"IPs": IP_LIST}), Default()]
    my_feature1 = Feature("My Feature1", True, strategies)
    my_feature1.yes_count = 1
    my_feature1.no_count = 1

    my_feature2 = Feature("My Feature2", True, strategies)
    my_feature2.yes_count = 2
    my_feature2.no_count = 2

    features = {"My Feature1": my_feature1, "My Feature 2": my_feature2}

    aggregate_and_send_metrics(URL, APP_NAME, INSTANCE_ID, CUSTOM_HEADERS,
                               CUSTOM_OPTIONS, features, cache)

    assert len(responses.calls) == 1
    request = json.loads(responses.calls[0].request.body)

    assert len(request['bucket']["toggles"].keys()) == 2
    assert request['bucket']["toggles"]["My Feature1"]["yes"] == 1
    assert request['bucket']["toggles"]["My Feature1"]["no"] == 1
    assert cache[METRIC_LAST_SENT_TIME] > start_time
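
The test above registers a mock endpoint with responses.add and then inspects responses.calls, which only works while the responses mock is active; the original test presumably runs under the library's @responses.activate decorator. A minimal, self-contained sketch of that setup (the URL and payload are made up):

import requests
import responses


@responses.activate
def test_posts_are_intercepted():
    # Register a fake endpoint, call it, then inspect the recorded calls,
    # mirroring the assertions in the example above.
    responses.add(responses.POST, 'http://unleash.example/api/client/metrics',
                  json={}, status=200)
    requests.post('http://unleash.example/api/client/metrics', json={'bucket': {}})
    assert len(responses.calls) == 1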
Example #3
    def __init__(self,
                 cache: bool = False,
                 ratelimiter: Optional[AsyncLimiter] = None) -> None:
        self._logger = logging.getLogger(__name__)
        self._cache = FileCache('dipdup', flag='cs') if cache else None
        self._ratelimiter = ratelimiter
        self._session = aiohttp.ClientSession()
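
Several of these examples open the cache with flag='cs'. fcache follows the dbm-style flag convention: 'c' opens the cache and creates it if it does not exist, 'n' always starts a new empty cache, and a trailing 's' writes every change to disk immediately, so no explicit sync() call is needed. A minimal sketch:

from fcache.cache import FileCache

# 'cs': create the cache if it is missing and sync every write to disk.
cache = FileCache('demo-app', flag='cs')
cache['answer'] = 42      # persisted immediately because of the 's' flag
print(cache['answer'])
cache.close()             # close without discarding the stored data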
Example #4
def cache_custom():
    cache_name = 'pytest_%s' % uuid.uuid4()
    temporary_cache = FileCache(cache_name)
    temporary_cache[FEATURES_URL] = MOCK_CUSTOM_STRATEGY
    temporary_cache.sync()
    yield temporary_cache
    temporary_cache.delete()
Example #5
    def __init__(self,
                 url,
                 app_name,
                 instance_id="unleash-client-python",
                 refresh_interval=15,
                 metrics_interval=60,
                 disable_metrics=False,
                 disable_registration=False,
                 custom_headers={},
                 custom_strategies={},
                 cache_directory=None):
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        :param custom_strategies: Dictionary of custom strategy names : custom strategy objects
        :param cache_directory: Location of the cache directory. When unset, FCache will determine the location
        """
        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_disable_registration = disable_registration
        self.unleash_custom_headers = custom_headers

        # Class objects
        self.cache = FileCache(self.unleash_instance_id,
                               app_cache_dir=cache_directory)
        self.features = {}  # type: Dict
        self.scheduler = BackgroundScheduler()
        self.fl_job = None  # type: Job
        self.metric_job = None  # type: Job
        self.cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
        self.cache.sync()

        # Mappings
        default_strategy_mapping = {
            "applicationHostname": ApplicationHostname,
            "default": Default,
            "gradualRolloutRandom": GradualRolloutRandom,
            "gradualRolloutSessionId": GradualRolloutSessionId,
            "gradualRolloutUserId": GradualRolloutUserId,
            "remoteAddress": RemoteAddress,
            "userWithId": UserWithId
        }

        self.strategy_mapping = default_strategy_mapping.copy()
        self.strategy_mapping.update(custom_strategies)

        # Client status
        self.is_initialized = False
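
This constructor (and the variants in Examples #13, #22 and #29 below) appears to be unleash-client-python's UnleashClient, which keeps its feature-toggle provisioning and metrics bookkeeping in a FileCache keyed by the instance id. A hedged sketch of instantiating it with the parameters documented above (the URL and application name are placeholders):

from UnleashClient import UnleashClient

# Placeholder URL and app name; when cache_directory is omitted the client lets
# fcache pick its default per-application cache location.
client = UnleashClient(url='https://unleash.example/api',
                       app_name='my-service',
                       refresh_interval=15,
                       metrics_interval=60)
client.initialize_client()
print(client.is_enabled('my-toggle'))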
Example #6
def get_stopwords(extra_stopwords=[]):
    cache = FileCache('stopwords')
    stopwords = cache.get('stopwords', [])
    if stopwords:
        return set(stopwords + extra_stopwords)
    else:
        cache['stopwords'] = default_stopwords()
        cache.sync()
        return set(cache['stopwords'] + extra_stopwords)
Example #7
    def __init__(self, reddit_session, sr_name, teams):
        self.r = reddit_session
        if reddit_session is not None:
            # You can set it to None if you know you're not going to need it.
            self.sub = self.r.subreddit(sr_name)
        self.teams = teams.split(',')
        self.aaf = AAFClient('aafgamethread;reddit.com/r/%s' % sr_name)
        self.games = FileCache('aafgamethread_%s' % sr_name, flag='cs')
        self.renderer = GameThreadRenderer(sr_name)
Example #8
    def __init__(self, config_file: str = "SpotiBot.json") -> None:
        """Instantiates instances of environment configuration from .ini file.

        Args:
            config_file: Name of .ini configuration file following the
                format of SpotiBot_SAMPLE.ini
        """
        self.cache = FileCache(config_file.split(r".")[0], flag="cs")
        self.config_file = config_file
        self.path_to_config = self.cache.get(r"path_to_config")
Example #9
    def get_rabbit_host_from_vhost(self, vhost, caching=True):
        if caching:
            vhost_host_cache = FileCache('vhost-host', flag='cs')
            if vhost_host_cache.get(vhost):
                return vhost_host_cache[vhost]
            else:
                vhost_host_cache[vhost] = self.get_host_action(vhost)
                return vhost_host_cache[vhost]
        else:
            return self.get_host_action(vhost)
Example #10
def get_latlng(location):
    cache = FileCache('uber-button-cache')

    if location not in cache:
        geolocator = Nominatim(user_agent="Uber-Button")
        geolocator.timeout = 60
        time.sleep(1.1)
        cache[location] = geolocator.geocode(location)
        cache.sync()

    return cache[location]
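
A hedged usage sketch of the helper above: geopy's geocoder returns a Location object exposing latitude and longitude attributes, and the code relies on that object surviving fcache's pickling round trip (the place name below is made up):

location = get_latlng('Reykjavik, Iceland')
if location is not None:
    # The first call hits Nominatim; later calls are served from the file cache.
    print(location.latitude, location.longitude)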
Example #11
def get_stemmed_stopwords(extra_stopwords=[]):
    cache = FileCache('stopwords')
    stemmed_stopwords = cache.get('stemmed_stopwords', [])
    stemmed_extra = list(set([stemmize(w) for w in extra_stopwords]))
    if stemmed_stopwords:
        return set(stemmed_stopwords + stemmed_extra)
    else:
        stemmed = list(set(stemmize(w) for w in get_stopwords()))
        cache['stemmed_stopwords'] = stemmed
        cache.sync()
        return set(stemmed + stemmed_extra)
Example #12
    def __init__(self, environment):
        """Initializes the cache so that kraft does not have to constantly
        retrieve informational lists about unikraft, its available architectures,
        platforms, libraries and supported applications."""

        self._cachedir = os.path.join(environment.get('UK_WORKDIR'), 'kraft.cache')

        self._cache = FileCache(
            app_cache_dir = self._cachedir,
            appname = __program__,
            flag='cs'
        )
Example #13
    def __init__(self,
                 url: str,
                 app_name: str,
                 instance_id: str = "unleash-client-python",
                 refresh_interval: int = 15,
                 metrics_interval: int = 60,
                 disable_metrics: bool = False,
                 custom_headers: dict = {},
                 custom_strategies: dict = {}) -> None:
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        :param custom_strategies: Dictionary of custom strategy names : custom strategy objects
        """
        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_custom_headers = custom_headers

        # Class objects
        self.cache = FileCache(self.unleash_instance_id)
        self.features: dict = {}
        self.scheduler = BackgroundScheduler()
        self.fl_job: Job = None
        self.metric_job: Job = None
        self.metrics_last_sent_time = datetime.now()

        # Mappings
        default_strategy_mapping = {
            "applicationHostname": ApplicationHostname,
            "default": Default,
            "gradualRolloutRandom": GradualRolloutRandom,
            "gradualRolloutSessionId": GradualRolloutSessionId,
            "gradualRolloutUserId": GradualRolloutUserId,
            "remoteAddress": RemoteAddress,
            "userWithId": UserWithId
        }

        self.strategy_mapping = {**custom_strategies, **default_strategy_mapping}

        # Client status
        self.is_initialized = False
Example #14
            def wrapper(*a, **kw):
                cache = FileCache(self.cache_store, serialize=True, flag='cs')
                key = self.get_key(*a, **kw)
                if not self.cache_updateable(cache, key):
                    retval = method(*a, **kw)
                    cache[key] = {
                        'retval': retval,
                        'datetime': datetime.today().date()
                    }
                else:
                    retval = cache[key].get('retval')

                cache.close()
                return retval
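
This wrapper is the inner function of a caching decorator: the surrounding Cache class (exercised in Examples #19 and #23 below) supplies cache_store, get_key and cache_updateable, and entries are recomputed when the stored date is considered stale. A self-contained, hypothetical sketch of the same idea using a simple once-per-day expiry:

from datetime import date
from functools import wraps

from fcache.cache import FileCache


def cached_daily(store_name):
    """Hypothetical stand-alone variant: memoize a function's return value in a
    FileCache and recompute it at most once per day per argument tuple."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            cache = FileCache(store_name, serialize=True, flag='cs')
            key = repr((args, tuple(sorted(kwargs.items()))))
            entry = cache.get(key)
            if entry is None or entry['date'] != date.today():
                entry = {'retval': func(*args, **kwargs), 'date': date.today()}
                cache[key] = entry
            cache.close()
            return entry['retval']
        return wrapper
    return decorator


@cached_daily('demo-daily-cache')
def expensive(x):
    return x * x


print(expensive(3))   # computed at most once per day, then served from disk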
Example #15
def load_intermediate_ks(dirname: str, suffix: str, cache_dir):
    ks = FileCache('precomputed_ks-{}'.format(suffix),
                   flag='ns',
                   app_cache_dir=cache_dir)
    dir = os.fsencode(dirname)

    for file in os.listdir(dir):
        filename = os.fsdecode(file)
        if filename.endswith(suffix):
            ks_f: dict = json.load(
                open('{}/{}'.format(dirname, filename), mode='rt'))
            for k in ks_f:
                ks[k] = ks_f[k]
    return ks
Example #16
    def __init__(self, environment):
        """
        Initializes the cache so that kraft does not have to constantly
        retrieve informational lists about unikraft, its available
        architectures, platforms, libraries and supported applications.
        """

        self._cachedir = environment.get('UK_CACHEDIR')

        # Initialize a cache instance
        self._cache = FileCache(app_cache_dir=self._cachedir,
                                appname=__program__,
                                flag='cs')

        self._cache_lock = threading.Lock()
Example #17
    def __init__(self,
                 name: str,
                 bits: int,
                 start: typing.Union[int, typing.Tuple[int, int]] = 0,
                 step: typing.Union[int, typing.Tuple[int, int]] = 1,
                 keys: typing.List[str] = None,
                 cache_name: str = None):
        super().__init__(name, bits)
        self.start: typing.Tuple[int, int] = (start, start) if isinstance(
            start, int) else start
        self.step: typing.Tuple[int, int] = (step, step) if isinstance(
            step, int) else step
        self.rand = random.Random()
        self.keys = keys
        self.cache = FileCache(APPNAME +
                               cache_name, flag='cs') if cache_name else {}
Example #18
    def __init__(self):
        file_path = os.path.dirname(__file__)
        if file_path != "":
            os.chdir(file_path)

        with open('config.yml', 'r') as ymlfile:
            self.config = yaml.load(ymlfile, Loader=yaml.FullLoader)

        self.cache = FileCache('zoe-domoticz', flag='cs')
        self.domoticz = Domoticz(self.config)

        # PyZE stuff:
        self.gigya = Gigya()
        self.login()
        self.kamereon = Kamereon(gigya=self.gigya,
                                 country=self.config['myRenaultCountry'])
        self.vehicle = Vehicle(self.config['myRenaultVIN'], self.kamereon)
Example #19
    def test_cache_updateable(self):
        encode_key = lambda a, b: "{},{}".format(a, b)
        c = Cache(encode_key, 'prefix', validate_expiry=lambda *a: True)
        key = c.get_key(1, 2)

        cache = FileCache(c.cache_store, serialize=True, flag='cs')

        try:
            del cache[key]
        except:
            pass

        assert c.cache_updateable(cache, key) is False

        cache[key] = {'retval': 'test', 'datetime': datetime.today().date()}

        assert c.cache_updateable(cache, key) is True
        cache.close()
Example #20
def __main__():
    conky_path = os.path.dirname(os.path.realpath(__file__))
    cache_path = conky_path + '/.cache'

    # Create cache
    cache = FileCache('conky_cache', flag='cs', app_cache_dir=cache_path)
    cache.create()

    system.__main__()
    cpu.__main__()
    ram.__main__()
    temps.__main__()
    disks.__main__()
    network.__main__()
    containers.__main__()

    # Close Cache
    cache.close()
Example #21
def main(argv=None):
    """
    Logic:
        * Generate the unique compile command database.
        * Get worktree branches and changes urls in compile command database
    """
    global logger, cache

    # Getting environment variables
    ccdb_worktree_env = os.environ.get('CCDB_WORKTREE')
    ccdb_worktree_apply_env = os.environ.get('CCDB_WORKTREE_APPLICATION')

    # Create a custom logger
    logger = logging.getLogger(__name__)
    # - Create handlers
    c_handler = logging.StreamHandler()
    # - Create formatters and add it to handlers
    c_format = '[%(asctime)s][ccdb][%(levelname)s] %(message)s'
    c_format = logging.Formatter(c_format)
    c_handler.setFormatter(c_format)
    # - Add handlers to the logger
    logger.addHandler(c_handler)

    # Parse arguments
    parse_arguments(args=argv)

    # Generate unique compile command database
    logger.debug('Generating compile command database')
    list_project_dirs = generate_compile_command()

    if not list_project_dirs:
        exit(0)

    if ccdb_worktree_env is not None:
        if ccdb_worktree_apply_env:
            apply_worktree_env_using_envvar(ccdb_worktree_apply_env)
        else:
            # Load cache
            cache = FileCache('ccdb')
            logger.debug(
                'Applying worktree configuration to compile command database')
            apply_worktree_env(list_project_dirs)
            cache.close()
Example #22
    def __init__(self,
                 url: str,
                 app_name: str,
                 instance_id: str = "unleash-client-python",
                 refresh_interval: int = 15,
                 metrics_interval: int = 60,
                 disable_metrics: bool = False,
                 custom_headers: dict = {}) -> None:
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        """
        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_custom_headers = custom_headers

        # Class objects
        self.cache = FileCache("Unleash")
        self.features: dict = {}
        self.scheduler = BackgroundScheduler()
        self.fl_job: Job = None
        self.metric_job: Job = None
        self.metrics_last_sent_time = datetime.now()

        # Client status
        self.is_initialized = False
Example #23
    def test_decorator(self):
        encode_key = lambda a, b, *aa, **kw: "{},{}".format(a, b)
        retval = int(time.time())
        c = Cache(encode_key, 'prefix', validate_expiry=lambda *a: a[0])

        try:
            cache = FileCache(c.cache_store, serialize=True, flag='cs')
            del cache[c.get_key(True, 1)]
            cache.close()
        except Exception:
            pass

        @c.cache()
        def _cache(a, b, t):
            if not t:
                return int(time.time())
            return t

        cache_ret = _cache(True, 1, retval)
        assert cache_ret == retval

        cache_ret = _cache(True, 1, None)
        assert cache_ret == retval
Example #24
    def detect_stuck_messages_queue(queue_body, ttl):
        past_queue = FileCache('message-queue', flag='cs')
        queue_name = queue_body['name']
        current_messages = queue_body['messages']
        current_consumers = queue_body['consumers']

        current_time = datetime.now(timezone.utc)
        current_time = current_time.replace(tzinfo=None)

        if past_queue.get(queue_name):
            time_range_minutes = timedelta.total_seconds(current_time - past_queue[queue_name]['time_catch']) / 60
            if past_queue[queue_name]['messages'] == current_messages:
                if time_range_minutes > ttl:
                    return True
                if time_range_minutes < ttl:
                    return False
            else:
                past_queue[queue_name] = {'messages': current_messages, 'time_catch': current_time,
                                          'consumers': current_consumers}
                return False
        else:
            past_queue[queue_name] = {'messages': current_messages, 'time_catch': current_time,
                                      'consumers': current_consumers}
            return False
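
A hedged driver for the detector above, assuming the function is reachable at module scope (in the original it sits inside a class, probably as a static method) and that queue_body carries the name, messages and consumers fields of a RabbitMQ management API queue listing; the values and ttl are made up:

queue_body = {'name': 'orders', 'messages': 120, 'consumers': 0}

# True only when the message count has not moved for more than ttl minutes
# since the queue was first recorded in the 'message-queue' FileCache.
is_stuck = detect_stuck_messages_queue(queue_body, ttl=30)
print('stuck' if is_stuck else 'draining')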
Example #25
from .backends import AzureActiveDirectoryBackend
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME, login
from django import VERSION
from django.http import HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import never_cache
from fcache.cache import FileCache
import uuid

ad_cache = FileCache('azure_ad_auth_cache', flag='c')

if VERSION[0] < 2:
    from django.core.urlresolvers import reverse
else:
    from django.urls import reverse

try:
    # Python 3
    from urllib.parse import urlparse
except ImportError:
    # Python 2
    from urlparse import urlparse


@never_cache
def auth(request):
    backend = AzureActiveDirectoryBackend()
    redirect_uri = request.build_absolute_uri(reverse(complete))
    nonce = str(uuid.uuid4())
    request.session['nonce'] = nonce
Example #26
    def __init__(self, config, debugging=False):
        Utils.isDebugging = debugging
        self.config = config

        # read config
        c = self.config['site']
        self.port = c['port']
        self.port_secure = c['port_secure']

        c = self.config['config']
        self.use_https = c['use_https']
        self.use_auth = c['use_auth']
        self.use_alerts = c['use_alerts']
        self.motion_pin = c['motion_pin']
        self.file_name = "/var/log/%s.log" % (os.getcwd().split(os.sep)[-1])

        Utils.temperature_pin = c['temperature_pin']

        c = self.config['config']['times']
        self.time_to_close = c['to_close_door']
        self.time_to_open = c['to_open_door']
        self.time_to_report_open = c['to_report_open']
        self.time_to_report_still_open = c['to_report_still_open']
        self.time_to_force_close = c['to_force_close']

        c = self.config['alerts']
        self.when_opened = c['when_opened']
        self.when_closed = c['when_closed']
        self.on_days_of_week = c['on_days_of_week']
        self.from_time = c['from_time']
        self.to_time = c['to_time']
        self.alert_type = c['alert_type']

        c = self.config['mqtt']
        self.mqtt_server = c['server']
        self.mqtt_username = c['username']
        self.mqtt_password = c['password']

        c = self.config['mqtt']['topics']
        self.mqtt_topic_garage = c['garage']
        self.mqtt_topic_temperature = c['temperature']
        self.mqtt_topic_day_temperature = c['day_temperature']

        c = self.config['weatherapi']
        self.weather_url = c['url']
        self.weather_key = c['key']

        for arg in sys.argv:
            if str(arg) == 'debug':
                # ex. python controller.py debug -v
                Utils.isDebugging = True
                if self.time_to_report_open > 35:
                    self.time_to_report_open = 35
                self.time_to_report_still_open = 100
                Utils.gfileCache += "debug"

            if str(arg).startswith('port='):
                self.port = int((arg).split('=')[1])
                self.port_secure = self.port

        # set up fcache to log last time garage door was opened
        self.fileCache = FileCache(Utils.gfileCache, flag='cs')

        # set up logging
        log_fmt = '%(asctime)s %(levelname)-8s %(message)s'
        date_fmt = '%a, %m/%d/%y %H:%M:%S'
        log_level = logging.INFO

        if Utils.isDebugging:
            logging.basicConfig(datefmt=date_fmt,
                                format=log_fmt,
                                level=log_level)
            self.debugMsg = "Debugging=%s time_to_report_open=%d time_to_report_still_open %d gfileCache=%s" % (
                Utils.isDebugging, self.time_to_report_open,
                self.time_to_report_still_open, Utils.gfileCache)
        else:
            self.debugMsg = "Debugging=%s" % Utils.isDebugging
            logging.getLogger('mylogger').setLevel(logging.NOTSET)
            logging.basicConfig(datefmt=date_fmt,
                                format=log_fmt,
                                level=log_level,
                                filename=self.file_name)
            rotatingHandler = logging.handlers.RotatingFileHandler(
                self.file_name, maxBytes=5000000, backupCount=3)
            rotatingHandler.setLevel(log_level)
            rotatingHandler.setFormatter(logging.Formatter(log_fmt))
            logging.getLogger('mylogger').addHandler(rotatingHandler)

            gpio.setwarnings(False)
            gpio.cleanup()
            gpio.setmode(gpio.BCM)

        # Banner
        logging.info("<---Garage Controller starting (port=%s %s) --->" %
                     (self.port_secure, self.debugMsg))

        self.updateHandler = UpdateHandler(self)

        self.initMsg = ""

        # setup motion sensor
        if self.motion_pin != None and Utils.isDebugging != True:
            gpio.setup(self.motion_pin, gpio.IN)
            gpio.add_event_detect(self.motion_pin,
                                  gpio.RISING,
                                  callback=self.on_motion,
                                  bouncetime=300)
            logging.info("Motion pin = %s" % (self.motion_pin))

        # setup Doors from config file
        self.doors = [
            Doors.Door(x, c) for (x, c) in sorted(config['doors'].items())
        ]
        for door in self.doors:
            door.setup(gpio, self.get_time_since_last_open(door.id))
            self.set_initial_text_msg(door)

        # setup alerts
        if self.alert_type == 'smtp':
            self.use_smtp = False
            smtp_params = ("smtphost", "smtpport", "smtp_tls", "username",
                           "password", "to_email")
            self.use_smtp = ('smtp'
                             in config['alerts']) and set(smtp_params) <= set(
                                 config['alerts']['smtp'])
        elif self.alert_type == 'pushover':
            self.pushover_user_key = config['alerts']['pushover']['user_key']
            self.pushover_api_key = config['alerts']['pushover']['api_key']
        else:
            self.alert_type = None
            logging.info("No alerts configured")

        if Utils.isDebugging:
            print(self.initMsg)
        else:
            logging.info(self.initMsg)
            self.send_msg(self.initMsg)
Example #27
def cache_empty():
    cache_name = 'pytest_%s' % uuid.uuid4()
    temporary_cache = FileCache(cache_name)
    yield temporary_cache
    temporary_cache.delete()
Example #28
    def __init__(self, arguments):
        self._cache = FileCache(arguments.pop("appname", None),
                                flag=arguments.pop("flag", "c"),
                                serialize=arguments.pop("serialize", True),
                                app_cache_dir=arguments.pop(
                                    "app_cache_dir", None))
Example #29
    def __init__(self,
                 url: str,
                 app_name: str,
                 environment: str = "default",
                 instance_id: str = "unleash-client-python",
                 refresh_interval: int = 15,
                 metrics_interval: int = 60,
                 disable_metrics: bool = False,
                 disable_registration: bool = False,
                 custom_headers: Optional[dict] = None,
                 custom_options: Optional[dict] = None,
                 custom_strategies: Optional[dict] = None,
                 cache_directory: str = None,
                 project_name: str = None) -> None:
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param environment: Name of the environment using the unleash client, optional & defaults to "default".
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        :param custom_options: Default requests parameters, optional & defaults to empty.
        :param custom_strategies: Dictionary of custom strategy names : custom strategy objects
        :param cache_directory: Location of the cache directory. When unset, FCache will determine the location
        """
        custom_headers = custom_headers or {}
        custom_options = custom_options or {}
        custom_strategies = custom_strategies or {}

        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_environment = environment
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_disable_registration = disable_registration
        self.unleash_custom_headers = custom_headers
        self.unleash_custom_options = custom_options
        self.unleash_static_context = {
            "appName": self.unleash_app_name,
            "environment": self.unleash_environment
        }
        self.unleash_project_name = project_name

        # Class objects
        self.cache = FileCache(self.unleash_instance_id,
                               app_cache_dir=cache_directory)
        self.features: dict = {}
        self.scheduler = BackgroundScheduler()
        self.fl_job: Job = None
        self.metric_job: Job = None
        self.cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
        self.cache.sync()

        # Mappings
        default_strategy_mapping = {
            "applicationHostname": ApplicationHostname,
            "default": Default,
            "gradualRolloutRandom": GradualRolloutRandom,
            "gradualRolloutSessionId": GradualRolloutSessionId,
            "gradualRolloutUserId": GradualRolloutUserId,
            "remoteAddress": RemoteAddress,
            "userWithId": UserWithId,
            "flexibleRollout": FlexibleRollout
        }

        if custom_strategies:
            strategy_v2xx_deprecation_check(
                [x for x in custom_strategies.values()])  # pylint: disable=R1721

        self.strategy_mapping = {
            **custom_strategies,
            **default_strategy_mapping
        }

        # Client status
        self.is_initialized = False
Example #30
import os  # used by the os.system call below
from rdkit.Chem.rdMolDescriptors import CalcMolFormula
from pubchempy import get_compounds, Compound
import json
import db_preprocessor
from collections import Counter
from chemicals import serialize_formula

db_preprocessor.write()

os.system('python2 parse_pdf.py')

from fcache.cache import FileCache

mycache = FileCache(
    'myapp',
    flag='cs',
    serialize=True,
    app_cache_dir='/home/caleb/Documents/University/CHE3123/chemical-metadata/fcache')

syn_data = open('Good synoynms by CAS.json').read()
syn_data = json.loads(syn_data)

all_user_names = []
for CAS, d in syn_data.items():
    if 'synonyms' in d:
        all_user_names.extend(d['synonyms'])
all_user_names = set(all_user_names)

pdf_data = open('Parsed scifinder metadata.json').read()
pdf_data = json.loads(pdf_data)