Example #1
    def __init__(self,
                 cache: bool = False,
                 ratelimiter: Optional[AsyncLimiter] = None) -> None:
        self._logger = logging.getLogger(__name__)
        self._cache = FileCache('dipdup', flag='cs') if cache else None
        self._ratelimiter = ratelimiter
        self._session = aiohttp.ClientSession()
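All of the examples on this page rely on fcache's dict-like FileCache. As a quick orientation, here is a minimal usage sketch covering only the pieces the examples below actually exercise (the 'c' flag opens or creates the cache, 's' makes writes synchronous):

from fcache.cache import FileCache

cache = FileCache('demo-app', flag='cs')   # persistent, dict-like store
cache['answer'] = {'value': 42}            # written to disk immediately ('s' flag)
print(cache.get('answer'))
cache.sync()                               # flush buffered writes (a no-op with 's')
cache.close()                              # close the store; cache.delete() would remove it entirely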
Example #2
def _create_strategies(provisioning: dict,
                       strategy_mapping: dict,
                       cache: FileCache) -> list:
    feature_strategies = []

    for strategy in provisioning["strategies"]:
        try:
            if "parameters" in strategy.keys():
                strategy_provisioning = strategy['parameters']
            else:
                strategy_provisioning = {}

            if "constraints" in strategy.keys():
                constraint_provisioning = strategy['constraints']
            else:
                constraint_provisioning = {}

            feature_strategies.append(strategy_mapping[strategy['name']](
                constraints=constraint_provisioning, parameters=strategy_provisioning
            ))
        except Exception as excep:
            if FAILED_STRATEGIES not in cache.keys():
                cache[FAILED_STRATEGIES] = []  # Initialize cache key only if failures exist.

            if strategy['name'] not in cache[FAILED_STRATEGIES]:
                LOGGER.warning("Failed to load strategy. This may be a problem with a custom strategy. Exception: %s", excep)
                cache[FAILED_STRATEGIES].append(strategy['name'])

    return feature_strategies
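_create_strategies turns the "strategies" section of a feature-toggle provisioning payload into strategy objects via a name-to-class mapping, and remembers failed strategy names in the cache so each warning is logged only once. A hypothetical call could look like the sketch below; the payload and mapping are invented for illustration, with Default and RemoteAddress standing in for the strategy classes used in the Unleash examples further down, and the imports assumed from the surrounding examples.

provisioning = {
    "strategies": [
        {"name": "default"},
        {"name": "remoteAddress", "parameters": {"IPs": "127.0.0.1,10.0.0.0/8"}},
    ]
}
strategy_mapping = {"default": Default, "remoteAddress": RemoteAddress}
cache = FileCache('strategy-demo', flag='cs')

strategies = _create_strategies(provisioning, strategy_mapping, cache)
# Anything that fails to construct is logged once and recorded under cache[FAILED_STRATEGIES].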
Example #3
def test_aggregate_and_send_metrics():
    responses.add(responses.POST, FULL_METRICS_URL, json={}, status=200)

    start_time = datetime.now(pytz.utc) - timedelta(seconds=60)
    cache = FileCache("TestCache")
    cache[METRIC_LAST_SENT_TIME] = start_time
    strategies = [RemoteAddress(parameters={"IPs": IP_LIST}), Default()]
    my_feature1 = Feature("My Feature1", True, strategies)
    my_feature1.yes_count = 1
    my_feature1.no_count = 1

    my_feature2 = Feature("My Feature2", True, strategies)
    my_feature2.yes_count = 2
    my_feature2.no_count = 2

    features = {"My Feature1": my_feature1, "My Feature 2": my_feature2}

    aggregate_and_send_metrics(URL, APP_NAME, INSTANCE_ID, CUSTOM_HEADERS,
                               CUSTOM_OPTIONS, features, cache)

    assert len(responses.calls) == 1
    request = json.loads(responses.calls[0].request.body)

    assert len(request['bucket']["toggles"].keys()) == 2
    assert request['bucket']["toggles"]["My Feature1"]["yes"] == 1
    assert request['bucket']["toggles"]["My Feature1"]["no"] == 1
    assert cache[METRIC_LAST_SENT_TIME] > start_time
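For responses.add and responses.calls to intercept anything, the test above presumably runs under the responses mock; the @responses.activate decorator was most likely lost during extraction. Declared in full, the opening would look roughly like this:

import responses

@responses.activate               # activates request interception for this test
def test_aggregate_and_send_metrics():
    responses.add(responses.POST, FULL_METRICS_URL, json={}, status=200)
    ...                           # body as shown above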
Example #4
def fetch_and_load_features(url: str,
                            app_name: str,
                            instance_id: str,
                            custom_headers: dict,
                            custom_options: dict,
                            cache: FileCache,
                            features: dict,
                            strategy_mapping: dict) -> None:
    feature_provisioning = get_feature_toggles(url, app_name, instance_id, custom_headers, custom_options)

    if feature_provisioning:
        cache[FEATURES_URL] = feature_provisioning
        cache.sync()
    else:
        LOGGER.warning("Unable to get feature flag toggles, using cached provisioning.")

    load_features(cache, features, strategy_mapping)
Example #5
    def test_cache_updateable(self):
        encode_key = lambda a, b: "{},{}".format(a, b)
        c = Cache(encode_key, 'prefix', validate_expiry=lambda *a: True)
        key = c.get_key(1, 2)

        cache = FileCache(c.cache_store, serialize=True, flag='cs')

        try:
            del cache[key]
        except Exception:  # the key may not exist yet
            pass

        assert c.cache_updateable(cache, key) is False

        cache[key] = {'retval': 'test', 'datetime': datetime.today().date()}

        assert c.cache_updateable(cache, key) is True
        cache.close()
Example #6
def main(argv=None):
    """
    Logic:
        * Generate the unique compile command database.
        * Get worktree branches and changes urls in compile command database
    """
    global logger, cache

    # Getting environment variables
    ccdb_worktree_env = os.environ.get('CCDB_WORKTREE')
    ccdb_worktree_apply_env = os.environ.get('CCDB_WORKTREE_APPLICATION')

    # Create a custom logger
    logger = logging.getLogger(__name__)
    # - Create handlers
    c_handler = logging.StreamHandler()
    # - Create formatters and add it to handlers
    c_format = '[%(asctime)s][ccdb][%(levelname)s] %(message)s'
    c_format = logging.Formatter(c_format)
    c_handler.setFormatter(c_format)
    # - Add handlers to the logger
    logger.addHandler(c_handler)

    # Parse arguments
    parse_arguments(args=argv)

    # Generate unique compile command database
    logger.debug('Generating compile command database')
    list_project_dirs = generate_compile_command()

    if not list_project_dirs:
        exit(0)

    if ccdb_worktree_env is not None:
        if ccdb_worktree_apply_env:
            apply_worktree_env_using_envvar(ccdb_worktree_apply_env)
        else:
            # Load cache
            cache = FileCache('ccdb')
            logger.debug(
                'Applying worktree configuration to compile command database')
            apply_worktree_env(list_project_dirs)
            cache.close()
Example #7
    def __init__(self, environment):
        """Initializes the cache so that kraft does not have to constantly
        retrieve informational lists about unikraft, its available architectures,
        platforms, libraries and supported applications."""

        self._cachedir = os.path.join(environment.get('UK_WORKDIR'), 'kraft.cache')

        self._cache = FileCache(
            app_cache_dir = self._cachedir,
            appname = __program__,
            flag='cs'
        )
Example #8
def cache_full():
    cache_name = 'pytest_%s' % uuid.uuid4()
    temporary_cache = FileCache(cache_name)
    temporary_cache[FEATURES_URL] = MOCK_ALL_FEATURES
    temporary_cache.sync()
    yield temporary_cache
    temporary_cache.delete()
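cache_full (like cache_custom and cache_empty below) reads as a generator-based pytest fixture whose @pytest.fixture decorator was dropped during extraction. Declared and consumed, it would look roughly like the sketch below; FEATURES_URL and MOCK_ALL_FEATURES are placeholders for the test suite's constants.

import uuid
import pytest
from fcache.cache import FileCache

FEATURES_URL = "features"                 # placeholder for the suite's constant
MOCK_ALL_FEATURES = {"features": []}      # placeholder for the mocked payload

@pytest.fixture()
def cache_full():
    temporary_cache = FileCache('pytest_%s' % uuid.uuid4())
    temporary_cache[FEATURES_URL] = MOCK_ALL_FEATURES
    temporary_cache.sync()
    yield temporary_cache                 # the test body runs here
    temporary_cache.delete()              # teardown removes the on-disk cache

def test_cache_is_prepopulated(cache_full):
    assert FEATURES_URL in cache_full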
Example #9
def cache_custom():
    cache_name = 'pytest_%s' % uuid.uuid4()
    temporary_cache = FileCache(cache_name)
    temporary_cache[FEATURES_URL] = MOCK_CUSTOM_STRATEGY
    temporary_cache.sync()
    yield temporary_cache
    temporary_cache.delete()
Example #10
    def __init__(self,
                 url: str,
                 app_name: str,
                 instance_id: str = "unleash-client-python",
                 refresh_interval: int = 15,
                 metrics_interval: int = 60,
                 disable_metrics: bool = False,
                 custom_headers: dict = {}) -> None:
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        """
        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_custom_headers = custom_headers

        # Class objects
        self.cache = FileCache("Unleash")
        self.features: dict = {}
        self.scheduler = BackgroundScheduler()
        self.fl_job: Job = None
        self.metric_job: Job = None
        self.metrics_last_sent_time = datetime.now()

        # Client status
        self.is_initialized = False
Example #11
    def test_decorator(self):
        encode_key = lambda a, b, *aa, **kw: "{},{}".format(a, b)
        retval = int(time.time())
        c = Cache(encode_key, 'prefix', validate_expiry=lambda *a: a[0])

        try:
            cache = FileCache(c.cache_store, serialize=True, flag='cs')
            del cache[c.get_key(True, 1)]
            cache.close()
        except Exception:
            pass

        @c.cache()
        def _cache(a, b, t):
            if not t:
                return int(time.time())
            return t

        cache_ret = _cache(True, 1, retval)
        assert cache_ret == retval

        cache_ret = _cache(True, 1, None)
        assert cache_ret == retval
Example #12
def load_intermediate_ks(dirname: str, suffix: str, cache_dir):
    ks = FileCache('precomputed_ks-{}'.format(suffix),
                   flag='ns',
                   app_cache_dir=cache_dir)
    dir = os.fsencode(dirname)

    for file in os.listdir(dir):
        filename = os.fsdecode(file)
        if filename.endswith(suffix):
            ks_f: dict = json.load(
                open('{}/{}'.format(dirname, filename), mode='rt'))
            for k in ks_f:
                ks[k] = ks_f[k]
    return ks
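A hypothetical call to load_intermediate_ks, assuming JSON files ending in '.json' under ./precomputed; flag='ns' always starts from a fresh, synchronously written cache.

ks = load_intermediate_ks('./precomputed', suffix='.json', cache_dir='/tmp/ks-cache')
print(len(list(ks)))          # number of keys merged from all matching files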
Example #13
    def detect_stuck_messages_queue(queue_body, ttl):
        past_queue = FileCache('message-queue', flag='cs')
        queue_name = queue_body['name']
        current_messages = queue_body['messages']
        current_consumers = queue_body['consumers']

        current_time = datetime.now(timezone.utc)
        current_time = current_time.replace(tzinfo=None)

        if past_queue.get(queue_name):
            time_range_minutes = timedelta.total_seconds(current_time - past_queue[queue_name]['time_catch']) / 60
            if past_queue[queue_name]['messages'] == current_messages:
                if time_range_minutes > ttl:
                    return True
                if time_range_minutes < ttl:
                    return False
            else:
                past_queue[queue_name] = {'messages': current_messages, 'time_catch': current_time,
                                          'consumers': current_consumers}
                return False
        else:
            past_queue[queue_name] = {'messages': current_messages, 'time_catch': current_time,
                                      'consumers': current_consumers}
            return False
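detect_stuck_messages_queue compares a queue's current message count against the snapshot persisted in the 'message-queue' cache and reports the queue as stuck once the count has been unchanged for longer than ttl minutes. A hypothetical call, with queue_body mirroring the fields the function reads:

queue_body = {'name': 'orders', 'messages': 120, 'consumers': 0}
if detect_stuck_messages_queue(queue_body, ttl=30):
    print('Queue "orders" appears stuck: message count unchanged for over 30 minutes.')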
Example #14
    def __init__(self,
                 name: str,
                 bits: int,
                 start: typing.Union[int, typing.Tuple[int, int]] = 0,
                 step: typing.Union[int, typing.Tuple[int, int]] = 1,
                 keys: typing.List[str] = None,
                 cache_name: str = None):
        super().__init__(name, bits)
        self.start: typing.Tuple[int, int] = (start, start) if isinstance(
            start, int) else start
        self.step: typing.Tuple[int, int] = (step, step) if isinstance(
            step, int) else step
        self.rand = random.Random()
        self.keys = keys
        self.cache = FileCache(APPNAME +
                               cache_name, flag='cs') if cache_name else {}
Example #15
    def __init__(self):
        file_path = os.path.dirname(__file__)
        if file_path != "":
            os.chdir(file_path)

        with open('config.yml', 'r') as ymlfile:
            self.config = yaml.load(ymlfile, Loader=yaml.FullLoader)

        self.cache = FileCache('zoe-domoticz', flag='cs')
        self.domoticz = Domoticz(self.config)

        # PyZE stuff:
        self.gigya = Gigya()
        self.login()
        self.kamereon = Kamereon(gigya=self.gigya,
                                 country=self.config['myRenaultCountry'])
        self.vehicle = Vehicle(self.config['myRenaultVIN'], self.kamereon)
Example #16
def __main__():
    conky_path = os.path.dirname(os.path.realpath(__file__))
    cache_path = conky_path + '/.cache'

    # Create cache
    cache = FileCache('conky_cache', flag='cs', app_cache_dir=cache_path)
    cache.create()

    system.__main__()
    cpu.__main__()
    ram.__main__()
    temps.__main__()
    disks.__main__()
    network.__main__()
    containers.__main__()

    # Close Cache
    cache.close()
Example #17
class UnleashClient():
    """
    Client implementation.
    """
    def __init__(self,
                 url: str,
                 app_name: str,
                 instance_id: str = "unleash-client-python",
                 refresh_interval: int = 15,
                 metrics_interval: int = 60,
                 disable_metrics: bool = False,
                 custom_headers: dict = {},
                 custom_strategies: dict = {}) -> None:
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        :param custom_strategies: Dictionary of custom strategy names : custom strategy objects
        """
        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_custom_headers = custom_headers

        # Class objects
        self.cache = FileCache(self.unleash_instance_id)
        self.features: dict = {}
        self.scheduler = BackgroundScheduler()
        self.fl_job: Job = None
        self.metric_job: Job = None
        self.metrics_last_sent_time = datetime.now()

        # Mappings
        default_strategy_mapping = {
            "applicationHostname": ApplicationHostname,
            "default": Default,
            "gradualRolloutRandom": GradualRolloutRandom,
            "gradualRolloutSessionId": GradualRolloutSessionId,
            "gradualRolloutUserId": GradualRolloutUserId,
            "remoteAddress": RemoteAddress,
            "userWithId": UserWithId
        }

        self.strategy_mapping = {**custom_strategies, **default_strategy_mapping}

        # Client status
        self.is_initialized = False

    def initialize_client(self) -> None:
        """
        Initializes client and starts communication with central unleash server(s).

        This kicks off:
        * Client registration
        * Provisioning poll
        * Stats poll

        :return:
        """
        # Setup
        fl_args = {
            "url": self.unleash_url,
            "app_name": self.unleash_app_name,
            "instance_id": self.unleash_instance_id,
            "custom_headers": self.unleash_custom_headers,
            "cache": self.cache,
            "features": self.features,
            "strategy_mapping": self.strategy_mapping
        }

        metrics_args = {
            "url": self.unleash_url,
            "app_name": self.unleash_app_name,
            "instance_id": self.unleash_instance_id,
            "custom_headers": self.unleash_custom_headers,
            "features": self.features,
            "last_sent": self.metrics_last_sent_time
        }

        # Register app
        register_client(self.unleash_url, self.unleash_app_name, self.unleash_instance_id,
                        self.unleash_metrics_interval, self.unleash_custom_headers, self.strategy_mapping)

        fetch_and_load_features(**fl_args)

        # Start periodic jobs
        self.scheduler.start()
        self.fl_job = self.scheduler.add_job(fetch_and_load_features,
                                             trigger=IntervalTrigger(seconds=int(self.unleash_refresh_interval)),
                                             kwargs=fl_args)

        self.metric_job = self.scheduler.add_job(aggregate_and_send_metrics,
                                                 trigger=IntervalTrigger(seconds=int(self.unleash_metrics_interval)),
                                                 kwargs=metrics_args)

        self.is_initialized = True

    def destroy(self):
        """
        Gracefully shuts down the Unleash client by stopping jobs, stopping the scheduler, and deleting the cache.

        You shouldn't need this too much!

        :return:
        """
        self.fl_job.remove()
        self.metric_job.remove()
        self.scheduler.shutdown()
        self.cache.delete()

    # pylint: disable=broad-except
    def is_enabled(self,
                   feature_name: str,
                   context: dict = {},
                   default_value: bool = False) -> bool:
        """
        Checks if a feature toggle is enabled.

        Notes:
        * If the client hasn't been initialized yet or an error occurs, the flag will default to False.

        :param feature_name: Name of the feature
        :param context: Dictionary with context (e.g. IPs, email) for feature toggle.
        :param default_value: Allows override of default value.
        :return: True/False
        """
        if self.is_initialized:
            try:
                return self.features[feature_name].is_enabled(context, default_value)
            except Exception as excep:
                LOGGER.warning("Returning default value for feature: %s", feature_name)
                LOGGER.warning("Error checking feature flag: %s", excep)
                return default_value
        else:
            LOGGER.warning("Returning default value for feature: %s", feature_name)
            LOGGER.warning("Attempted to get feature_flag %s, but client wasn't initialized!", feature_name)
            return default_value
Example #18
class SZFileBackend(CacheBackend):
    def __init__(self, arguments):
        self._cache = FileCache(arguments.pop("appname", None), flag=arguments.pop("flag", "c"),
                                serialize=arguments.pop("serialize", True),
                                app_cache_dir=arguments.pop("app_cache_dir", None))

    def get(self, key):
        value = self._cache.get(key, NO_VALUE)

        return value

    def get_multi(self, keys):
        ret = [
            self._cache.get(key, NO_VALUE)
            for key in keys]

        return ret

    def set(self, key, value):
        self._cache[key] = value

    def set_multi(self, mapping):
        for key, value in mapping.items():
            self._cache[key] = value

    def delete(self, key):
        self._cache.pop(key, None)

    def delete_multi(self, keys):
        for key in keys:
            self._cache.pop(key, None)

    @property
    def all_filenames(self):
        return self._cache._all_filenames()

    def sync(self, force=False):
        if (hasattr(self._cache, "_buffer") and self._cache._buffer) or force:
            self._cache.sync()

    def clear(self):
        self._cache.clear()
        if not hasattr(self._cache, "_buffer") or self._cache._sync:
            self._cache._sync = False
            self._cache._buffer = {}
Example #19
def cache_empty():
    cache_name = 'pytest_%s' % uuid.uuid4()
    temporary_cache = FileCache(cache_name)
    yield temporary_cache
    temporary_cache.delete()
Example #20
    def __init__(self, arguments):
        self._cache = FileCache(arguments.pop("appname", None), flag=arguments.pop("flag", "c"),
                                serialize=arguments.pop("serialize", True),
                                app_cache_dir=arguments.pop("app_cache_dir", None))
Example #21
    def __init__(self, arguments):
        self._cache = FileCache(arguments.pop("appname", None),
                                flag=arguments.pop("flag", "c"),
                                serialize=arguments.pop("serialize", True),
                                app_cache_dir=arguments.pop(
                                    "app_cache_dir", None))
Example #22
class SZFileBackend(CacheBackend):
    def __init__(self, arguments):
        self._cache = FileCache(arguments.pop("appname", None),
                                flag=arguments.pop("flag", "c"),
                                serialize=arguments.pop("serialize", True),
                                app_cache_dir=arguments.pop(
                                    "app_cache_dir", None))

    def get(self, key):
        value = self._cache.get(key, NO_VALUE)

        return value

    def get_multi(self, keys):
        ret = [self._cache.get(key, NO_VALUE) for key in keys]

        return ret

    def set(self, key, value):
        self._cache[key] = value

    def set_multi(self, mapping):
        for key, value in mapping.items():
            self._cache[key] = value

    def delete(self, key):
        self._cache.pop(key, None)

    def delete_multi(self, keys):
        for key in keys:
            self._cache.pop(key, None)

    @property
    def all_filenames(self):
        return self._cache._all_filenames()

    def sync(self, force=False):
        if (hasattr(self._cache, "_buffer") and self._cache._buffer) or force:
            self._cache.sync()

    def clear(self):
        self._cache.clear()
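SZFileBackend implements the dogpile.cache CacheBackend interface (NO_VALUE comes from dogpile.cache.api), so it would normally be registered and consumed through a cache region. A hedged sketch of that wiring; the module path mypackage.cache_backends is purely hypothetical.

from dogpile.cache import make_region, register_backend

# Point dogpile.cache at the backend class shown above.
register_backend("szfile", "mypackage.cache_backends", "SZFileBackend")

region = make_region().configure(
    "szfile",
    arguments={"appname": "my-app", "flag": "cs"},   # forwarded to SZFileBackend.__init__
)

@region.cache_on_arguments()
def expensive_lookup(x):
    return x * x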
Example #23
    def __init__(self,
                 url: str,
                 app_name: str,
                 environment: str = "default",
                 instance_id: str = "unleash-client-python",
                 refresh_interval: int = 15,
                 metrics_interval: int = 60,
                 disable_metrics: bool = False,
                 disable_registration: bool = False,
                 custom_headers: Optional[dict] = None,
                 custom_options: Optional[dict] = None,
                 custom_strategies: Optional[dict] = None,
                 cache_directory: str = None,
                 project_name: str = None) -> None:
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param environment: Name of the environment using the unleash client, optional & defaults to "default".
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        :param custom_options: Default requests parameters, optional & defaults to empty.
        :param custom_strategies: Dictionary of custom strategy names : custom strategy objects
        :param cache_directory: Location of the cache directory. When unset, FCache will determine the location
        """
        custom_headers = custom_headers or {}
        custom_options = custom_options or {}
        custom_strategies = custom_strategies or {}

        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_environment = environment
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_disable_registration = disable_registration
        self.unleash_custom_headers = custom_headers
        self.unleash_custom_options = custom_options
        self.unleash_static_context = {
            "appName": self.unleash_app_name,
            "environment": self.unleash_environment
        }
        self.unleash_project_name = project_name

        # Class objects
        self.cache = FileCache(self.unleash_instance_id,
                               app_cache_dir=cache_directory)
        self.features: dict = {}
        self.scheduler = BackgroundScheduler()
        self.fl_job: Job = None
        self.metric_job: Job = None
        self.cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
        self.cache.sync()

        # Mappings
        default_strategy_mapping = {
            "applicationHostname": ApplicationHostname,
            "default": Default,
            "gradualRolloutRandom": GradualRolloutRandom,
            "gradualRolloutSessionId": GradualRolloutSessionId,
            "gradualRolloutUserId": GradualRolloutUserId,
            "remoteAddress": RemoteAddress,
            "userWithId": UserWithId,
            "flexibleRollout": FlexibleRollout
        }

        if custom_strategies:
            strategy_v2xx_deprecation_check(
                [x for x in custom_strategies.values()])  # pylint: disable=R1721

        self.strategy_mapping = {
            **custom_strategies,
            **default_strategy_mapping
        }

        # Client status
        self.is_initialized = False
Example #24
class UnleashClient:
    """Client implementation."""
    def __init__(self,
                 url: str,
                 app_name: str,
                 environment: str = "default",
                 instance_id: str = "unleash-client-python",
                 refresh_interval: int = 15,
                 metrics_interval: int = 60,
                 disable_metrics: bool = False,
                 disable_registration: bool = False,
                 custom_headers: Optional[dict] = None,
                 custom_options: Optional[dict] = None,
                 custom_strategies: Optional[dict] = None,
                 cache_directory: str = None,
                 project_name: str = None) -> None:
        """
        A client for the Unleash feature toggle system.

        :param url: URL of the unleash server, required.
        :param app_name: Name of the application using the unleash client, required.
        :param environment: Name of the environment using the unleash client, optional & defaults to "default".
        :param instance_id: Unique identifier for unleash client instance, optional & defaults to "unleash-client-python"
        :param refresh_interval: Provisioning refresh interval in seconds, optional & defaults to 15 seconds
        :param metrics_interval: Metrics refresh interval in seconds, optional & defaults to 60 seconds
        :param disable_metrics: Disables sending metrics to unleash server, optional & defaults to false.
        :param custom_headers: Default headers to send to unleash server, optional & defaults to empty.
        :param custom_options: Default requests parameters, optional & defaults to empty.
        :param custom_strategies: Dictionary of custom strategy names : custom strategy objects
        :param cache_directory: Location of the cache directory. When unset, FCache will determine the location
        """
        custom_headers = custom_headers or {}
        custom_options = custom_options or {}
        custom_strategies = custom_strategies or {}

        # Configuration
        self.unleash_url = url.rstrip('\\')
        self.unleash_app_name = app_name
        self.unleash_environment = environment
        self.unleash_instance_id = instance_id
        self.unleash_refresh_interval = refresh_interval
        self.unleash_metrics_interval = metrics_interval
        self.unleash_disable_metrics = disable_metrics
        self.unleash_disable_registration = disable_registration
        self.unleash_custom_headers = custom_headers
        self.unleash_custom_options = custom_options
        self.unleash_static_context = {
            "appName": self.unleash_app_name,
            "environment": self.unleash_environment
        }
        self.unleash_project_name = project_name

        # Class objects
        self.cache = FileCache(self.unleash_instance_id,
                               app_cache_dir=cache_directory)
        self.features: dict = {}
        self.scheduler = BackgroundScheduler()
        self.fl_job: Job = None
        self.metric_job: Job = None
        self.cache[METRIC_LAST_SENT_TIME] = datetime.now(timezone.utc)
        self.cache.sync()

        # Mappings
        default_strategy_mapping = {
            "applicationHostname": ApplicationHostname,
            "default": Default,
            "gradualRolloutRandom": GradualRolloutRandom,
            "gradualRolloutSessionId": GradualRolloutSessionId,
            "gradualRolloutUserId": GradualRolloutUserId,
            "remoteAddress": RemoteAddress,
            "userWithId": UserWithId,
            "flexibleRollout": FlexibleRollout
        }

        if custom_strategies:
            strategy_v2xx_deprecation_check(
                [x for x in custom_strategies.values()])  # pylint: disable=R1721

        self.strategy_mapping = {
            **custom_strategies,
            **default_strategy_mapping
        }

        # Client status
        self.is_initialized = False

    def initialize_client(self) -> None:
        """
        Initializes client and starts communication with central unleash server(s).

        This kicks off:
        * Client registration
        * Provisioning poll
        * Stats poll

        :return:
        """
        # Setup
        fl_args = {
            "url": self.unleash_url,
            "app_name": self.unleash_app_name,
            "instance_id": self.unleash_instance_id,
            "custom_headers": self.unleash_custom_headers,
            "custom_options": self.unleash_custom_options,
            "cache": self.cache,
            "features": self.features,
            "strategy_mapping": self.strategy_mapping,
            "project": self.unleash_project_name
        }

        metrics_args = {
            "url": self.unleash_url,
            "app_name": self.unleash_app_name,
            "instance_id": self.unleash_instance_id,
            "custom_headers": self.unleash_custom_headers,
            "custom_options": self.unleash_custom_options,
            "features": self.features,
            "ondisk_cache": self.cache
        }

        # Register app
        if not self.unleash_disable_registration:
            register_client(self.unleash_url, self.unleash_app_name,
                            self.unleash_instance_id,
                            self.unleash_metrics_interval,
                            self.unleash_custom_headers,
                            self.unleash_custom_options, self.strategy_mapping)

        fetch_and_load_features(**fl_args)

        # Start periodic jobs
        self.scheduler.start()
        self.fl_job = self.scheduler.add_job(
            fetch_and_load_features,
            trigger=IntervalTrigger(
                seconds=int(self.unleash_refresh_interval)),
            kwargs=fl_args)

        if not self.unleash_disable_metrics:
            self.metric_job = self.scheduler.add_job(
                aggregate_and_send_metrics,
                trigger=IntervalTrigger(
                    seconds=int(self.unleash_metrics_interval)),
                kwargs=metrics_args)

        self.is_initialized = True

    def destroy(self):
        """
        Gracefully shuts down the Unleash client by stopping jobs, stopping the scheduler, and deleting the cache.

        You shouldn't need this too much!

        :return:
        """
        self.fl_job.remove()
        if self.metric_job:
            self.metric_job.remove()
        self.scheduler.shutdown()
        self.cache.delete()

    @staticmethod
    def _get_fallback_value(fallback_function: Callable, feature_name: str,
                            context: dict) -> bool:
        if fallback_function:
            fallback_value = fallback_function(feature_name, context)
        else:
            fallback_value = False

        return fallback_value

    # pylint: disable=broad-except
    def is_enabled(self,
                   feature_name: str,
                   context: Optional[dict] = None,
                   fallback_function: Callable = None) -> bool:
        """
        Checks if a feature toggle is enabled.

        Notes:
        * If the client hasn't been initialized yet or an error occurs, the flag will default to False.

        :param feature_name: Name of the feature
        :param context: Dictionary with context (e.g. IPs, email) for feature toggle.
        :param default_value: Allows override of default value. (DEPRECATED, use fallback_function instead!)
        :param fallback_function: Allows users to provide a custom function to set default value.
        :return: True/False
        """
        context = context or {}
        context.update(self.unleash_static_context)

        if self.is_initialized:
            try:
                return self.features[feature_name].is_enabled(context)
            except Exception as excep:
                LOGGER.warning("Returning default value for feature: %s",
                               feature_name)
                LOGGER.warning("Error checking feature flag: %s", excep)
                return self._get_fallback_value(fallback_function,
                                                feature_name, context)
        else:
            LOGGER.warning("Returning default value for feature: %s",
                           feature_name)
            LOGGER.warning(
                "Attempted to get feature_flag %s, but client wasn't initialized!",
                feature_name)
            return self._get_fallback_value(fallback_function, feature_name,
                                            context)

    # pylint: disable=broad-except
    def get_variant(self,
                    feature_name: str,
                    context: Optional[dict] = None) -> dict:
        """
        Checks if a feature toggle is enabled.  If so, return variant.

        Notes:
        * If the client hasn't been initialized yet or an error occurs, the flag will default to False.

        :param feature_name: Name of the feature
        :param context: Dictionary with context (e.g. IPs, email) for feature toggle.
        :return: Dict with variant and feature flag status.
        """
        context = context or {}
        context.update(self.unleash_static_context)

        if self.is_initialized:
            try:
                return self.features[feature_name].get_variant(context)
            except Exception as excep:
                LOGGER.warning(
                    "Returning default flag/variation for feature: %s",
                    feature_name)
                LOGGER.warning("Error checking feature flag variant: %s",
                               excep)
                return DISABLED_VARIATION
        else:
            LOGGER.warning("Returning default flag/variation for feature: %s",
                           feature_name)
            LOGGER.warning(
                "Attempted to get feature flag/variation %s, but client wasn't initialized!",
                feature_name)
            return DISABLED_VARIATION
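Taken together, the UnleashClient examples above suggest a usage pattern along the following lines; the URL, token header, context and feature name are placeholders, and the import path assumes the unleash-client-python package layout.

from UnleashClient import UnleashClient

client = UnleashClient(
    url="https://unleash.example.com/api",
    app_name="my-service",
    custom_headers={"Authorization": "<API token>"},
)
client.initialize_client()        # registers the client and starts the refresh/metrics jobs

if client.is_enabled("my-feature", context={"userId": "42"}):
    pass                          # feature-flagged code path

client.destroy()                  # stop jobs, shut down the scheduler, delete the cache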
Example #25
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import getopt
import requests
import json
import datetime
from fcache.cache import FileCache

mycache = FileCache('yandex')

# variables
YANDEX_ID = "XXX"
YANDEX_PASS = "******"
YANDEX_CODE = False
YANDEX_REQUEST = {'content-type': 'application/x-www-form-urlencoded'}

# @todo: if the variable is null, fetch it from the database
MY_SLUG = 'login'
ALL = False

argv = sys.argv[1:]

if argv == ['-g', 'all'] or argv == ['-get', 'all']:
    ALL = True
else:

    try:
        opts, args = getopt.getopt(argv, "hi:o:", ["code=", "slug="])
    except getopt.GetoptError:
        print('Error-Used:')
Example #26
from .backends import AzureActiveDirectoryBackend
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME, login
from django import VERSION
from django.http import HttpResponseRedirect
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import never_cache
from fcache.cache import FileCache
import uuid

ad_cache = FileCache('azure_ad_auth_cache', flag='c')

if VERSION[0] < 2:
    from django.core.urlresolvers import reverse
else:
    from django.urls import reverse

try:
    # Python 3
    from urllib.parse import urlparse
except ImportError:
    # Python 2
    from urlparse import urlparse


@never_cache
def auth(request):
    backend = AzureActiveDirectoryBackend()
    redirect_uri = request.build_absolute_uri(reverse(complete))
    nonce = str(uuid.uuid4())
    request.session['nonce'] = nonce
Example #27
class Controller(object):
    def __init__(self, config, debugging=False):
        Utils.isDebugging = debugging
        self.config = config

        # read config
        c = self.config['site']
        self.port = c['port']
        self.port_secure = c['port_secure']

        c = self.config['config']
        self.use_https = c['use_https']
        self.use_auth = c['use_auth']
        self.use_alerts = c['use_alerts']
        self.motion_pin = c['motion_pin']
        self.file_name = "/var/log/%s.log" % (os.getcwd().split(os.sep)[-1])

        Utils.temperature_pin = c['temperature_pin']

        c = self.config['config']['times']
        self.time_to_close = c['to_close_door']
        self.time_to_open = c['to_open_door']
        self.time_to_report_open = c['to_report_open']
        self.time_to_report_still_open = c['to_report_still_open']
        self.time_to_force_close = c['to_force_close']

        c = self.config['alerts']
        self.when_opened = c['when_opened']
        self.when_closed = c['when_closed']
        self.on_days_of_week = c['on_days_of_week']
        self.from_time = c['from_time']
        self.to_time = c['to_time']
        self.alert_type = c['alert_type']

        c = self.config['mqtt']
        self.mqtt_server = c['server']
        self.mqtt_username = c['username']
        self.mqtt_password = c['password']

        c = self.config['mqtt']['topics']
        self.mqtt_topic_garage = c['garage']
        self.mqtt_topic_temperature = c['temperature']
        self.mqtt_topic_day_temperature = c['day_temperature']

        c = self.config['weatherapi']
        self.weather_url = c['url']
        self.weather_key = c['key']

        for arg in sys.argv:
            if str(arg) == 'debug':
                # ex. python controller.py debug -v
                Utils.isDebugging = True
                if self.time_to_report_open > 35:
                    self.time_to_report_open = 35
                self.time_to_report_still_open = 100
                Utils.gfileCache += "debug"

            if str(arg).startswith('port='):
                self.port = int((arg).split('=')[1])
                self.port_secure = self.port

        # set up fcache to log last time garage door was opened
        self.fileCache = FileCache(Utils.gfileCache, flag='cs')

        # set up logging
        log_fmt = '%(asctime)s %(levelname)-8s %(message)s'
        date_fmt = '%a, %m/%d/%y %H:%M:%S'
        log_level = logging.INFO

        if Utils.isDebugging:
            logging.basicConfig(datefmt=date_fmt,
                                format=log_fmt,
                                level=log_level)
            self.debugMsg = "Debugging=%s time_to_report_open=%d time_to_report_still_open %d gfileCache=%s" % (
                Utils.isDebugging, self.time_to_report_open,
                self.time_to_report_still_open, Utils.gfileCache)
        else:
            self.debugMsg = "Debugging=%s" % Utils.isDebugging
            logging.getLogger('mylogger').setLevel(logging.NOTSET)
            logging.basicConfig(datefmt=date_fmt,
                                format=log_fmt,
                                level=log_level,
                                filename=self.file_name)
            rotatingHandler = logging.handlers.RotatingFileHandler(
                self.file_name, maxBytes=5000000, backupCount=3)
            rotatingHandler.setLevel(log_level)
            rotatingHandler.setFormatter(logging.Formatter(log_fmt))
            logging.getLogger('mylogger').addHandler(rotatingHandler)

            gpio.setwarnings(False)
            gpio.cleanup()
            gpio.setmode(gpio.BCM)

        # Banner
        logging.info("<---Garage Controller starting (port=%s %s) --->" %
                     (self.port_secure, self.debugMsg))

        self.updateHandler = UpdateHandler(self)

        self.initMsg = ""

        # setup motion sensor
        if self.motion_pin != None and Utils.isDebugging != True:
            gpio.setup(self.motion_pin, gpio.IN)
            gpio.add_event_detect(self.motion_pin,
                                  gpio.RISING,
                                  callback=self.on_motion,
                                  bouncetime=300)
            logging.info("Motion pin = %s" % (self.motion_pin))

        # setup Doors from config file
        self.doors = [
            Doors.Door(x, c) for (x, c) in sorted(config['doors'].items())
        ]
        for door in self.doors:
            door.setup(gpio, self.get_time_since_last_open(door.id))
            self.set_initial_text_msg(door)

        # setup alerts
        if self.alert_type == 'smtp':
            self.use_smtp = False
            smtp_params = ("smtphost", "smtpport", "smtp_tls", "username",
                           "password", "to_email")
            self.use_smtp = ('smtp'
                             in config['alerts']) and set(smtp_params) <= set(
                                 config['alerts']['smtp'])
        elif self.alert_type == 'pushover':
            self.pushover_user_key = config['alerts']['pushover']['user_key']
            self.pushover_api_key = config['alerts']['pushover']['api_key']
        else:
            self.alert_type = None
            logging.info("No alerts configured")

        if Utils.isDebugging:
            print self.initMsg
        else:
            logging.info(self.initMsg)
            self.send_msg(self.initMsg)

    def set_time_since_last_open(self, doorName):
        self.fileCache[doorName] = Utils.get_time()

    """get time since last open, if doesn't exist default to current time and return value"""

    def get_time_since_last_open(self, doorName):
        return (self.fileCache.setdefault(doorName, Utils.get_time()))

    def get_door(self, door_id):
        for door in self.doors:
            if (door.id == door_id):
                return door
        return None

    def resetTimer(self):
        logging.info("Motion resetting timer")
        Utils.WAITING = False

    """motion detected, reset time_in_state to the current time for all open doors, after the "open" message IM has been send (send=False)"""

    def on_motion(self, pin):
        if pin != None:
            curr_time = Utils.get_time()
            if Utils.WAITING == False:
                Utils.WAITING = True
                for d in self.doors:
                    if d.state == Utils.OPEN and (d.send_open_im == False
                                                  or d.send_open_im_debug
                                                  == False):
                        cur_dt = Utils.epoch_to_datetime(curr_time).strftime(
                            Utils.TIMEFORMAT)
                        logging.info("Motion detected, reset %s (%s)" %
                                     (d.name, cur_dt))
                        d.set_open_state(curr_time)

                        t = threading.Timer(10.0, self.resetTimer)
                        t.start()

    def set_initial_text_msg(self, door):
        if len(self.initMsg) == 0:
            self.initMsg = 'Initial state of '
        else:
            self.initMsg += ', '

        self.initMsg += "%s:%s" % (door.name, door.get_state_pin())

    def door_CLOSED(self, door):
        message = ''
        curr_time = Utils.get_time()

        last_open_msg = "%s" % (Utils.get_elapsed_time(
            int(curr_time - door.tslo)))
        self.set_time_since_last_open(door.id)
        door.tslo = self.get_time_since_last_open(door.id)
        door.state = Utils.CLOSED

        ct = curr_time - door.tis.get(Utils.OPENING)
        etime = Utils.get_elapsed_time(int(ct))
        door.tis[door.state] = curr_time

        cur_dt = Utils.epoch_to_datetime(curr_time).strftime(Utils.TIMEFORMAT)
        self.publish_garage_event(door, Utils.CLOSED)

        if door.send_open_im == False:
            message = '%s was %s at %s (%s) away for(%s)' % (
                door.name, door.state, cur_dt, etime, last_open_msg)
        else:
            message = '%s was opened & %s at %s (%s) away for(%s)' % (
                door.name, door.state, cur_dt, etime, last_open_msg)
        return message

    def door_CLOSING(self, door):
        message = ''
        curr_time = Utils.get_time()
        if Utils.is_time_expired(door.tis.get(door.state), self.time_to_close,
                                 curr_time):
            door.state = Utils.CLOSED
            message = self.door_CLOSED(door)
        return message

    def door_OPEN(self, door, pin_state):
        message = ''
        curr_time = Utils.get_time()
        cur_dt = Utils.epoch_to_datetime(curr_time).strftime(Utils.TIMEFORMAT)

        if door.send_open_im_debug == True and Utils.isDebugging:
            self.logger.info("%s %s(%s)" % (door.name, door.state, pin_state))
            door.send_open_im_debug = False

        if door.send_open_im == True and Utils.is_time_expired(
                door.tis.get(door.state), self.time_to_report_open, curr_time):
            door.send_open_im = False
            message = '%s is %s at %s' % (door.name, door.state, cur_dt)

        if Utils.is_time_expired(door.tis.get(Utils.STILLOPEN),
                                 self.time_to_report_still_open, curr_time):
            door.tis[Utils.STILLOPEN] = Utils.round_up_minutes(curr_time)
            message = '%s is still %s at %s' % (door.name, door.state, cur_dt)

        #etime = Utils.elapsed_time(int(curr_time - door.tis.get(door.state)))
        #if self.time_to_force_close != None and Utils.isTimeExpired(door.tis.get(Utils.FORCECLOSE), self.time_to_force_close, curr_time):
        #    door.tis[Utils.FORCECLOSE] = curr_time
        #    message = '%s force closed %s->%s at %s (%s)' % (door.name, door.state, Utils.CLOSED, cur_dt, etime)
        #    door.toggle_relay()

        return message

    def door_OPENING(self, door):
        curr_time = Utils.get_time()
        message = ''

        self.publish_garage_event(door, Utils.OPENING)
        if Utils.is_time_expired(door.tis.get(door.state), self.time_to_open,
                                 curr_time):
            #self.logger.info("%s %s->%s" % (door.name, door.state, OPEN))
            door.state = Utils.OPEN
            door.set_open_state(curr_time)
        return message

    def check_status(self):
        try:
            for door in self.doors:
                self.check_door_status(door)
        except Exception as e:
            self.logger.info("Error check_status %s" % e)

    def check_door_status(self, door):
        self.logger = logging.getLogger(__name__)
        message = ''
        curr_time = Utils.get_time()
        pin_state = door.get_state_pin()

        if pin_state != door.state:
            if door.state != Utils.OPENING and door.state != Utils.CLOSING:
                door.state = Utils.OPENING if door.state == Utils.CLOSED else Utils.CLOSING
                door.tis[door.state] = curr_time
                if Utils.isDebugging:
                    self.logger.info("%s %s(%s)" %
                                     (door.name, door.state, pin_state))

        if door.state == Utils.OPENING:
            message = self.door_OPENING(door)

        elif (door.state == Utils.CLOSING):
            message = self.door_CLOSING(door)

        elif door.state == Utils.OPEN:
            message = self.door_OPEN(door, pin_state)

        if message != "":
            self.logger.info(message)
            self.send_msg(message)

        self.updateHandler.handle_updates()

    def publish_garage_event(self, door, msg):
        pubMsg = ""

        if msg == Utils.OPENING and door.send_open_mqtt == True:
            door.send_open_mqtt = False
            door.send_close_mqtt = True
            pubMsg += door.name + "|" + msg
        elif msg == Utils.CLOSED and door.send_close_mqtt == True:
            door.send_close_mqtt = False
            door.send_open_mqtt = True
            pubMsg += door.name + "|" + msg

        if pubMsg != "":
            Utils.publish_MQTT(self.mqtt_server, self.mqtt_topic_garage,
                               pubMsg, self.mqtt_username, self.mqtt_password)

    def can_send_alert(self):
        dt = Utils.get_date_time()
        if self.use_alerts:
            return Utils.is_day_of_week(
                self, dt.weekday()) and Utils.is_time_between(self, dt.time())
        return False

    def send_msg(self, message):
        if Utils.isDebugging:
            logging.info("PO - %s" % (message))
            return

        if self.can_send_alert():
            if self.alert_type == 'smtp':
                self.send_text(message)
            elif self.alert_type == 'pushover':
                self.send_pushover(message)

    def send_text(self, msg):
        self.logger = logging.getLogger(__name__)

        if Utils.is_too_early():
            return

        #logging.info("SM - %s" % (msg))
        try:
            if self.use_smtp:
                config = self.config['alerts']['smtp']
                server = smtplib.SMTP(config["smtphost"], config["smtpport"])
                if (config["smtp_tls"] == True):
                    server.starttls()
                    server.login(config["username"], config["password"])
                    mg = MIMEText(msg)
                    server.sendmail('from', config["to_email"], mg.as_string())
        except smtplib.SMTPException as e:
            self.logger.error("Error: unable to send gmail text %s", e)
        except:
            self.logger.error("Main Exception: %s", sys.exc_info()[0])
        finally:
            try:
                server.quit()
            except smtplib.SMTPServerDisconnected as sd:
                self.logger.error("sd Error: .quit() failed %s", sd)
                server.close()
            except:
                self.logger.error("final Exception: %s", sys.exc_info()[0])

    def send_pushover(self, message):
        self.logger = logging.getLogger(__name__)

        if Utils.is_too_early():
            return
        try:
            conn = httplib.HTTPSConnection("api.pushover.net:443")
            conn.request(
                "POST", "/1/messages.json",
                urllib.urlencode({
                    "token": self.pushover_api_key,
                    "user": self.pushover_user_key,
                    "title": 'Garage',
                    "sound": 'pushover',
                    "message": message,
                }), {"Content-type": "application/x-www-form-urlencoded"})
            conn.getresponse()
        except socket.gaierror as e:
            self.logger.error("send_pushover Exception2: %s", e)
        except:
            self.logger.error("send_pushover Exception: %s", sys.exc_info()[0])

    def close_all(self):
        self.logger = logging.getLogger(__name__)
        message = ''
        for door in self.doors:
            if door.get_state_pin() != Utils.CLOSED:
                if door.state == Utils.CLOSING or door.state == Utils.OPENING:
                    message += door.name + " Closing or Opening, "
                elif door.state == Utils.OPEN:
                    if message == '':
                        message = 'Close All: '
                    message += door.name + '(' + door.state + ')'
                    door.toggle_relay()
                    time.sleep(0.2)

        if message != '':
            self.logger.info(message)
            return True
        else:
            return False

    def get_updates(self, lastupdate):
        updates = []
        for d in self.doors:
            timeinstate = d.tis.get(d.state)
            if timeinstate >= lastupdate:
                updates.append((d.id, d.state, timeinstate))
        return updates

    def get_config_with_default(self, config, param, default):
        if not config:
            return default
        if not param in config:
            return default
        return config[param]

    def get_temp(self):
        msg = Utils.get_temperature(Utils.temperature_pin)
        if "error reading" in msg:
            logging.info("Error getting temperature")
        if msg != "":
            Utils.publish_MQTT(self.mqtt_server, self.mqtt_topic_temperature,
                               msg, self.mqtt_username, self.mqtt_password)
        return msg

    def get_weather(self):
        #logging.info("calling weatherAPI")
        Utils.query_weather_API(requests, controller)

    def run(self):
        root = File('www')
        root.putChild('upd', self.updateHandler)
        root.putChild(
            'cfg', ConfigHandler(self))  # this prints the doors on the webpage
        root.putChild('upt', UptimeHandler(self))
        root.putChild('log', LogHandler(self))
        root.putChild('temps', TempsHandler(self))
        root.putChild('gettemp', GetTempHandler(self))
        root.putChild('closeall', CloseAllHandler(self))
        root.putChild('clk', ClickHandler(self))
        root.putChild('openclose', ClickOpenCloseHandler(self))
        root.putChild('mot', ClickMotionTestHandler(self))
        root.putChild('graph', ClickGraphHandler(self))
        root.putChild('graphshed', ClickGraphShedHandler(self))
        root.putChild('weather', ClickWeatherHandler(self))
        task.LoopingCall(self.check_status).start(1.0)
        task.LoopingCall(self.get_temp).start(1.0 * 60 * 60)  # every hour
        task.LoopingCall(self.get_weather).start(1.0 * 60 * 60 *
                                                 12)  # every 12 hours

        site = server.Site(root)

        if not self.get_config_with_default(self.config['config'], 'use_https',
                                            False):
            reactor.listenTCP(self.port, site)  # @UndefinedVariable
        else:
            sslContext = ssl.DefaultOpenSSLContextFactory(
                self.config['site']['ssl_key'],
                self.config['site']['ssl_cert'])
            reactor.listenSSL(self.port_secure, site,
                              sslContext)  # @UndefinedVariable

        reactor.run()  # @UndefinedVariable
Example #28
import json
import re
from typing import Collection, Dict, List

from arpeggio import NoMatch, ParserPython, visit_parse_tree
from fcache.cache import FileCache
from loguru import logger

from .parseRules import entrypoint
from .treeVisitor import Visitor

NEW_LINE_FIX = re.compile('\r\n|\r')
CACHE = FileCache(__name__)

AST = List[Dict[str, Collection[Collection[str]]]]

rules = {
    'ws': ['spaces'],
    r'\t| ': ['spaces'],
    'escapedText': ['text'],
    'escapedTextUntilNewLine': ['text until new line'],
    'plainText': ['text'],
    'plainTextUntilNewLine': ['text'],
    'name': ['name'],
    'singleQuotedString': ['single quoted string'],
    'doubleQuotedString': ['double quoted string'],
    'codeString': ['string with ticks instead of quotes'],
    'string': ['single quoted string', 'double quoted string'],
    'beginTag': ['['],
    'beginOneLineTag': ['[['],
    'endTag': [']'],
Example #29
    def __init__(self, config, debugging=False):
        Utils.isDebugging = debugging
        self.config = config

        # read config
        c = self.config['site']
        self.port = c['port']
        self.port_secure = c['port_secure']

        c = self.config['config']
        self.use_https = c['use_https']
        self.use_auth = c['use_auth']
        self.use_alerts = c['use_alerts']
        self.motion_pin = c['motion_pin']
        self.file_name = "/var/log/%s.log" % (os.getcwd().split(os.sep)[-1])

        Utils.temperature_pin = c['temperature_pin']

        c = self.config['config']['times']
        self.time_to_close = c['to_close_door']
        self.time_to_open = c['to_open_door']
        self.time_to_report_open = c['to_report_open']
        self.time_to_report_still_open = c['to_report_still_open']
        self.time_to_force_close = c['to_force_close']

        c = self.config['alerts']
        self.when_opened = c['when_opened']
        self.when_closed = c['when_closed']
        self.on_days_of_week = c['on_days_of_week']
        self.from_time = c['from_time']
        self.to_time = c['to_time']
        self.alert_type = c['alert_type']

        c = self.config['mqtt']
        self.mqtt_server = c['server']
        self.mqtt_username = c['username']
        self.mqtt_password = c['password']

        c = self.config['mqtt']['topics']
        self.mqtt_topic_garage = c['garage']
        self.mqtt_topic_temperature = c['temperature']
        self.mqtt_topic_day_temperature = c['day_temperature']

        c = self.config['weatherapi']
        self.weather_url = c['url']
        self.weather_key = c['key']

        for arg in sys.argv:
            if str(arg) == 'debug':
                # ex. python controller.py debug -v
                Utils.isDebugging = True
                if self.time_to_report_open > 35:
                    self.time_to_report_open = 35
                self.time_to_report_still_open = 100
                Utils.gfileCache += "debug"

            if str(arg).startswith('port='):
                self.port = int((arg).split('=')[1])
                self.port_secure = self.port

        # set up fcache to log last time garage door was opened
        self.fileCache = FileCache(Utils.gfileCache, flag='cs')

        # set up logging
        log_fmt = '%(asctime)s %(levelname)-8s %(message)s'
        date_fmt = '%a, %m/%d/%y %H:%M:%S'
        log_level = logging.INFO

        if Utils.isDebugging:
            logging.basicConfig(datefmt=date_fmt,
                                format=log_fmt,
                                level=log_level)
            self.debugMsg = "Debugging=%s time_to_report_open=%d time_to_report_still_open %d gfileCache=%s" % (
                Utils.isDebugging, self.time_to_report_open,
                self.time_to_report_still_open, Utils.gfileCache)
        else:
            self.debugMsg = "Debugging=%s" % Utils.isDebugging
            logging.getLogger('mylogger').setLevel(logging.NOTSET)
            logging.basicConfig(datefmt=date_fmt,
                                format=log_fmt,
                                level=log_level,
                                filename=self.file_name)
            rotatingHandler = logging.handlers.RotatingFileHandler(
                self.file_name, maxBytes=5000000, backupCount=3)
            rotatingHandler.setLevel(log_level)
            rotatingHandler.setFormatter(logging.Formatter(log_fmt))
            logging.getLogger('mylogger').addHandler(rotatingHandler)

            gpio.setwarnings(False)
            gpio.cleanup()
            gpio.setmode(gpio.BCM)

        # Banner
        logging.info("<---Garage Controller starting (port=%s %s) --->" %
                     (self.port_secure, self.debugMsg))

        self.updateHandler = UpdateHandler(self)

        self.initMsg = ""

        # setup motion sensor
        if self.motion_pin is not None and not Utils.isDebugging:
            gpio.setup(self.motion_pin, gpio.IN)
            gpio.add_event_detect(self.motion_pin,
                                  gpio.RISING,
                                  callback=self.on_motion,
                                  bouncetime=300)
            logging.info("Motion pin = %s" % (self.motion_pin))

        # setup Doors from config file
        self.doors = [
            Doors.Door(x, c) for (x, c) in sorted(config['doors'].items())
        ]
        for door in self.doors:
            door.setup(gpio, self.get_time_since_last_open(door.id))
            self.set_initial_text_msg(door)

        # setup alerts
        if self.alert_type == 'smtp':
            smtp_params = ("smtphost", "smtpport", "smtp_tls", "username",
                           "password", "to_email")
            self.use_smtp = ('smtp' in config['alerts']
                             and set(smtp_params) <= set(config['alerts']['smtp']))
        elif self.alert_type == 'pushover':
            self.pushover_user_key = config['alerts']['pushover']['user_key']
            self.pushover_api_key = config['alerts']['pushover']['api_key']
        else:
            self.alert_type = None
            logging.info("No alerts configured")

        if Utils.isDebugging:
            print(self.initMsg)
        else:
            logging.info(self.initMsg)
            self.send_msg(self.initMsg)
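
The constructor above opens a FileCache purely so the moment each door was last opened survives a restart (see the fcache comment and the get_time_since_last_open call). Those helpers are not shown here; a hypothetical sketch of the pattern, with made-up names record_open and seconds_since_last_open, could look like this:

from datetime import datetime, timezone
from fcache.cache import FileCache

cache = FileCache('garage-demo', flag='cs')  # illustrative cache name

def record_open(door_id):
    # persist the open timestamp so it is still available after a restart
    cache[door_id] = datetime.now(timezone.utc).isoformat()
    cache.sync()

def seconds_since_last_open(door_id):
    # 0.0 means the door has never been recorded as open
    stamp = cache.get(door_id)
    if stamp is None:
        return 0.0
    return (datetime.now(timezone.utc) - datetime.fromisoformat(stamp)).total_seconds()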
Beispiel #30
0
    def __init__(self, cache_name: str = 'nflapi'):
        self.cache = FileCache(cache_name, flag='cs')
        self.cache.clear()
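
This constructor only opens a named cache and empties it on start-up. For context, a minimal sketch of the dict-style interface FileCache exposes (flag='cs' creates the cache if necessary and syncs writes to disk as they happen); the cache name and key here are illustrative:

from fcache.cache import FileCache

cache = FileCache('nflapi-demo', flag='cs')
cache['last_fetch'] = '2021-09-01T00:00:00Z'  # stored like an ordinary dict entry
print(cache.get('last_fetch'))
cache.clear()  # drop every cached entry, as the constructor above does
cache.close()  # flush and release the underlying store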
Beispiel #31
0
class ConfigFinder:
    def __init__(self, config_file: str = "SpotiBot.json") -> None:
        """Instantiates instances of environment configuration from .ini file.

        Args:
            config_file: Name of .ini configuration file following the
                format of SpotiBot_SAMPLE.ini
        """
        self.cache = FileCache(config_file.split(r".")[0], flag="cs")
        self.config_file = config_file
        self.path_to_config = self.cache.get(r"path_to_config")

    def clear_cache(self) -> object:
        """Clears cached path to configuration file."""
        self.cache.clear()
        return self

    @property
    def cache_exists(self) -> bool:
        """Checks to see if a cached file path exists to a valid file."""
        try:
            return os.path.isfile(self.path_to_config)
        except TypeError:  # no cached path (path_to_config is None)
            return False

    @property
    def cache_is_valid(self) -> bool:
        """Checks to see if the valid file path contains the config file."""
        try:
            return self.config_file == os.path.basename(self.path_to_config)
        except TypeError:  # no cached path (path_to_config is None)
            return False

    def locate_config(self):
        """Traverse the file system bottom-up to locate the config file."""
        self.path_to_config = None
        for dirpath, dirnames, files in os.walk(os.path.expanduser("~"),
                                                topdown=False):
            if self.config_file in files:
                self.path_to_config = os.path.join(dirpath, self.config_file)
                break

        return self.path_to_config

    def get_path(self) -> str:
        """Checks for cache existence and validates - traverses OS if not."""
        print("Locating configuration...")

        print("\t<1 of 2> Checking for cached path...")

        if self.cache_exists and self.cache_is_valid:
            print(f"\t<2 of 2> Found cached path: {self.path_to_config}")

        else:
            print("\t<2 of 2> Cached path not found")
            print(f"\nLooking for {self.config_file} in local file system..")

            self.path_to_config = self.locate_config()

            if self.path_to_config:
                print(f"\t<1 of 1> '{self.config_file}' found at: "
                      f"{self.path_to_config}")
            else:
                print(f"\t<1 of 1> Could not find config file"
                      f" {self.config_file} please double check the name of "
                      f"your configuration file or value passed in the"
                      f"'config_file' argument")

        return self.path_to_config

    def read_file(self) -> object:
        """Locates creds file and caches location.

        Returns:
            Dictionary containing SpotiBot configuration params

        """
        self.path_to_config = self.get_path()
        self.cache["path_to_config"] = self.path_to_config

        try:
            with open(self.path_to_config, "r") as r:
                self.cfg = json.load(r)

        except (IOError, TypeError) as e:
            # TypeError covers the case where no config file could be located
            print(e)

        return self
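
Putting the class together, a hypothetical usage sketch (assuming a SpotiBot.json exists somewhere under the user's home directory):

finder = ConfigFinder(config_file="SpotiBot.json")
settings = finder.read_file().cfg   # read_file() returns self and parses the JSON into .cfg
print(sorted(settings.keys()))

finder.clear_cache()                # force a fresh file-system search on the next run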
Beispiel #32
0
from rdkit.Chem.rdMolDescriptors import CalcMolFormula
from pubchempy import get_compounds, Compound
import json
import os
import db_preprocessor
from collections import Counter
from chemicals import serialize_formula

db_preprocessor.write()

os.system('python2 parse_pdf.py')

from fcache.cache import FileCache

mycache = FileCache(
    'myapp',
    flag='cs',
    serialize=True,
    app_cache_dir='/home/caleb/Documents/University/CHE3123/chemical-metadata/fcache')

syn_data = open('Good synoynms by CAS.json').read()
syn_data = json.loads(syn_data)

all_user_names = []
for CAS, d in syn_data.items():
    if 'synonyms' in d:
        all_user_names.extend(d['synonyms'])
all_user_names = set(all_user_names)

pdf_data = open('Parsed scifinder metadata.json').read()
pdf_data = json.loads(pdf_data)