Example #1
def log_inmates(inmates, recent=False, mode='a'):
    """Log to file all Inmate information excluding mug shot image data.

    Args:
        inmates: List of Inmate objects to be processed.
        recent: Default of False will append to the main log file.
            Specifying True will overwrite the separate recent log, which
            is representative of the inmates seen during the last check.
    """
    if recent:
        location = staticconf.read('path.recent_inmate_log')
        mode = 'w'
        log_function = log.debug
    else:
        location = staticconf.read('path.inmate_log')
        log_function = log.info
    with open(location, mode=mode, encoding='utf-8') as f:
        for inmate in inmates:
            log_function(
                'Recording inmate to the %s log: %s',
                'recent' if recent else 'standard',
                inmate,
            )
            f.write(inmate.to_json() + '\n')
Example #2
def read_log(recent=False):
    """Loads Inmate information from log to re-create Inmate objects.

    Mug shot data is not retrieved, neither from file nor server.

    :param recent: Default of False will read from the main log file.
        Specifying True will read the separate recent log, which
        is representative of the inmates seen during the last check.
        While this is not the default, it is the option most used.
    :type recent: bool

    :returns: The raw inmate objects from the log.
    :rtype: list of dict
    """
    if recent:
        location = staticconf.read('path.recent_inmate_log')
    else:
        location = staticconf.read('path.inmate_log')
    log.debug(
        'Reading inmates from {log_name} log'.format(
            log_name='recent' if recent else 'standard',
        )
    )
    inmate_list = []
    try:
        with open(location, encoding='utf-8') as f:
            for line in f:
                inmate_list.append(json.loads(line))
    except IOError as e:
        # No such file
        if e.errno == errno.ENOENT:
            pass
        else:
            raise
    return inmate_list
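
Both helpers above assume the two log paths are already registered in staticconf's default namespace. A minimal sketch of that wiring, using DictConfiguration (seen in later examples) and hypothetical paths in place of the project's YAML file:

import staticconf

staticconf.DictConfiguration({
    'path.inmate_log': '/tmp/inmates.log',              # hypothetical paths
    'path.recent_inmate_log': '/tmp/inmates_recent.log',
})
records = read_log(recent=True)  # [] if the recent log does not exist yet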
Example #3
    def parse_config(self, config_file_path):
        """Parses the configuration file

        Args:
            config_file_path (string): path to the configuration file
        """
        # Read main logfeeder configuration file
        staticconf.YamlConfiguration(config_file_path)
        self.aws_config_filepath = staticconf.read(
            'logfeeder.aws_config_filepath')
        self.domain = staticconf.read('logfeeder.domain')
        app_file = staticconf.read('{0}.file'.format(self.APP_NAME))

        # Read app specific configuration file
        contents = staticconf.YamlConfiguration(app_file)
        self.api_creds_filepath = staticconf.read('api_creds_filepath')
        if 'rate_limiter_num_calls_per_timeunit' in contents:
            self.rate_limiter = RateLimiter(
                calls_per_timeunit=staticconf.read_int(
                    'rate_limiter_num_calls_per_timeunit'),
                seconds_per_timeunit=staticconf.read_int(
                    'rate_limiter_num_seconds_per_timeunit'),
            )
        self.sub_apis = {}
        for key in contents:
            if key.startswith('enable_'):
                name_of_subapi = key.split('enable_', 1)[1]
                self.sub_apis[name_of_subapi] = staticconf.read_bool(key)
        # If an API doesn't have any sub_apis, register its APP_NAME in
        # self.sub_apis for code compatibility.
        if not self.sub_apis:
            self.sub_apis = {self.APP_NAME: True}
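
parse_config expects two layers of configuration: the main logfeeder YAML, plus an app-specific YAML whose path is stored under '{APP_NAME}.file'. A sketch of the equivalent flat keys, expressed with DictConfiguration purely for illustration; 'myapp' stands in for the class's APP_NAME and every file name and value here is hypothetical:

import staticconf

# Keys expected from the main logfeeder configuration file.
staticconf.DictConfiguration({
    'logfeeder.aws_config_filepath': '/etc/logfeeder/aws.yaml',
    'logfeeder.domain': 'example.com',
    'myapp.file': '/etc/logfeeder/myapp.yaml',  # looked up as '{APP_NAME}.file'
})
# Keys expected from the app-specific file.
staticconf.DictConfiguration({
    'api_creds_filepath': '/etc/logfeeder/myapp_creds.yaml',
    'rate_limiter_num_calls_per_timeunit': 10,
    'rate_limiter_num_seconds_per_timeunit': 60,
    'enable_events': True,  # each 'enable_*' key becomes an entry in self.sub_apis
})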
Example #4
    def parse_config(self, config_file_path):
        super(S3Feeder, self).parse_config(config_file_path)
        self.s3_event_notifications_queue_name = staticconf.read(
            's3_event_notifications_queue_name')
        self.number_messages = staticconf.read('number_messages', default=1)
        self.aws_region = staticconf.read('aws_region', default=None)
        self.owner_account_id = staticconf.read_string('owner_account_id')
        self.role_arn = staticconf.read('role_arn', default=None)
Example #5
def get_twitter_client():
    if not staticconf.read_bool('twitter.enabled', default=False):
        return None
    return Twython(
        app_key=staticconf.read('twitter.api_key'),
        app_secret=staticconf.read('twitter.api_secret'),
        oauth_token=staticconf.read('twitter.access_token'),
        oauth_token_secret=staticconf.read('twitter.access_token_secret'),
    )
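
A sketch of the configuration this factory expects; when twitter.enabled is false or missing it returns None without touching the credential keys. All values below are placeholders:

import staticconf

staticconf.DictConfiguration({
    'twitter.enabled': True,
    'twitter.api_key': 'placeholder-key',
    'twitter.api_secret': 'placeholder-secret',
    'twitter.access_token': 'placeholder-token',
    'twitter.access_token_secret': 'placeholder-token-secret',
})
client = get_twitter_client()  # Twython instance, or None when disabled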
Example #6
def main():
    """TODO DESCRIBE THIS
    """
    staticconf.YamlConfiguration(CONFIG)
    generate_biased_data(
        policy=policy.get_policy(
            complexity=staticconf.read('generate_data.policy.complexity'),
            num_features=staticconf.read('generate_data.num_features'),
            mean=staticconf.read('generate_data.policy.mean'),
            std=staticconf.read('generate_data.policy.std'),
        ),
    )
Example #7
def _create_initial_model(policy):
    """TODO DESCRIBE THIS
    """
    fake_data = numpy.random.random(
        (
            staticconf.read('generate_data.num_examples') * 5,
            staticconf.read('generate_data.num_features'),
        ),
    )
    return model.train_single_model(
        X=fake_data,
        Y=policy(X=fake_data),
    )
Example #8
def _get_opener():
    """Use a proxy (Polipo through Tor) to send our requests through."""
    # If Polipo isn't running, you might need to start it manually
    #   after Tor, and if so be sure to use whatever port it is
    #   listening on (such as 8123). The default port for Polipo used
    #   in the Tor Vidalia Bundle is 8118.
    proxy_support = urllib.request.ProxyHandler({
        'http': '{host}:{port}'.format(
            host=staticconf.read('proxy.host'),
            port=staticconf.read('proxy.port'),
        )
    })
    opener = urllib.request.build_opener(proxy_support)
    return opener
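
The opener is plain urllib plumbing: anything opened through it is routed via the configured HTTP proxy, as the jail-report and mug-shot examples further down do. A small usage sketch with a hypothetical local proxy:

import staticconf

staticconf.DictConfiguration({
    'proxy.host': '127.0.0.1',  # hypothetical Polipo/Tor proxy
    'proxy.port': 8118,
})
opener = _get_opener()
html = opener.open('http://dpdjailview.cityofdenton.com/').read().decode('utf-8')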
Example #9
def most_recent_mug(inmate):
    """Returns the filename of the most recent mug shot for the Inmate.

    Args:
        inmate: The Inmate whose saved mug shot files are searched.
    """
    best = ''
    for filename in os.listdir(staticconf.read('path.mug_shot_dir')):
        # First conditional is for the original filename. The second
        # conditional is for newer timestamps.
        if (fnmatch.fnmatch(filename, '{}.jpg'.format(inmate.id)) or
                fnmatch.fnmatch(filename, '{}_*.jpg'.format(inmate.id))):
            log.debug(
                'Found recent mug candidate for inmate-ID %s: %r',
                inmate.id,
                filename,
            )
            if filename > best:
                best = filename
                log.debug(
                    'Best recent mug candidate so far for inmate-ID %s: %r',
                    inmate.id,
                    filename,
                )
    if not best:
        log.debug('Found no recent mug shot for inmate-ID %s.', inmate.id)
    return best
Example #10
    def ussd_initial(self, *args, **kwargs):
        if self.customer_journey_conf is None \
                or self.customer_journey_namespace is None:
            raise MissingAttribute("attribute customer_journey_conf and "
                                   "customer_journey_namespace are required")
        if self.customer_journey_namespace not in \
                staticconf.config.configuration_namespaces:
            load_yaml(
                self.customer_journey_conf
            )

        # confirm variable template has been loaded
        # get initial screen
        initial_screen = staticconf.read(
            "initial_screen",
            namespace=self.customer_journey_conf)

        if isinstance(initial_screen, dict) and \
                initial_screen.get('variables'):
            variable_conf = initial_screen['variables']
            file_path = variable_conf['file']
            namespace = variable_conf['namespace']
            if namespace not in \
                    staticconf.config.configuration_namespaces:
                load_yaml(file_path)

        self.initial_screen = initial_screen \
            if isinstance(initial_screen, dict) \
            else {"initial_screen": initial_screen}
Example #11
    def run_handlers(self, ussd_request):

        handler = ussd_request.session['_ussd_state']['next_screen'] \
            if ussd_request.session.get('_ussd_state', {}).get('next_screen') \
            else "initial_screen"

        ussd_response = (ussd_request, handler)

        if handler != "initial_screen":
            # get start time
            start_time = utilities.string_to_datetime(
                ussd_request.session["ussd_interaction"][-1]["start_time"])
            end_time = datetime.now()
            # Report in milliseconds
            duration = (end_time - start_time).total_seconds() * 1000
            ussd_request.session["ussd_interaction"][-1].update({
                "input":
                ussd_request.input,
                "end_time":
                utilities.datetime_to_string(end_time),
                "duration":
                duration
            })

        # Handle any forwarded Requests; loop until a Response is
        # eventually returned.
        while not isinstance(ussd_response, UssdResponse):
            ussd_request, handler = ussd_response

            screen_content = staticconf.read(
                handler, namespace=self.customer_journey_namespace)

            screen_type = 'initial_screen' \
                if handler == "initial_screen" and \
                   isinstance(screen_content, str) \
                else screen_content['type']

            ussd_response = _registered_ussd_handlers[screen_type](
                ussd_request,
                handler,
                screen_content,
                initial_screen=self.initial_screen,
                logger=self.logger).handle()

        ussd_request.session['_ussd_state']['next_screen'] = handler

        ussd_request.session['ussd_interaction'].append({
            "screen_name": handler,
            "screen_text": str(ussd_response),
            "input": ussd_request.input,
            "start_time": utilities.datetime_to_string(datetime.now()),
        })
        # Attach session to outgoing response
        ussd_response.session = ussd_request.session

        return ussd_response
Example #12
    def ussd_initial(self, request, *args, **kwargs):
        if hasattr(self, 'get_customer_journey_conf'):
            self.customer_journey_conf = self.get_customer_journey_conf(
                request)
        if hasattr(self, 'get_customer_journey_namespace'):
            self.customer_journey_namespace = \
                self.get_customer_journey_namespace(request)

        if self.customer_journey_conf is None \
                or self.customer_journey_namespace is None:
            raise MissingAttribute("attributes customer_journey_conf and "
                                   "customer_journey_namespace are required")
        utility.observeFile(self.customer_journey_conf)
        if self.customer_journey_namespace not in \
                staticconf.config.configuration_namespaces:
            load_yaml(self.customer_journey_conf,
                      self.customer_journey_namespace)

        # confirm variable template has been loaded
        # get initial screen

        screen_content = staticconf.read(
            "initial_screen", namespace=self.customer_journey_namespace)

        if isinstance(screen_content, dict) and \
                screen_content.get('variables'):
            variable_conf = screen_content['variables']
            file_path = variable_conf['file']
            namespace = variable_conf['namespace']
            if namespace not in \
                    staticconf.config.configuration_namespaces:
                load_yaml(file_path, namespace)
Example #13
    def get(self, key: str, options: Dict):
        if not self.inited:
            self.init()
            self.inited = True

        try:
            return staticconf.read(key)
        except Exception:
            return None
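
staticconf.read raises for keys that are not configured, which is what the broad except above guards against. The same lookup can avoid a catch-all handler by using the default= argument that several other examples in this collection rely on; a sketch with a hypothetical helper name:

def get_config_value(key, default=None):
    # A missing key yields the default instead of raising; any other
    # error still propagates instead of being swallowed.
    return staticconf.read(key, default=default)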
Example #14
def load_config(config_path):
    """Reads the configuration from a YAML file located at
    `config_path`.
    """
    config = {}
    staticconf.YamlConfiguration(config_path)

    # Get all brands for the ELG talk
    config['brands'] = staticconf.read('brands')

    return config
Example #15
def enable(args: argparse.Namespace) -> None:
    dynamodb.delete_item(
        TableName=staticconf.read('aws.state_table', default=CLUSTERMAN_STATE_TABLE),
        Key={
            'state': {'S': AUTOSCALER_PAUSED},
            'entity': {'S': f'{args.cluster}.{args.pool}.{args.scheduler}'},
        },
    )
Example #16
def _should_throttle(at_time):
    minimum_report_age_s = staticconf.read('minimum_report_age_s')
    minimum_report_time = at_time - minimum_report_age_s
    try:
        last_report_time = os.path.getmtime(
            staticconf.read('path.recent_report_html'),
        )
    except OSError:
        log.warning('No recent report, so not throttling.')
        return 0
    if minimum_report_time < last_report_time:
        last_report_relative_s = int(at_time - last_report_time)
        log.info(
            (
                'Throttling since last report was generated %d s ago, '
                'which is less than %d s.'
            ),
            last_report_relative_s,
            minimum_report_age_s,
        )
        return minimum_report_age_s - last_report_relative_s
    return 0
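
A quick worked example of the throttle arithmetic, with hypothetical numbers: if minimum_report_age_s is 3600 and the last report was written 600 seconds before at_time, the function throttles and returns the time left to wait.

# Hypothetical values for illustration only.
at_time = 1_700_000_000
minimum_report_age_s = 3600
last_report_time = at_time - 600                        # report written 600 s ago
minimum_report_time = at_time - minimum_report_age_s
assert minimum_report_time < last_report_time           # recent enough to throttle
remaining = minimum_report_age_s - int(at_time - last_report_time)
print(remaining)                                        # 3000 seconds left to wait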
Example #17
def enable(args: argparse.Namespace) -> None:
    dynamodb.delete_item(
        TableName=staticconf.read('aws.state_table', default=CLUSTERMAN_STATE_TABLE),
        Key={
            'state': {'S': AUTOSCALER_PAUSED},
            'entity': {'S': f'{args.cluster}.{args.pool}.{args.scheduler}'},
        }
    )
    time.sleep(1)  # Give DynamoDB some time to settle
    now = parse_time_string('now').to('local')
    if autoscaling_is_paused(args.cluster, args.pool, args.scheduler, now):
        print('Something went wrong!  The autoscaler is paused')
    else:
        print(f'The autoscaler for {args.cluster}.{args.pool}.{args.scheduler} was enabled at {now}')
Example #18
    def run_handlers(self, ussd_request):
        if ussd_request.session['_ussd_state']['next_screen']:
            handler = ussd_request.session['_ussd_state']['next_screen']
        else:
            handler = staticconf.read(
                'initial_screen', namespace=self.customer_journey_namespace)
            if isinstance(handler, dict):
                # set default language from namespace
                if 'default_language' in handler:
                    ussd_request.default_language = handler.get('default_language', ussd_request.default_language)
                handler = handler["screen"]
        ussd_response = (ussd_request, handler)

        # Handle any forwarded Requests; loop until a Response is
        # eventually returned.
        while not isinstance(ussd_response, UssdResponse):
            ussd_request, handler = ussd_response

            screen_content = staticconf.read(
                handler,
                namespace=self.customer_journey_namespace)

            ussd_response = _registered_ussd_handlers[screen_content['type']](
                ussd_request,
                handler,
                screen_content,
                template_namespace=self.template_namespace,
                logger=self.logger
            ).handle()

        ussd_request.session['_ussd_state']['next_screen'] = handler


        # Attach session to outgoing response
        ussd_response.session = ussd_request.session

        return ussd_response
Example #19
    def run_handlers(self, ussd_request):

        handler = ussd_request.session['_ussd_state']['next_screen'] \
            if ussd_request.session.get('_ussd_state', {}).get('next_screen') \
            else "initial_screen"

        ussd_response = (ussd_request, handler)

        if handler != "initial_screen":
            ussd_request.session["ussd_interaction"][-1].update(
                {"input": ussd_request.input})

        # Handle any forwarded Requests; loop until a Response is
        # eventually returned.
        while not isinstance(ussd_response, UssdResponse):
            ussd_request, handler = ussd_response

            screen_content = staticconf.read(
                handler, namespace=self.customer_journey_namespace)

            screen_type = 'initial_screen' \
                if handler == "initial_screen" and \
                   isinstance(screen_content, str) \
                else screen_content['type']

            ussd_response = _registered_ussd_handlers[screen_type](
                ussd_request,
                handler,
                screen_content,
                template_namespace=ussd_request.session.get(
                    'template_namespace', None),
                logger=self.logger).handle()

        ussd_request.session['_ussd_state']['next_screen'] = handler

        ussd_request.session['ussd_interaction'].append({
            "screen_name": handler,
            "screen_text": str(ussd_response),
            "input": ussd_request.input,
        })
        # Attach session to outgoing response
        ussd_response.session = ussd_request.session

        return ussd_response
Example #20
def disable(args: argparse.Namespace) -> None:
    state = {
        'state': {'S': AUTOSCALER_PAUSED},
        'entity': {'S': f'{args.cluster}.{args.pool}.{args.scheduler}'},
    }
    if args.until:
        state['expiration_timestamp'] = {
            'N': str(parse_time_string(args.until).timestamp),
        }

    dynamodb.put_item(
        TableName=staticconf.read('aws.state_table', default=CLUSTERMAN_STATE_TABLE),
        Item=state,
    )
Example #21
def get_most_inmates_count():
    """Returns the filename of the most recent mug shot for the Inmate.

    Returns:
        A tuple with the last most_count and the on_date when that occurred.
    """
    most_count, on_date = (None, None)
    try:
        with open(staticconf.read('path.most_inmate_count'), mode='r') as f:
            (most_count, on_date) = f.read().split('\n')
            most_count = int(most_count)
    except IOError as e:
        # No such file
        if e.errno == errno.ENOENT:
            log.warning('No file with statistics found.')
        else:
            raise
    except ValueError:
        log.warning('Could not parse data from file.')
    return (most_count, on_date)
Example #22
def main(args: argparse.Namespace) -> None:
    staticconf.YamlConfiguration(args.config, flatten=False)
    backup_set_config = staticconf.read('backups')[args.name]
    staticconf.DictConfiguration(backup_set_config, namespace=args.name)
    backup_store = get_backup_store(args.name)

    if args.manifest:
        manifest = Manifest(args.filename)
        private_key_filename = backup_store.config.read('private_key_filename',
                                                        default='')
        lock_manifest(
            manifest,
            private_key_filename,
            backup_store._save,
            backup_store._load,
            backup_store.options,
        )
    else:
        with backup_store.unlock():
            backup_store.save_if_new(args.filename)
Example #23
def _get_jail_report(bucket):
    html = jail.get_jail_report()
    if html is None:
        # Without a report, there is nothing to do.
        return None
    with open(
        staticconf.read('path.recent_report_html'),
        mode='w',
        encoding='utf-8',
    ) as f:
        # Useful for debugging to have a copy of the last seen page.
        # Also used to throttle automatic restarts.
        f.write(html)
    if bucket is not None:
        # Archive the report so it can be processed or analyzed later.
        jail.save_jail_report_to_s3(
            bucket=bucket,
            html=html,
            timestamp=datetime.datetime.utcnow(),
        )
    return html
Example #24
def get_mug_shots(inmates, bucket):
    """Retrieves the mug shot for each Inmate and stores it in the Inmate."""
    log.info('Getting mug shots')
    opener = _get_opener()
    for inmate in inmates:
        log.info('Opening mug shot URL (ID: %s)', inmate.id)
        uri = (
            'http://dpdjailview.cityofdenton.com/'
            'ImageHandler.ashx?type=image&imageID={mug_id}'
        ).format(mug_id=inmate.id)
        try:
            with util.timeout(
                seconds=staticconf.read('timeout.open_one_mug_shot'),
            ):
                response = opener.open(uri)
            image_data = response.read()
        except urllib.error.HTTPError as e:
            log.warning(
                'Unable to retrieve inmate-ID %s due to HTTP %s: %r',
                inmate.id,
                e.code,
                e,
            )
            continue
        except http.client.BadStatusLine as e:
            log.warning(
                'Unable to retrieve inmate-ID %s: %r',
                inmate.id,
                e,
            )
            continue
        except TimeoutError:
            log.warning(
                'Timeout while getting mug shot for inmate-ID %s.',
                inmate.id,
            )
            continue
        inmate.mug = image_data
        if bucket is not None:
            _save_mug_shot_to_s3(bucket=bucket, inmate=inmate)
Example #25
def disable(args: argparse.Namespace) -> None:
    ensure_account_id(args.cluster)

    state = {
        'state': {'S': AUTOSCALER_PAUSED},
        'entity': {'S': f'{args.cluster}.{args.pool}.{args.scheduler}'},
        'timestamp': {'N': str(int(time.time()))},
    }

    if args.until:
        state['expiration_timestamp'] = {
            'N': str(parse_time_string(args.until).timestamp),
        }

    dynamodb.put_item(
        TableName=staticconf.read('aws.state_table', default=CLUSTERMAN_STATE_TABLE),
        Item=state,
    )

    time.sleep(1)  # Give DynamoDB some time to settle

    now = parse_time_string('now').to('local')

    if not autoscaling_is_paused(args.cluster, args.pool, args.scheduler, now):
        print('Something went wrong!  The autoscaler is NOT paused')
    else:
        s = f'The autoscaler for {args.cluster}.{args.pool}.{args.scheduler} was paused at {now}'

        if args.until:
            until_str = str(parse_time_string(args.until).to('local'))
            s += f' until {until_str}'

        print(s)
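
The paused flag written here is a single DynamoDB item keyed by state and entity. A hypothetical read-back of that item with the same low-level client, shown only as a sketch and not the project's actual autoscaling_is_paused helper; it assumes the module context above (dynamodb, AUTOSCALER_PAUSED, CLUSTERMAN_STATE_TABLE, args):

response = dynamodb.get_item(
    TableName=staticconf.read('aws.state_table', default=CLUSTERMAN_STATE_TABLE),
    Key={
        'state': {'S': AUTOSCALER_PAUSED},
        'entity': {'S': f'{args.cluster}.{args.pool}.{args.scheduler}'},
    },
)
# Presence of the item means the autoscaler is paused; the real check
# would also honour any expiration_timestamp attribute on the item.
paused = 'Item' in response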
Example #26
def get_jail_report():
    """Retrieves the Denton City Jail Custody Report webpage."""
    log.info('Getting Jail Report')
    opener = _get_opener()
    try:
        with util.timeout(seconds=staticconf.read('timeout.open_jail_report')):
            response = opener.open('http://dpdjailview.cityofdenton.com/')
        log.debug('Reading jail report page')
        html = response.read().decode('utf-8')
    except urllib.error.HTTPError as error:
        html = None
        log.warning(
            'HTTP %r error while getting jail report: %r',
            error.code,
            error,
        )
    except (http.client.HTTPException, urllib.error.URLError) as error:
        html = None
        log.warning('Other error while getting jail report: %r', error)
    except TimeoutError:
        html = None
        log.warning('Timeout while getting jail report.')
    return html
Example #27
    def submit(self, task):
        if not task:
            return

        if not self.enabled:
            task.log.info('Task failed to start, Mesos is disabled.')
            task.exited(1)
            return
        self._check_connection()

        mesos_task_id = task.get_mesos_id()
        self.tasks[mesos_task_id] = task
        env = task.get_config()['environment']
        clusterman_resource_str = env.get('CLUSTERMAN_RESOURCES')
        clusterman_metrics = get_clusterman_metrics()
        if clusterman_resource_str and clusterman_metrics:
            clusterman_resources = json.loads(clusterman_resource_str)
            cluster = env.get('EXECUTOR_CLUSTER', env.get('PAASTA_CLUSTER'))
            pool = env.get('EXECUTOR_POOL', env.get('PAASTA_POOL'))
            aws_region = staticconf.read(f'clusters.{cluster}.aws_region',
                                         namespace='clusterman')
            metrics_client = clusterman_metrics.ClustermanMetricsBotoClient(
                region_name=aws_region,
                app_identifier=pool,
            )
            with metrics_client.get_writer(
                    clusterman_metrics.APP_METRICS,
                    aggregate_meteorite_dims=True) as writer:
                for metric_key, metric_value in clusterman_resources.items():
                    writer.send((metric_key, int(time.time()), metric_value))
        self.runner.run(task.get_config())
        log.info(
            'Submitting task {} to {}'.format(
                mesos_task_id,
                self.mesos_address,
            ),
        )
        task.report_resources()
Example #28
    def ussd_initial(self, request, *args, **kwargs):
        if hasattr(self, 'get_customer_journey_conf'):
            self.customer_journey_conf = self.get_customer_journey_conf(
                request
            )
        if hasattr(self, 'get_customer_journey_namespace'):
            self.customer_journey_namespace = \
                self.get_customer_journey_namespace(request)

        if self.customer_journey_conf is None \
                or self.customer_journey_namespace is None:
            raise MissingAttribute("attributes customer_journey_conf and "
                                   "customer_journey_namespace are required")

        if self.customer_journey_namespace not in \
                staticconf.config.configuration_namespaces:
            load_ussd_screen(
                self.customer_journey_conf,
                self.customer_journey_namespace
            )

        # check if variables exist and have been loaded
        initial_screen = staticconf.read(
            'initial_screen',
            namespace=self.customer_journey_namespace)

        if isinstance(initial_screen, dict) and initial_screen.get('variables'):
            variable_conf = initial_screen['variables']
            file_path = variable_conf['file']
            namespace = variable_conf['namespace']

            # check if it has been loaded
            if namespace not in \
                    staticconf.config.configuration_namespaces:
                load_variables(file_path, namespace)
            self.template_namespace = namespace
Example #29
def generate_biased_data(policy):
    """TODO DESCRIBE THIS
    """
    single_model = _create_initial_model(policy=policy)
    epoch_history = []
    for _ in range(staticconf.read('generate_data.num_epochs')):
        epoch_info = run_single_epoch.run_single_epoch(
            model=single_model,
            actual_policy=policy,
            num_actions=staticconf.read('generate_data.num_actions'),
            num_features=staticconf.read('generate_data.num_features'),
            num_examples=staticconf.read('generate_data.num_examples'),
        )
        epoch_history.append(epoch_info)
    if staticconf.read('generate_data.model_output'):
        # TODO: implement writing the model to the configured model_output.
        pass
    with open(staticconf.read('generate_data.output_file'), 'w+') as fh:
        for epoch_info in epoch_history:
            fh.write(json.dumps(epoch_info))
            fh.write('\n')
Example #30
def setup_config(config_file: str) -> None:
    staticconf.YamlConfiguration(config_file, flatten=False)
    for backup_name, backup_config in staticconf.read('backups').items():
        staticconf.DictConfiguration(backup_config, namespace=backup_name)
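
After setup_config runs, each backup set's settings live in their own namespace, which is exactly what Example #22 reads back. A sketch with a hypothetical file name and backup name ('home_dirs'); private_key_filename is the key Example #22 reads:

import staticconf

setup_config('backuparchiver.yaml')  # hypothetical config file
key_file = staticconf.read('private_key_filename', namespace='home_dirs', default='')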
Example #31
def save_mug_shots(inmates):
    """Saves the mug shot image data to a file for each Inmate.

    Mug shots are saved by the Inmate's ID.
    If an image file with the same ID already exists and the new mug shot
    is different, the new mug shot is saved with the current date / time
    appended to the filename.

    Args:
        inmates: List of Inmate objects to be processed.
    """
    path = staticconf.read('path.mug_shot_dir')
    try:
        os.makedirs(path)
    except OSError as e:
        # File/Directory already exists
        if e.errno == errno.EEXIST:
            pass
        else:
            raise
    # Save each inmate's mug shot
    for inmate in inmates:
        # Skip inmates with no mug shot
        if inmate.mug is None:
            log.debug('Skipping inmate-ID %s with no mug shot.', inmate.id)
            continue
        # Check if there is already a mug shot for this inmate
        try:
            old_size = os.path.getsize(os.path.join(path, inmate.id + '.jpg'))
            if old_size == len(inmate.mug):
                log.debug(
                    'Skipping already saved mug shot (ID: %s)',
                    inmate.id,
                )
                continue
            else:
                # Check whether an identical copy already exists under an
                # alternate (timestamped) filename; if so, skip this inmate.
                duplicate_found = False
                for filename in os.listdir(path):
                    if (fnmatch.fnmatch(filename, '{}_*.jpg'.format(inmate.id))
                            and os.path.getsize(os.path.join(path, filename))
                            == len(inmate.mug)):
                        log.debug(
                            'Skipping already saved mug shot (ID: %s)',
                            inmate.id,
                        )
                        duplicate_found = True
                        break
                if duplicate_found:
                    continue
                log.debug(
                    'Saving mug shot under alternate filename (ID: %s)',
                    inmate.id,
                )
                location = os.path.join(
                    path,
                    '{inmate_id}_{timestamp}.jpg'.format(
                        inmate_id=inmate.id,
                        timestamp=datetime.datetime.now().strftime(
                            '%y%m%d%H%M%S',
                        ),
                    ),
                )
        except OSError as e:
            # No such file
            if e.errno == errno.ENOENT:
                old_size = None
                location = os.path.join(
                    path,
                    '{inmate_id}.jpg'.format(inmate_id=inmate.id),
                )
            else:
                raise
        # Save the mug shot
        log.debug(
            'Writing mug shot for inmate-ID %s to: %s',
            inmate.id,
            location,
        )
        with open(location, mode='wb') as f:
            f.write(inmate.mug)
Example #32
import staticconf
from twilio.rest import TwilioRestClient

filename = 'secret.yaml'
staticconf.YamlConfiguration(filename)

account_sid = staticconf.read('account_sid')
auth_token = staticconf.read('auth_token')
to_number = staticconf.read('to')
from_number = staticconf.read('from')

def send_text_message():
    client = TwilioRestClient(account_sid, auth_token)
    message = client.messages.create(
        body="""Hi Kuba,
Here's a reminder to complete your cleaning schedule. Please text back with confirmation of the completed cleaning within 72 hours. Any problems please call the office on 0800 72 72 72.

Have a great week!

The ReClean Team""",
        to=to_number,
        from_=from_number)
    return message.sid
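
The module-level reads above expect secret.yaml to define four flat keys. An equivalent sketch with DictConfiguration and placeholder values, shown only to make the expected key names explicit:

import staticconf

staticconf.DictConfiguration({
    'account_sid': 'ACxxxxxxxxxxxxxxxxxxxxxxxx',  # placeholder Twilio credentials
    'auth_token': 'placeholder-token',
    'to': '+15005550006',                         # placeholder phone numbers
    'from': '+15005550001',
})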
Example #33
def log_most_inmates_count(count):
    """Logs to file the most-count and the current date."""
    now = now = datetime.datetime.now().strftime('%m/%d/%y %H:%M:%S')
    log.info('Logging most inmates count at %s on %s', count, now)
    with open(staticconf.read('path.most_inmate_count'), mode='w') as f:
        f.write('{}\n{}'.format(count, now))
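
Paired with get_most_inmates_count from Example #21, this gives a small round trip through the two-line stats file; a sketch with a hypothetical path:

import staticconf

staticconf.DictConfiguration({'path.most_inmate_count': '/tmp/most_count.txt'})
log_most_inmates_count(42)
most_count, on_date = get_most_inmates_count()  # (42, '<timestamp written above>')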
Example #34
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
)
# Silence unneeded debug statements from boto.
logging.getLogger('boto').setLevel(logging.INFO)
# Don't write config values to the log. We don't use schemas yet.
logging.getLogger('staticconf.config').setLevel(logging.WARNING)

log = logging.getLogger(__name__)

config.load_config()


if staticconf.read('aws.s3.bucket', default=None):
    conn = boto.s3.connect_to_region(
        region_name=staticconf.read('aws.s3.region'),
    )
    bucket = conn.get_bucket(bucket_name=staticconf.read('aws.s3.bucket'))
    log.info('AWS configured to use bucket %r', bucket)
else:
    bucket = None

if staticconf.read('sentry.dsn', default=None):
    sentry_dsn = staticconf.read('sentry.dsn')
    log.info('Sentry logging configured.')
else:
    sentry_dsn = None
sentry_client = raven.Client(dsn=sentry_dsn)
# Send any ERROR level logs to Sentry.