Example #1
0
    def enqueue_initial_messages(self,
                                 sub_key,
                                 topic_name,
                                 endpoint_name,
                                 _group_size=20):
        """ Looks up any messages for the input sub_key in the database and enqueues
        all of them, fetching in batches of _group_size message IDs at a time.

        Runs under the per-sub_key lock so it cannot race with other deliveries
        for the same subscription. topic_name and endpoint_name are used for
        logging only.
        """
        with self.sub_key_locks[sub_key]:

            # Upper bound on publication time - only messages published up to now
            pub_time_max = utcnow_as_ms()
            session = None

            try:

                # One SQL session for all queries
                session = self.pubsub.server.odb.session()

                # Get IDs of any messages already queued up so as to break them out into batches of messages to fetch
                msg_ids = self.pubsub.get_initial_sql_msg_ids_by_sub_key(
                    session, sub_key, pub_time_max)
                msg_ids = [elem.pub_msg_id for elem in msg_ids]

                if msg_ids:
                    len_msg_ids = len(msg_ids)
                    suffix = ' ' if len_msg_ids == 1 else 's '
                    groups = list(grouper(_group_size, msg_ids))
                    len_groups = len(groups)

                    # This we log using both loggers because we run during server startup so we should
                    # let users know that their server have to do something extra
                    for _logger in logger, logger_zato:
                        _logger.info(
                            'Found %d initial message%sto enqueue for sub_key:`%s` (%s -> %s), `%s`, g:%d, gs:%d',
                            len_msg_ids, suffix, sub_key, topic_name,
                            endpoint_name, msg_ids, len_groups, _group_size)

                    for idx, group in enumerate(groups, 1):
                        # grouper pads the final group with fill values - drop them
                        group_msg_ids = [elem for elem in group if elem]
                        logger.info(
                            'Enqueuing group %d/%d (gs:%d) (%s, %s -> %s) `%s`',
                            idx, len_groups, _group_size, sub_key, topic_name,
                            endpoint_name, group_msg_ids)

                        msg_list = self.pubsub.get_sql_messages_by_msg_id_list(
                            session, sub_key, pub_time_max, group_msg_ids)
                        self._enqueue_gd_messages_by_sub_key(sub_key, msg_list)

            except Exception:
                # Best-effort - log through both loggers and carry on rather than
                # aborting server startup. Logger.warn is a deprecated alias,
                # hence logger.warning.
                for _logger in logger, logger_zato:
                    _logger.warning(
                        'Could not enqueue initial messages for `%s` (%s -> %s), e:`%s`',
                        sub_key, topic_name, endpoint_name, format_exc())

            finally:
                if session:
                    session.close()
Example #2
0
 def handle(self):
     """Translate value1 from (system1, key1) to (system2, key2) and, when a
     result is found, fill the response with the value plus several
     alternative representations (repr, hex dump, SHA-1, SHA-256).
     """
     request = self.request.input
     result = self.translate(
         request.system1, request.key1, request.value1,
         request.system2, request.key2)

     if result:
         payload = self.response.payload
         payload.value2 = result.decode('utf-8')
         payload.repr = repr(result)
         payload.hex = ' '.join(
             high + low for high, low in grouper(2, result.encode('hex')))
         payload.sha1 = sha1(result).hexdigest()
         payload.sha256 = sha256(result).hexdigest()
Example #3
0
 def handle(self):
     """Fetch treasury quote rates for a given month/year from the backend
     HTTP data source and store each (date, rate) pair in Redis.
     """

     # Grab month and year from user-provided input or use defaults, i.e. current date,
     # note that day is not needed so it's discarded
     year, month, _ = get_date(self.request.input)

     # Fetch connection by its name
     out = self.outgoing.plain_http.get('treasury.gov')

     # Build a query string the backend data source expects
     query_string = {
         '$filter': 'month(QUOTE_DATE) eq {} and year(QUOTE_DATE) eq {}'.format(month, year)
     }

     # Invoke the backend with query string, fetch the response as a UTF-8 string
     # and turn it into an XML object
     response = out.conn.get(self.cid, query_string)
     response = response.text.encode('utf-8')
     xml = etree.fromstring(response)

     # Look up all XML elements needed (date and rate) using XPath
     elements = xml.xpath('//m:properties/d:*/text()', namespaces=NAMESPACES)

     # elements is a flat list that needs to be turned into pairs using the 'grouper'
     # function before iterating over
     elements = grouper(2, elements)

     for date, rate in elements:

         # Create a date object out of string
         date = parse(date)

         # Build a key for Redis and store the data under it
         key = REDIS_KEY_PATTERN.format(date.year, str(date.month).zfill(2), str(date.day).zfill(2))
         self.kvdb.conn.set(key, rate)

         # Leave a trace of our activity - lazy %-style arguments are used
         # so the message is interpolated only if this level is enabled.
         # (Replaces an earlier eagerly-formatted, duplicate log of the key.)
         self.logger.info('Key %s set to %s', key, rate)
Example #4
0
    def handle(self):
        """Translate value1 from (system1, key1) to (system2, key2); when a
        translation exists, populate the response with the value and its
        repr, hex dump, SHA-1 and SHA-256 digests.
        """
        req = self.request.input
        result = self.translate(req.system1, req.key1, req.value1, req.system2, req.key2)

        # Nothing found - leave the response payload untouched
        if not result:
            return

        out = self.response.payload
        out.value2 = result.decode("utf-8")
        out.repr = repr(result)
        out.hex = " ".join(first + second for first, second in grouper(2, result.encode("hex")))
        out.sha1 = sha1(result).hexdigest()
        out.sha256 = sha256(result).hexdigest()
Example #5
0
def generate_password(length=16):
    """Return a dash-separated password of lowercase ASCII letters.

    A Markov chain trained on the concatenated sample texts emits characters;
    the first `length` of them are taken and joined in groups of four,
    separated by dashes.
    """
    training_text = japanese.lower() + occitan.lower() + breton.lower()
    chain = MarkovChain(c for c in training_text if c in string.ascii_lowercase)
    chars = ''.join(itertools.islice(chain, length))
    return '-'.join(''.join(group) for group in grouper(4, chars))