Code Example #1
    def load_cached_data(self):
        # Lazily decode the precomputed Interact data on first access.
        if self.interact_precomputed_data is None:
            interact_data = self.context["o_state"].get_state("interact.precomputed", direct=True)
            if interact_data is not None:
                log.log(log.NOTICE, "Loading Interact precomputed data...")
                self.interact_precomputed_data = misc.decode_json(interact_data)
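
This method caches the decoded payload on the instance, so repeated calls skip both the state lookup and the JSON decode. A minimal, self-contained sketch of the same lazy-load pattern using only the standard library (the `CachedLoader` class, the dict-like `store`, and the key name are illustrative stand-ins, not part of the original API):

    import json

    class CachedLoader:
        def __init__(self, store):
            self._store = store  # any dict-like source of raw JSON strings
            self._data = None    # decoded payload, populated on first access

        def load(self):
            # Decode once; subsequent calls return the cached object.
            if self._data is None:
                raw = self._store.get("interact.precomputed")
                if raw is not None:
                    self._data = json.loads(raw)
            return self._data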
Code Example #2
    def persist_aggregates():
        global all_kv_objects
        seconds_from_epoch = misc.seconds_from_epoch_utc()
        for t in all_kv_objects:
            # Skip tables that are not loaded, are not KV tables, or define no aggregates.
            if t.table_cache is None or not t.is_kv_table or len(t.aggregates) == 0:
                continue
            log.debug("Persisting aggregates for KV table '%s'..." % t.table_name)
            xray_recorder.begin_subsegment("persist_aggregates.%s" % t.table_name)
            for aggregate in t.aggregates:
                serialized = []
                ttl = 0
                compress = aggregate.get("Compress", False)
                prefix = aggregate["Prefix"]
                t._safe_key_import(serialized, aggregate, t.table_cache)
                # The aggregate TTL is the longest remaining lifetime among its items.
                for i in serialized:
                    if "ExpirationTime" in i:
                        ttl = max(ttl, i["ExpirationTime"] - seconds_from_epoch)
                if len(serialized) == 0:
                    ttl = aggregate["DefaultTTL"]
                if log.level == log.DEBUG:
                    log.log(log.NOTICE, "Delta between aggregate '%s' in DynamoDB and the new one:" % prefix)
                    # Leftover debugging hook: compare the new serialization with
                    # the one currently stored in DynamoDB.
                    if KVTable.compare_kv_list(serialized, misc.decode_json(t.get_kv(prefix))) == 0:
                        pass
                    log.log(log.NOTICE, "Delta end.")
                t.set_kv(prefix, misc.encode_json(serialized, compress=compress), TTL=ttl)
            if t.table_cache_dirty:
                t.set_kv("cache.last_write_index", t.context["now"], TTL=0)
            xray_recorder.end_subsegment()
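
The TTL computation keeps an aggregate alive as long as its longest-lived item and falls back to the aggregate's `DefaultTTL` when the serialization is empty. A standalone sketch of that rule (field names follow the snippet; `now` defaulting to the current epoch time is an assumption about what `misc.seconds_from_epoch_utc()` returns):

    import time

    def remaining_ttl(items, default_ttl, now=None):
        # Longest remaining lifetime (in seconds) across items carrying an
        # epoch-seconds "ExpirationTime"; default_ttl when there are no items.
        now = int(time.time()) if now is None else now
        if not items:
            return default_ttl
        return max((i["ExpirationTime"] - now for i in items if "ExpirationTime" in i),
                   default=0)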
Code Example #3
    def get_state_json(self, key, default=None, direct=False, TTL=None):
        # Decode the stored JSON value; fall back to `default` on any failure
        # or when the decoded value is None.
        try:
            v = misc.decode_json(self.get_state(key, direct=direct, TTL=TTL))
            return v if v is not None else default
        except Exception:
            return default
Code Example #4
    def get_state_json(self, key, default=None, direct=False):
        # Variant that forwards `default` into get_state itself.
        try:
            return misc.decode_json(self.get_state(key, default=default, direct=direct))
        except Exception:
            return default
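
Both variants wrap `get_state` with JSON decoding and swallow any failure by returning `default`; the first maps a decoded `None` back to `default`, while the second also forwards `default` into `get_state` itself. A self-contained sketch of the same fallback pattern over a plain dict (the `get_json` helper and `store` argument are illustrative, not part of the original API):

    import json

    def get_json(store, key, default=None):
        # Fetch a raw JSON string and decode it; return `default` when the key
        # is absent, the stored value decodes to None, or decoding fails.
        try:
            v = json.loads(store[key])
            return v if v is not None else default
        except (KeyError, TypeError, ValueError):
            return default

For example, `get_json({"a": '{"x": 1}'}, "a")` returns `{"x": 1}`, while `get_json({}, "a", default={})` returns the empty dict.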
Code Example #5
    def reread_table(self, force_reread=False):
        if not force_reread and self.table_cache is not None:
            return

        now = self.context["now"]
        misc.initialize_clients(["dynamodb"], self.context)
        client = self.context["dynamodb.client"]

        self.table_last_read_date = now

        # Get the table schema and detect key/value tables (single "Key" attribute).
        response = client.describe_table(TableName=self.table_name)
        self.table_schema = response["Table"]
        schema = self.table_schema["KeySchema"]
        self.is_kv_table = len(schema) == 1 and schema[0]["AttributeName"] == "Key"

        # Read the whole table into memory.
        try:
            table_content = misc.dynamodb_table_scan(client, self.table_name)
        except Exception as e:
            log.exception("Failed to scan '%s' table: %s" % (self.table_name, e))
            raise

        table_cache = []
        # Extract aggregates when encountering them.
        for record in table_content:
            if "Key" not in record:
                table_cache.append(record)
                continue
            key = record["Key"]
            if "Value" not in record:
                log.warn("Key '%s' specified but missing 'Value' column in "
                         "configuration record: %s" % (key, record))
                continue
            value = record["Value"]

            aggregate = next(filter(lambda a: key == a["Prefix"], self.aggregates), None)
            if aggregate is not None:
                # The record is itself an aggregate: unpack it and import its items.
                try:
                    agg = misc.decode_json(value)
                except Exception as e:
                    log.debug("Failed to decode JSON aggregate for key '%s' : %s / %s"
                              % (key, value, e))
                    continue
                agg.append(record)
                self._safe_key_import(table_cache, aggregate, agg,
                                      exclude_aggregate_key=False)
            else:
                if self.aggregates and self.is_aggregated_key(key):
                    # Plain record that should live inside an aggregate: ignore it.
                    log.debug("Found a record '%s' that should belong to an "
                              "aggregate. Ignoring it!" % key)
                    continue
                self._safe_key_import(table_cache, None, [record])

        # Clean the table of outdated records (TTL based).
        self.table_cache = []
        for r in table_cache:
            if "ExpirationTime" not in r:
                self.table_cache.append(r)
                continue
            expiration_time = misc.seconds2utc(r["ExpirationTime"])
            if expiration_time is None or expiration_time > now:
                self.table_cache.append(r)
            elif self.is_kv_table:
                log.debug("Wiping outdated item '%s'..." % r["Key"])
                client.delete_item(Key={"Key": {"S": r["Key"]}},
                                   TableName=self.table_name)

        # Build an easier-to-manipulate dict of all the data.
        self._build_dict()
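
The final pass drops expired items from the in-memory cache and, for KV tables, deletes them from DynamoDB as a side effect. A standalone sketch of just the expiry filter (assuming, as the snippet does, that `ExpirationTime` holds epoch seconds and that `now` is a timezone-aware datetime; the function name is illustrative):

    from datetime import datetime, timezone

    def filter_expired(records, now):
        # Keep records with no ExpirationTime, or whose expiry is in the future.
        kept = []
        for r in records:
            if "ExpirationTime" not in r:
                kept.append(r)
                continue
            expires = datetime.fromtimestamp(r["ExpirationTime"], tz=timezone.utc)
            if expires > now:
                kept.append(r)
        return kept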