def process(self, ctx, m):
    """Log the configured message for every item and pass the item through.

    Increments self.count for each message seen. When an 'eval'
    expression is configured, it is interpolated against the message and
    parsed as a boolean to decide whether to log; otherwise the message
    is always logged at self.level. Yields m unchanged.
    """
    self.count += 1
    should_log = parsebool(ctx.interpolate(m, self.eval)) if self.eval else True
    if should_log:
        logger.log(self.level, ctx.interpolate(m, self.message))
    yield m
def process(self, ctx, m):
    """Yield m only when the configured condition evaluates to true.

    The condition is interpolated against the message and parsed as a
    boolean. Filtered-out messages are optionally reported via the
    configured message (info level) or a generic debug line.

    Raises:
        Exception: if no condition is configured on this node.
    """
    # PEP 8: compare against None with 'is', not '=='.
    if self.condition is None:
        raise Exception("Filter node with no condition.")
    if parsebool(ctx.interpolate(m, self.condition)):
        yield m
    else:
        # Message is being dropped: say why, if configured to do so.
        if self.message:
            logger.info(ctx.interpolate(m, self.message))
        elif ctx.debug2:
            logger.debug("Filtering out message")
        return
def pk(self, ctx):
    """Return the single mapping flagged as primary key, or None.

    A mapping counts as a primary key when it carries a truthy 'pk'
    entry. Raises an Exception if more than one mapping is so flagged.
    """
    pks = [m for m in self._mappings(ctx) if "pk" in m and parsebool(m["pk"])]
    if len(pks) > 1:
        raise Exception("%s has multiple primary keys mapped: %s" % (self, pks))
    return pks[0] if pks else None
def _ensure_mappings(self, ctx, mappings): for mapping in mappings: mapping["pk"] = (False if (not "pk" in mapping) else parsebool(mapping["pk"])) if (not "column" in mapping): mapping["column"] = mapping["name"] if (not "value" in mapping): mapping["value"] = None if (mapping["pk"] and not "type" in mapping): if (not "value" in mapping or mapping["value"] == None): mapping["type"] = "AutoIncrement" if (not "column" in mapping): mapping["column"] = mapping["name"] if (not "type" in mapping): mapping["type"] = "String" return mappings
def pk(self, ctx):
    """Return the primary key column definition, or None if none is defined.

    The result is computed once and cached in self._pk; the sentinel
    value False means "not computed yet". Raises an Exception when more
    than one column is flagged as primary key.
    """
    if self._pk is not False:
        # Already resolved (either a column definition or None).
        return self._pk
    candidates = [c for c in self.columns if "pk" in c and parsebool(c["pk"])]
    if len(candidates) > 1:
        raise Exception("Table %s has multiple primary keys: %s" % (self.name, candidates))
    self._pk = candidates[0] if candidates else None
    return self._pk
def initialize(self, ctx):
    """Initialize the component: build the SQLAlchemy table definition
    from the configured columns and create the table if it is missing.

    Normalizes each column definition in place ('pk' parsed to a real
    boolean, 'type' defaulting to 'String') and rejects duplicate
    column names.
    """
    super(SQLTable, self).initialize(ctx)
    ctx.comp.initialize(self.connection)
    logger.debug("Loading table %s on %s" % (self.name, self))

    self.sa_metadata = MetaData()
    self.sa_table = Table(self.name, self.sa_metadata)

    # Drop?
    seen_names = set()
    for column in self.columns:
        # Reject duplicate column names early.
        if column["name"] in seen_names:
            raise Exception("Duplicate column name %s in %s" % (column["name"], self))
        seen_names.add(column["name"])

        # Normalize the column definition with defaults.
        column["pk"] = parsebool(column["pk"]) if "pk" in column else False
        if "type" not in column:
            column["type"] = "String"
        #if (not "value" in column): column["value"] = None

        logger.debug("Adding column %s" % column)
        self.sa_table.append_column(Column(
            column["name"],
            self._get_sa_type(column),
            primary_key=column["pk"],
            autoincrement=(column["type"] == "AutoIncrement")))

    # Create the table in the database if it does not exist yet.
    if not self.connection.engine().has_table(self.name):
        logger.info("Creating table %s" % self.name)
        self.sa_table.create(self.connection.connection())
def initialize(self, ctx):
    """Initialize the transaction component.

    Delegates to the base class, then normalizes the configured
    'enabled' flag into a real boolean via parsebool.
    """
    super(Transaction, self).initialize(ctx)
    enabled_flag = parsebool(self.enabled)
    self.enabled = enabled_flag