class CapsItem(EmbeddedDocument):
    """Capability value attached to an object, with an optional manual override."""

    capability = ReferenceField(Capability)
    discovered_value = DynamicField()         # value reported by discovery
    local_value = DynamicField(default=None)  # manual override, if any

    def __unicode__(self):
        return self.capability.name
class EventState(EmbeddedDocument):
    """Base embedded document for per-event state.

    Declared with DynamicField so subclasses can store arbitrary values
    while still allowing these common fields to be indexed.
    """

    meta = {'allow_inheritance': True}

    # Expose these fields now for indexing
    hostname = DynamicField()
    ppid = DynamicField()
    pid = DynamicField()
    fqdn = DynamicField()
class SeqColDruggabilityParam(EmbeddedDocument):
    """Druggability parameter definition attached to a sequence collection.

    Each ``default_params`` entry is a tuple:
    (name, description, target, _type, options,
     defaultGroupOperation, defaultOperation, defaultValue)
    """

    overexpressed = ["stress", "starvation", "infection", "hypoxia"]

    default_params = [
        ("essentiality",
         "Critical for the organism survival (https://www.ncbi.nlm.nih.gov/pubmed/26791267)",
         "protein", SeqColDruggabilityParamTypes.value, ["true", "false"],
         "avg", "equal", "true"),
        ("human_offtarget",
         """This score reflects the results of a blastp search of the pathogen protein in the human proteome database (ncbi accession GCF_000001405.36) with the scale 1 - max(alignment identity), so when a protein has no hit in the human proteome, the value is 1, and if it has 2 hits, one with an identity of 0.4 and other with 0.6, the score is 0.4 (human_offtarget = 1 - 0.6, uses the max identity).""".strip(),
         "protein", SeqColDruggabilityParamTypes.number, None,
         "max", "<", 0.4),
        ("hit_in_deg",
         "Has a hit in Database of Essential Genes",
         "protein", SeqColDruggabilityParamTypes.value, ["Yes", "No"],
         "avg", "equal", "Yes"),
    ]
    # One boolean overexpression param per stress condition.
    for cond in overexpressed:
        default_params.append(
            ("overexpression_" + cond,
             "Overexpressed in model of " + cond +
             " (https://www.ncbi.nlm.nih.gov/pubmed/26791267)",
             "protein", SeqColDruggabilityParamTypes.value,
             ["true", "false"], "avg", "equal", "true"))

    MAX_OPTIONS = 20

    meta = {'allow_inheritance': True, 'strict': False}

    name = StringField(required=True)
    description = StringField(default="")
    # FIX: the original used ``choices=map(lambda x: x, ...)``. On Python 3
    # ``map`` returns a one-shot iterator, so the choices would be exhausted
    # after the first validation; the identity lambda was also a no-op.
    type = StringField(default="value",
                       choices=list(SeqColDruggabilityParamTypes.values))
    target = StringField(default="protein")
    uploader = StringField(default="")
    options = ListField(DynamicField())
    _class = StringField(default="ar.com.bia.entity.SeqCollectionDoc")
    defaultGroupOperation = StringField(required=False)
    defaultOperation = StringField(required=False)
    defaultValue = DynamicField(required=False)

    def isValid(self):
        """Number params are always valid; value params must not exceed
        MAX_OPTIONS options."""
        return (self.type == SeqColDruggabilityParamTypes.number.value
                or len(self.options) <= SeqColDruggabilityParam.MAX_OPTIONS)

    def __str__(self):
        return "%s type='%s' target='%s'" % (self.name, self.type, self.target)
class CapsItem(EmbeddedDocument):
    """Capability name/value pair recorded on an object."""

    capability = ReferenceField(Capability)
    value = DynamicField()
    # Source name like "caps", "interface", "manual"
    source = StringField()
    scope = StringField()

    def __str__(self):
        return self.capability.name

    @classmethod
    def get_caps(cls, *args: List["CapsItem"]) -> Dict[str, Any]:
        """
        Consolidate capabilities list and return resulting dict of
        caps name -> caps value. First appearance of capability
        overrides later ones.

        :param args:
        :return:
        """
        consolidated: Dict[str, Any] = {}
        for caps_list in args:
            for item in caps_list:
                name = item.capability.name
                if name not in consolidated:
                    consolidated[name] = item.value
        return consolidated

    def clean(self):
        # Normalize the stored value through the capability's cleaner.
        if self.capability:
            self.value = self.capability.clean_value(self.value)
class BioProperty(DynamicEmbeddedDocument):
    """Free-form biological property: a typed (property, value) pair with
    optional description, URL and source."""

    # field_list = ["_type","property","value","description","url","source"]
    _type = StringField(required=True)
    property = StringField()
    value = DynamicField()
    description = StringField()
    url = StringField()
    source = StringField()

    def __init__(self, **kwargs):
        """Delegate to the Mongo document machinery unchanged."""
        # NOTE(review): super() is given DynamicEmbeddedDocument, which skips
        # that class's own __init__ in the MRO. Kept as-is since other classes
        # in this codebase do the same on purpose -- confirm before changing.
        super(DynamicEmbeddedDocument, self).__init__(**kwargs)

    def __str__(self):
        # FIX: the original formatted ``str(len(self.property))`` into the
        # ``property=`` slot, printing the name's *length* instead of the
        # name itself.
        return "BioProperty(_type={_type},property={property}, value={value}) ".format(
            _type=str(self._type),
            property=str(self.property),
            value=self.value)

    def __repr__(self):
        return self.__str__()
class CapsItem(EmbeddedDocument):
    """Capability value with its originating source."""

    capability = ReferenceField(Capability)
    value = DynamicField()
    # Source name like "caps", "interface", "manual"
    source = StringField()

    def __str__(self):
        return self.capability.name
class Users(Document):
    """Application user account."""

    role = EnumField(Role, default=Role.USER)
    first_name = StringField(min_length=1, max_length=200)
    last_name = StringField(min_length=1, max_length=200)
    email = EmailField(allow_utf8_user=True, unique=True)
    phone_number = StringField(regex=r"\d{3,}")  # at least three digits
    password = DynamicField(max_length=100)
    # NOTE(review): naive UTC timestamp; datetime.utcnow is deprecated since
    # Python 3.12 -- consider datetime.now(timezone.utc) if aware datetimes
    # are acceptable downstream.
    created = DateTimeField(default=datetime.utcnow)
class BillingDocument(BDocument):
    """Abstract base for numbered billing documents (invoices, payments, ...)."""

    number = StringField(required=True)
    beneficiary = DynamicField()
    comment = StringField()
    doc_date = DateTimeField()
    attached_files = EmbeddedDocumentListField(AttachedFile)
    states = EmbeddedDocumentField(DocumentState)
    # owner=
    meta = {'abstract': True}
class Metric(EmbeddedDocument):
    """Named metric holding either a scalar value or a series of values,
    with optional labels."""

    name = StringField(max_length=50, required=True)
    description = StringField(required=False)
    value = FloatField(required=False)
    values = ListField(FloatField(), required=False)
    labels = ListField(DynamicField(), required=False)

    def __str__(self):
        return "Metric: %s (%s) %s" % (self.name, self.description,
                                       str(self.value))
class ObjectAttr(EmbeddedDocument):
    """Single attribute value read from an object's interface, optionally
    qualified by a scope."""

    interface = StringField()
    attr = StringField()
    value = DynamicField()
    scope = StringField()

    def __str__(self):
        if not self.scope:
            return "%s.%s = %s" % (self.interface, self.attr, self.value)
        return "%s.%s@%s = %s" % (self.interface, self.attr, self.scope,
                                  self.value)
class DetailLine(EmbeddedDocument):
    """One detail line of an invoice; amounts derive from quantity,
    unit price and a percentage discount."""

    code = StringField(required=False)
    description = StringField()
    discount = FloatField(default=0)    # percentage (0-100)
    unit_price = FloatField(default=0)
    qte = FloatField(default=0)
    tariff = EmbeddedDocumentField(Tariff)
    line_doc = DynamicField()
    is_comment = BooleanField(default=False)

    @property
    def discount_amount(self):
        """Discount expressed as an absolute amount."""
        gross = self.unit_price * self.qte
        return gross * self.discount / 100

    @property
    def total_amount(self):
        """Line total after discount."""
        return (self.unit_price * self.qte) - self.discount_amount
def __new__(mcs, name, bases, dct):
    """ Creates new properties that map to the attributes. """
    # Build a companion EventState subclass with one DynamicField per
    # declared data-model field, and embed it as the 'state' field.
    state_cls = type(name + 'State', (EventState, ),
                     {f: DynamicField() for f in dct['fields']})
    dct['state'] = EmbeddedDocumentField(state_cls)
    # object_type is pinned to the single declared object_name.
    dct['object_type'] = StringField(default=dct['object_name'],
                                     choices=[dct['object_name']])
    # Normalize actions/fields into sorted, immutable tuples.
    dct['actions'] = tuple(sorted(dct.pop('actions', [])))
    dct['fields'] = tuple(sorted(dct.pop('fields', [])))
    new_cls = super(DataModelEventMeta, mcs).__new__(mcs, name, bases, dct)
    # Update the list of all objects so that the class can be mentioned by the data model name
    if dct['object_name']:
        event_lookup[dct['object_name']] = new_cls
    return new_cls
class AbstractConfig(BDocument):
    """Abstract key/value configuration document, indexed by ``key``."""

    key = StringField(required=True)
    value = DynamicField()

    meta = {
        'abstract': True,
        'indexes': [
            'key',
        ]
    }

    @queryset_manager
    def _get_by_key_only(self, queryset, key, create=False):
        # Returns None when missing, unless ``create`` asks for a fresh
        # (unsaved) document.
        try:
            return queryset.get(key=key)
        except self.DoesNotExist:
            return self(key=key) if create else None

    @queryset_manager
    def get_by_key_and_owner(self, queryset, key, owner):
        # NOTE(review): ``owner`` is currently ignored -- confirm intended.
        return queryset.get(key=key)

    @queryset_manager
    def _get_by_key_and_speciality(self, queryset, key, owner):
        # NOTE(review): ``owner`` is currently ignored -- confirm intended.
        return queryset.get(key=key)

    @classmethod
    def get_by_key(cls, key, owner=None, create=False):
        """Look up a config by key: owner-scoped first, then
        speciality-scoped, finally key-only."""
        if owner is None:
            return cls._get_by_key_only(key, create=create)
        try:
            return cls.get_by_key_and_owner(key, owner)
        except cls.DoesNotExist:
            try:
                return cls._get_by_key_and_speciality(key, owner)
            except cls.DoesNotExist:
                return cls._get_by_key_only(key, create=create)
class BaseConfig(BDocument):
    """Abstract key/value configuration document.

    ``create=True`` persists the new document immediately via ``save()``.
    """

    key = StringField(required=True)
    value = DynamicField()
    # speciality = ReferenceField(MedicalSpeciality, required=False, default=None, null=True)
    # owner = ReferenceField(Staff, required=False, default=None, null=True)

    meta = {'abstract': True}

    @queryset_manager
    def _get_by_key_only(self, queryset, key, create=False):
        # FIX: dropped the bare ``.filter()`` calls -- they were no-ops
        # before ``.get()``.
        try:
            return queryset.get(key=key)
        except self.DoesNotExist:
            if create:
                return self(key=key).save()
            return None

    @queryset_manager
    def get_by_key_and_owner(self, queryset, key, owner):
        # NOTE(review): ``owner`` is currently ignored -- confirm intended.
        return queryset.get(key=key)

    @queryset_manager
    def _get_by_key_and_speciality(self, queryset, key, owner):
        # NOTE(review): ``owner`` is currently ignored -- confirm intended.
        return queryset.get(key=key)

    @classmethod
    def get_by_key(cls, key, owner=None, create=False):
        """Look up a config by key: owner-scoped first, then
        speciality-scoped, finally key-only."""
        if owner is None:
            return cls._get_by_key_only(key, create=create)
        try:
            return cls.get_by_key_and_owner(key, owner)
        except cls.DoesNotExist:
            try:
                return cls._get_by_key_and_speciality(key, owner)
            except cls.DoesNotExist:
                return cls._get_by_key_only(key, create=create)
class Payment(BillingDocument):
    """A payment received from a payer, allocated over encasement lines."""

    beneficiary_type = StringField()
    deadline = DateTimeField()
    payer = DynamicField()
    payer_type = StringField()
    received_amount = FloatField(default=0)
    payment_mode = EmbeddedDocumentField(PaymentMode)
    lines = EmbeddedDocumentListField(PaymentLine)

    @property
    def consumed_amount(self):
        """Total already allocated to documents."""
        total = 0
        for line in self.lines:
            total += line.encasement_amount
        return total

    @property
    def remaining_amount(self):
        """Received amount not yet allocated."""
        return self.received_amount - self.consumed_amount

    @property
    def payer_name(self):
        return self.payer.complete_name

    @property
    def beneficiary_name(self):
        return self.beneficiary.complete_name
class PaymentLine(EmbeddedDocument):
    """Allocation of part of a payment to one paid document."""

    encasement_amount = FloatField(required=True)
    total_amount = FloatField(required=True)
    paid_doc = DynamicField()
    remaining_amount = FloatField()
class Structure(Document):
    """Macromolecular (PDB) structure with its chains, residue sets,
    pockets, ligands and quality metrics.

    Fields: name, description, organism, seq_collection_id, chains,
    residue_sets, pockets, ligands, properties, qualities.
    """

    meta = {
        'allow_inheritance': True,
        'collection': "structures",
        'index_cls': False,
        'indexes': [
            "name",
            {"fields": ["organism", "name"]},
            {"fields": ["organism", "chains.aln_query.name"]},
            {"fields": ["chains.aln_query.name"]},
            {"fields": ["seq_collection_id", "name"]},
            {"fields": ["seq_collection_name", "name"]},
            {"fields": ["_cls", "name"]},
            {"fields": ["seq_collection_id", "chains.aln.aln_query"]}
        ],
        'db_alias': 'pdb'
    }

    name = StringField(required=True)
    description = StringField()
    organism = StringField()
    seq_collection_name = StringField()
    seq_collection_id = ReferenceField(SeqCollection)
    chains = ListField(EmbeddedDocumentField(Chain))
    residue_sets = ListField(EmbeddedDocumentField(ResidueSet))
    pockets = ListField(EmbeddedDocumentField(ResidueSet))
    ligands = ListField(EmbeddedDocumentField(Molecule), default=[])
    properties = EmbeddedDocumentField(BioProperties)
    keywords = ListField(StringField(), default=[])
    qualities = ListField(EmbeddedDocumentField(StructureQuality))
    sndg_index = DynamicField(required=False)

    def chain(self, chain_name):
        """Return the chain named ``chain_name`` (IndexError if absent)."""
        return [x for x in self.chains if x.name == chain_name][0]

    def druggability(self):
        """Best druggability score among the pockets; 0 if there are none."""
        druggabilities = [x.druggability_score for x in self.pockets]
        if druggabilities:
            return max(druggabilities)
        return 0

    def new_residue_set(self, compound_type):
        """Build (without attaching) a ResidueSet containing every residue
        of ``compound_type``, encoded as "<chain>_<resid>"."""
        rs = ResidueSet(name=compound_type, residues=[])
        for chain in self.chains:
            for residue in chain.residues:
                if residue.compound_type == compound_type:
                    rs.residues.append(residue.chain + "_" + str(residue.resid))
        return rs

    def residue_set(self, rs_name):
        """Return the residue set named ``rs_name``; a fresh empty one if missing."""
        rss = [x for x in self.residue_sets if x.name == rs_name]
        if rss:
            return rss[0]
        return ResidueSet(name=rs_name)

    def residue_sets_for_type(self, rs_type):
        return [x for x in self.residue_sets if x.type == rs_type]

    def has_residue_set(self, rs_name):
        return any(x.name == rs_name for x in self.residue_sets)

    def isResidueFromPocket(self, chain, res_id):
        for p in self.pockets:
            if chain + "_" + str(res_id) in p.residues:
                return True
        return False

    def get_pocket_from_residue(self, chain, res_id):
        for p in self.pockets:
            if chain + "_" + str(res_id) in p.residues:
                return p
        raise Exception("not found: %s in %s pockets" %
                        (chain + "_" + str(res_id), self.name))

    def all_intersections(self):
        """Map each combination of overlapping residue sets/pockets (sorted
        name tuples) to the residues they share."""
        # FIX: pockets must be concatenated element-wise. The original added
        # ``[self.pockets]``, appending the pockets *list* as a single
        # element, which would fail on ``residue_set.residues`` below.
        residue_sets = self.residue_sets + self.pockets
        sets_of_residues_map = {}
        for residue_set in residue_sets:
            for residue in residue_set.residues:
                if residue not in sets_of_residues_map:
                    sets_of_residues_map[residue] = []
                sets_of_residues_map[residue].append(residue_set.name)
        intersections = {}
        for residue, sets in sets_of_residues_map.items():
            # The full combination, plus every sub-combination of size >= 2.
            subsets = [tuple(sorted(sets))] if len(sets) > 1 else []
            for size in range(2, len(sets)):
                subsets = subsets + [
                    tuple(sorted(x))
                    for x in itertools.combinations(sets, size)
                ]
            for subset in set(subsets):
                if subset not in intersections:
                    intersections[subset] = []
                intersections[subset].append(residue)
        return intersections

    def has_metal(self):
        return any(x.compound_type in PDB_LIGAND_METALS for x in self.ligands)

    def quality(self, metric_name):
        """Value of the quality metric ``metric_name``, or None if absent."""
        for x in self.qualities:
            if x.name == metric_name:
                return x.value
        return None
class Feature(EmbeddedDocument):
    # Genomic/protein feature with a location, optional sub-features and
    # alignment data. Supports containment (`in`) and intersection (`&`).
    _id = ObjectIdField()  # TODO poner required=True
    source = StringField(required=False)
    evidence = StringField()
    identifier = StringField()
    location = EmbeddedDocumentField(Location)
    type = StringField(max_length=30, required=True)
    features = ListField(DynamicField())
    locus_tag = StringField(required=False)
    alias = ListField(StringField(), default=[])
    aln = EmbeddedDocumentField(SimpleAlignment)
    qualifiers = DictField(required=False)
    meta = {'allow_inheritance': True}

    def __init__(self, **kwargs):
        '''
        '''
        # NOTE(review): super() is given EmbeddedDocument (not Feature), so
        # EmbeddedDocument's own __init__ is skipped in the MRO and
        # ``_instance`` is set manually below. Looks deliberate -- confirm
        # before "fixing".
        super(EmbeddedDocument, self).__init__(**kwargs)
        self._seq_feature = SeqFeature(FeatureLocation(self.location.start,
                                                       self.location.end),
                                       ref=self.location.base,
                                       type=self.type,
                                       ref_db=self.identifier)
        self._instance = None
        # NOTE(review): this discards any ``features`` passed via kwargs --
        # confirm intended.
        self.features = []

    def seq(self, sequence):
        # Slice of the parent sequence covered by this feature.
        return sequence.seq[self.location.start:self.location.end]

    def _compound_name(self, other):
        return self.identifier + "_" + other.identifier

    def __contains__(self, other):
        # True when ``other`` lies entirely within this feature on the same base.
        if self.location.base == other.location.base:
            return self.location.start <= other.location.start and self.location.end >= other.location.end
        return False

    def __and__(self, other):
        # Intersection feature; start/end are both -1 when there is no overlap.
        if self.location.base == other.location.base:
            intersect = sorted(
                list(
                    set(range(self.location.start, self.location.end))
                    & set(range(other.location.start, other.location.end))))
            if intersect:
                # NOTE(review): end is the last overlapping position
                # (inclusive); verify against Location's end convention.
                return Feature(location=Location(start=intersect[0],
                                                 end=intersect[-1]),
                               source="operation",
                               type="intersect",
                               identifier=self._compound_name(other))
            else:
                return Feature(location=Location(start=-1, end=-1),
                               source="operation",
                               type="intersect",
                               identifier=self._compound_name(other))
        else:
            raise Exception("bases do not match")

    def __str__(self):
        return "Feature(ref={ref},location={location}, features={features_count}) ".format(
            location=str(self.location),
            features_count=str(len(self.features)),
            ref=self.location.base)

    def has_alias(self, name):
        # Matches either the identifier or any stored alias.
        if self.identifier == name:
            return True
        if hasattr(self, "alias"):
            return name in self.alias
        return False

    def __len__(self):
        return len(self.location)
class InvoiceLine(EmbeddedDocument):
    """A line of an invoice, grouping one or more detail lines."""

    details = EmbeddedDocumentListField(DetailLine)
    description = StringField()
    is_comment = BooleanField()
    line_doc = DynamicField()
class SharedGroup(EmbeddedDocument):
    """Sharing entry: a user and their access rights on the parent document."""

    user = DynamicField()
    # FIX: ListField expects a *field instance*; the original passed the
    # StringField class itself, which is rejected by MongoEngine.
    access = ListField(StringField())
    is_owner = BooleanField(default=False)
    is_favorite = BooleanField(default=False)
class ViewQuery(EmbeddedDocument):
    """One filter clause of a saved view: <column> <operator> <value>,
    joined to the next clause by ``logical_operator``."""

    column = StringField()
    operator = StringField()
    value = DynamicField()
    logical_operator = StringField()
class FieldComparison(EmbeddedQueryTerm):
    """Query term comparing one event field against a constant value."""

    field = StringField()
    value = DynamicField()
    string_comparator = StringField(db_field='comparator')

    def __init__(self, field, value, comparator=None, **kwargs):
        # The enum member lives on the instance; only its name is persisted.
        if comparator is not None:
            self.comparator = comparator
            kwargs['string_comparator'] = str(comparator.name)
        super(FieldComparison, self).__init__(field=field, value=value, **kwargs)
        if comparator is None:
            self.comparator = FieldComparators[self.string_comparator]

    def compare(self, event):
        """Return True when the event's field satisfies this comparison."""
        actual = event.get(self.field)
        expected = self.value
        # Cast the event value so the typed comparison below works.
        if isinstance(expected, (int, float)) and not isinstance(actual, (int, float)):
            try:
                actual = type(expected)(actual)
            except ValueError:
                return False
        elif isinstance(expected, str) and not isinstance(actual, str):
            actual = type(expected)(actual)
        # String comparisons are case-insensitive.
        if isinstance(actual, str) and isinstance(expected, str):
            actual = actual.lower()
            expected = expected.lower()
        comp = self.comparator
        if comp == FieldComparators.Equals:
            return actual == expected
        if comp == FieldComparators.NotEquals:
            return actual != expected
        if comp == FieldComparators.Contains:
            # NOTE(review): tests whether the event value occurs inside the
            # configured value, not the other way round -- confirm intended.
            return actual in expected
        if comp == FieldComparators.GreaterThan:
            return actual > expected
        if comp == FieldComparators.GreaterThanOrEqual:
            return actual >= expected
        if comp == FieldComparators.LessThan:
            return actual < expected
        if comp == FieldComparators.LessThanOrEqual:
            return actual <= expected
        if comp == FieldComparators.WildCard:
            pattern = re.escape(expected).replace(r'\*', '.*')
            return re.match(pattern, actual) is not None
        if comp == FieldComparators.RegEx:
            return re.match(expected, actual) is not None
        raise NotImplementedError

    def get_fields(self):
        return {self.field}

    def __repr__(self):
        return '{}({}, {}, {})'.format(
            type(self).__name__, repr(self.field), self.comparator,
            repr(self.value))

    __str__ = __repr__
class DiscoveryJob(Document):
    """Discovery scheduler job bound to a managed object.

    Schedules are kept in sync with ManagedObject / ManagedObjectProfile
    changes via the Django signals wired in :meth:`install`.
    """

    meta = {"collection": "noc.schedules.inv.discovery"}

    ts = DateTimeField()
    jcls = StringField()
    status = StringField(db_field="s")
    object = IntField(db_field="key")
    data = DynamicField()
    schedule = DynamicField()
    last = DateTimeField()
    last_status = StringField(db_field="ls")
    last_duration = FloatField(db_field="ldur")
    last_success = DateTimeField(db_field="st")
    runs = IntField(db_field="runs")
    tb = StringField(db_field="tb")
    log = StringField()
    faults = IntField()

    def __unicode__(self):
        return "%s %s" % (self.jcls, self.object)

    @classmethod
    def install(cls):
        """Connect model signals so schedules follow object/profile changes."""
        from noc.sa.models.managedobject import ManagedObject
        from noc.sa.models.managedobjectprofile import ManagedObjectProfile
        post_save.connect(cls.on_managed_object_save, sender=ManagedObject)
        pre_delete.connect(cls.on_managed_object_delete, sender=ManagedObject)
        post_save.connect(cls.on_objectprofile_save,
                          sender=ManagedObjectProfile)
        pre_delete.connect(cls.on_objectprofile_delete,
                           sender=ManagedObjectProfile)

    @classmethod
    def on_managed_object_save(cls, sender, instance, created, *args, **kwargs):
        cls.apply_object_jobs(instance)

    @classmethod
    def on_managed_object_delete(cls, sender, instance, *args, **kwargs):
        cls.delete_object_jobs(instance)

    @classmethod
    def on_objectprofile_save(cls, sender, instance, created, *args, **kwargs):
        cls.apply_objectprofile_jobs(instance)

    @classmethod
    def on_objectprofile_delete(cls, sender, instance, *args, **kwargs):
        cls.delete_objectprofile_jobs(instance)

    @classmethod
    def apply_object_jobs(cls, object):
        """
        Apply discovery jobs to object
        """
        methods = get_active_discovery_methods()
        # Get current schedules
        current = {}  # name -> (interval, failed interval)
        for d in cls._get_collection().find(
                {"key": object.id, "jcls": {"$in": methods}},
                {"jcls": 1, "schedule": 1}):
            current[d["jcls"]] = (d["schedule"]["interval"],
                                  d["schedule"].get("failed_interval"))
        # Get effective schedules
        bulk = cls._get_collection().initialize_unordered_bulk_op()
        n = 0
        p = object.object_profile
        now = datetime.datetime.now()
        for m in methods:
            if not getattr(p, "enable_%s" % m):
                continue
            interval = (getattr(p, "%s_max_interval" % m),
                        getattr(p, "%s_min_interval" % m))
            if m in current:
                if current[m] != interval:
                    # Change schedule
                    logger.debug("[%s] changing %s interval %s -> %s",
                                 object.name, m, current[m], interval)
                    bulk.find({"key": object.id, "jcls": m}).update({
                        "$set": {
                            "schedule.interval": interval[0],
                            "schedule.failed_interval": interval[1]
                        }
                    })
                    n += 1
            else:
                # Create schedule
                logger.debug("[%s] creating schedule for %s", object.name, m)
                bulk.insert({
                    "jcls": m,
                    "key": object.id,
                    "s": "W",
                    "data": None,
                    "ts": now,
                    "schedule": {
                        "interval": interval[0],
                        "failed_interval": interval[1],
                        "offset": random.random()
                    }
                })
                n += 1
        # Delete stale schedules
        stale = set(current) - set(methods)
        if stale:
            logger.debug("[%s] deleting stale schedules: %s",
                         object.name, ", ".join(stale))
            # FIX: the field name is "jcls"; the original queried "$jcls",
            # which is not a valid field key, so stale jobs never matched.
            bulk.find({
                "key": object.id,
                "jcls": {"$in": list(stale)}
            }).remove()
            n += 1
        if n:
            logger.debug("Bulk update schedule")
            bulk.execute({"w": 0})

    @classmethod
    def delete_object_jobs(cls, object):
        logger.debug("[%s] deleting object jobs", object.name)
        cls._get_collection().remove({"key": object.id})

    @classmethod
    def apply_objectprofile_jobs(cls, profile):
        """
        Apply discovery jobs to all objects
        """
        object_ids = list(
            profile.managedobject_set.values_list("id", flat=True))
        if not object_ids:
            return
        methods = get_active_discovery_methods()
        current = {}  # object, method -> (interval, failed interval)
        for d in cls._get_collection().find(
                {"key": {"$in": object_ids}, "jcls": {"$in": methods}},
                {"jcls": 1, "key": 1, "schedule": 1}):
            current[(d["key"], d["jcls"])] = (d["schedule"]["interval"],
                                              d["schedule"].get("failed_interval"))
        # Get effective capabilities
        bulk = cls._get_collection().initialize_unordered_bulk_op()
        n = 0
        now = datetime.datetime.now()
        for m in methods:
            if not getattr(profile, "enable_%s" % m):
                continue
            interval = (getattr(profile, "%s_max_interval" % m),
                        getattr(profile, "%s_min_interval" % m))
            for obj in object_ids:
                if (obj, m) in current:
                    if current[(obj, m)] != interval:
                        # Change schedule
                        logger.debug("[%s] changing %s interval %s -> %s",
                                     obj, m, current[(obj, m)], interval)
                        bulk.find({"key": obj, "jcls": m}).update({
                            "$set": {
                                "schedule.interval": interval[0],
                                "schedule.failed_interval": interval[1]
                            }
                        })
                        n += 1
                    # Whatever remains in ``current`` afterwards is stale.
                    del current[(obj, m)]
                else:
                    # Create schedule
                    logger.debug("[%s] creating schedule for %s", obj, m)
                    bulk.insert({
                        "jcls": m,
                        "key": obj,
                        "s": "W",
                        "data": None,
                        "ts": now,
                        "schedule": {
                            "interval": interval[0],
                            "failed_interval": interval[1],
                            "offset": random.random()
                        }
                    })
                    n += 1
        # Delete stale schedules
        for obj, m in current:
            logger.debug("[%s] deleting stale schedule: %s", obj, m)
            bulk.find({"key": obj, "jcls": m}).remove()
            n += 1
        if n:
            logger.debug("Bulk update schedule")
            bulk.execute({"w": 0})

    @classmethod
    def delete_objectprofile_jobs(cls, profile):
        object_ids = list(
            profile.managedobject_set.values_list("id", flat=True))
        if not object_ids:
            return
        methods = get_active_discovery_methods()
        bulk = cls._get_collection().initialize_unordered_bulk_op()
        for m in methods:
            logger.debug("[%s] Deleting stale schedules for %s",
                         profile.name, m)
            # FIX: the original only selected documents with find() but never
            # registered a remove(), so execute() had no operations to run.
            bulk.find({"jcls": m, "key": {"$in": object_ids}}).remove()
        bulk.execute({"w": 0})

    @classmethod
    def set_deferred(cls, object):
        logger.debug("Setting deferred discovery status for %s", object)
        cls._get_collection().update(
            {"key": object.id, Scheduler.ATTR_STATUS: Scheduler.S_WAIT},
            {"$set": {Scheduler.ATTR_STATUS: Scheduler.S_DISABLED}},
            multi=True)

    @classmethod
    def reset_deferred(cls, object):
        logger.debug("Resetting deferred discovery status for %s", object)
        cls._get_collection().update(
            {"key": object.id, Scheduler.ATTR_STATUS: Scheduler.S_DISABLED},
            {"$set": {Scheduler.ATTR_STATUS: Scheduler.S_WAIT}},
            multi=True)