class APIKey(Document):
    """An API key with its own permission set and an owner."""

    meta = {
        'collection': COLLECTION_APIKEYS,
        'indexes': [{
            'fields': ['key'],
            'unique': True
        }]
    }

    # Hashed key material; unique per document.
    key = StringField(required=True, null=False, unique=True, max_length=MAX_BIGSTR_LEN)
    # Username of the key's owner.
    owner = StringField(required=True, null=False, max_length=MAX_STR_LEN)
    # API method names this key may invoke; '*' grants everything.
    allowed_api_calls = ListField(
        StringField(required=True, null=False, max_length=MAX_STR_LEN),
        required=True,
        null=False)

    @staticmethod
    def list_keys(owner):
        """Return all API key documents belonging to *owner*."""
        return APIKey.objects(owner=owner)  # pylint: disable=no-member

    @staticmethod
    def get_key(key):
        """Fetch the key document whose stored hash matches *key*."""
        hashed = bcrypt.hash(key, salt=API_KEY_SALT)
        return APIKey.objects.get(key=hashed)  # pylint: disable=no-member

    @property
    def document(self):
        """JSON-safe representation; deliberately omits the key itself."""
        return {
            'owner': self.owner,
            'allowed_api_calls': self.allowed_api_calls,
        }

    def is_permitted(self, api_method):
        """True when this key may call *api_method*, or holds the '*' wildcard."""
        allowed = self.allowed_api_calls  # pylint: disable=unsupported-membership-test
        return api_method in allowed or '*' in allowed

    def remove(self):
        """Delete this document from the database and run any related cleanup."""
        self.delete()
class ComputedField(EmbeddedDocument):
    """A single computed column: a name, an optional result type, and the
    serialized formula used to derive its values."""
    # Column name shown to the user.
    name = StringField(required=True)
    # Result type of the computation; may be absent.
    type = StringField(null=True)
    # Formula stored as a dict, evaluated per record.
    formula = DictField(required=True)
class Datalab(Document):
    """A DataLab: a composed tabular view over one container, built by joining
    datasource, form, and computed modules onto a base set of relations."""

    # Cascade delete if container is deleted (reverse_delete_rule=2 == CASCADE)
    container = ReferenceField(Container, required=True, reverse_delete_rule=2)
    name = StringField(required=True)
    description = StringField(null=True)
    # Build steps (datasource/form/computed modules), applied in order.
    steps = EmbeddedDocumentListField(Module)
    # Column ordering/visibility metadata.
    order = EmbeddedDocumentListField(Column)
    charts = EmbeddedDocumentListField(Chart)
    # Base records every step is joined onto.
    relations = ListField(DictField())
    # Flattened list of users allowed to see this DataLab (see refresh_access).
    permitted_users = ListField(StringField())
    ltiAccess = BooleanField(default=False)
    emailAccess = BooleanField(default=False)
    # Name of the relations key that identifies a user (see refresh_access).
    permission = StringField(null=True)
    restriction = StringField(choices=("private", "open"), default="private")
    # Column label used to group rows; None disables grouping.
    groupBy = StringField(null=True)

    @property
    def data(self):
        """Materialize the DataLab table.

        Starts from ``relations`` and joins in each step's data in order,
        returning a list of record dicts (one per row).
        """
        # Local imports avoid a circular dependency at module load time.
        from form.models import Form
        from .utils import calculate_computed_field

        build_fields = []
        combined_data = pd.DataFrame(self.relations)

        # # Gather all tracking and feedback data for associated actions
        # # Consumed by the computed column
        # tracking_feedback_data = {}
        # if datalab_id:
        #     actions = Workflow.objects(datalab=datalab_id)
        #     for action in actions:
        #         action_id = str(action.id)
        #         if not "emailSettings" in action or not len(action["emailJobs"]):
        #             continue
        #         tracking_feedback_data[action_id] = {
        #             "email_field": action["emailSettings"]["field"],
        #             "jobs": {},
        #         }
        #         for email_job in action["emailJobs"]:
        #             job_id = str(email_job.job_id)
        #             tracking_feedback_data[action_id]["jobs"][job_id] = {
        #                 "tracking": {
        #                     email["recipient"]: email["track_count"]
        #                     for email in email_job["emails"]
        #                 }
        #             }

        for step_index, step in enumerate(self.steps):
            if step.type == "datasource":
                step = step.datasource
                # NOTE(review): bare excepts — if both lookups fail, `datasource`
                # is unbound and a NameError follows below; if both succeed, the
                # Datalab lookup wins. Confirm the intended fallback order and
                # narrow to DoesNotExist.
                try:
                    datasource = Datasource.objects.get(id=step.id)
                except:
                    pass
                try:
                    datasource = Datalab.objects.get(id=step.id)
                except:
                    pass
                build_fields.append([step.labels[field] for field in step.fields])
                included_fields = []
                for field in step.fields:
                    # Skip columns that are already included in the relations table
                    if step.labels[field] in list(combined_data):
                        continue
                    if step.types.get(field) != "checkbox-group":
                        included_fields.append(field)
                        continue
                    # Identify which form the checkbox-group field comes from,
                    # and include its per-option columns instead of the field.
                    form_module_index = next(
                        item for item in datasource.order if item.field == field
                    ).stepIndex
                    form_module_id = datasource.steps[form_module_index].form
                    form = Form.objects.get(id=form_module_id)
                    for form_field in form.fields:
                        if form_field.name == field:
                            included_fields.extend(form_field.columns)
                data = (
                    pd.DataFrame(data=datasource.data)
                    .set_index(step.primary)
                    .filter(items=included_fields)
                    .rename(
                        columns={field: step.labels[field] for field in step.fields}
                    )
                )
                # The first step joins on the primary key's label; later steps
                # join on their configured matching field.
                combined_data = combined_data.join(
                    data,
                    on=step.matching
                    if step_index != 0
                    else step.labels.get(step.primary, step.primary),
                )
            elif step.type == "form":
                form = Form.objects.get(id=step.form)
                data = pd.DataFrame(data=form.data)
                build_fields.append([field.name for field in form.fields])
                # Forms with no rows for the primary field are skipped entirely.
                if form.primary in data:
                    data.set_index(form.primary, inplace=True)
                    combined_data = combined_data.join(data, on=form.primary)
            elif step.type == "computed":
                step = step.computed
                build_fields.append([field.name for field in step.fields])
                # Each computed column is evaluated row-by-row against the data
                # built so far; .values forces evaluation per iteration, so the
                # lambda's `field` capture is safe.
                computed_fields = {
                    field.name: combined_data.apply(
                        lambda item: calculate_computed_field(
                            field.formula, item, build_fields, {}
                        ),
                        axis=1,
                    ).values
                    for field in step.fields
                }
                combined_data = combined_data.assign(**computed_fields)

        # NOTE(review): pd.np was removed in pandas >= 2.0 — migrate to numpy.nan.
        combined_data.replace({pd.np.nan: None}, inplace=True)
        return combined_data.to_dict("records")

    def filter_details(self, filters):
        """
        Function used in Serializers to get filter_details

        Input
            filters - Table Filter Details
        Output
            filter_details - Contains filtered data & other information
                - dataNum: Number of rows in data
                - paginationTotal: Number of rows in data (used for pagination)
                - filters: Column Label with list of { text, value } per column
                - filteredData: The actual table data
                - groups: List of {text, value} for the groupby dropdown
                  (essentially another filter)
        """
        data = self.data
        if filters is None:
            filters = {}
        df = pd.DataFrame.from_dict(data)
        # Grab column information to help with filtering, because the filter
        # algorithm depends on the column type.
        from datalab.serializers import OrderItemSerializer
        columns = OrderItemSerializer(
            self.order, many=True, context={"steps": self.steps}
        ).data
        # NOTE(review): next() without a default raises StopIteration when
        # groupBy names no known column — confirm callers guarantee a match.
        group_column = next(
            column for column in columns
            if column['details']['label'] == self.groupBy
        ) if self.groupBy is not None else None
        # Perform the actual filtering
        filtered_data, pagination_total = get_filtered_data(
            data, columns, filters, self.groupBy)
        return {
            'dataNum': len(data),
            'paginationTotal': pagination_total,
            'filters': get_filters(df, columns),
            'filteredData': filtered_data,
            'groups': get_column_filter(df, group_column)
        }

    # Flat representation of which users should see this DataLab when they
    # load the dashboard.
    def refresh_access(self):
        """Recompute permitted_users from the permission column and save."""
        users = set(
            record.get(self.permission, "").lower() for record in self.relations
        )
        # Drop sentinel values produced by missing/empty permission entries.
        for invalid_value in [None, ""]:
            if invalid_value in users:
                users.remove(invalid_value)
        self.permitted_users = list(users)
        self.save()
class Action(Document):
    """A runnable SA action: a named, access-controlled command template
    rendered and executed against a managed object.

    NOTE(review): ``json_data`` and ``clean_args`` use ``self.params``, but the
    ``params`` field is commented out below — confirm it is defined elsewhere
    before relying on those methods.
    """

    meta = {
        "collection": "noc.actions",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "sa.actions",
    }

    uuid = UUIDField(unique=True)
    name = StringField(unique=True)
    label = StringField()
    description = StringField()
    access_level = IntField(default=15)
    # Optional handler for non-sa actions
    handler = StringField()
    # params = ListField(EmbeddedDocumentField(ActionParameter))

    # id -> Action cache, 60 s TTL, guarded by module-level id_lock.
    _id_cache = cachetools.TTLCache(1000, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Cached lookup by document id; None when absent."""
        return Action.objects.filter(id=id).first()

    def get_json_path(self):
        """Relative JSON-collection path for this action."""
        return "%s.json" % quote_safe_path(self.name)

    @property
    def json_data(self):
        """Dict representation used for JSON collection export."""
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "label": self.label,
            "description": self.description,
            "access_level": self.access_level,
        }
        if self.handler:
            r["handler"] = self.handler
        r["params"] = [c.json_data for c in self.params]
        return r

    def to_json(self):
        """Serialize json_data with a stable key order."""
        return to_json(
            self.json_data,
            order=[
                "name",
                "$collection",
                "uuid",
                "label",
                "description",
                "access_level",
                "handler",
                "params",
            ],
        )

    def get_commands(self, obj):
        """
        Returns ActionCommands instance or None
        :param obj: Managed Object
        """
        from .actioncommands import ActionCommands
        # Candidates are ordered by preference; the first whose match rules
        # accept obj's platform/version wins. An entry with no match rules
        # matches unconditionally.
        for ac in ActionCommands.objects.filter(
                action=self, profile=obj.profile.id).order_by("preference"):
            if not ac.match:
                return ac
            for m in ac.match:
                if (not m.platform_re or (obj.platform and re.search(m.platform_re, obj.platform.name))) and (
                        not m.version_re or (obj.version and re.search(m.version_re, obj.version.version))):
                    return ac
        return None

    def expand_ex(self, obj, **kwargs):
        """Render the command template for *obj*.

        :returns: (ActionCommands, rendered text), or (None, None) when no
                  commands apply to the object.
        """
        ac = self.get_commands(obj)
        if not ac:
            return None, None
        # Render template
        loader = jinja2.DictLoader({"tpl": ac.commands})
        env = jinja2.Environment(loader=loader)
        template = env.get_template("tpl")
        return ac, template.render(**self.clean_args(obj, **kwargs))

    def expand(self, obj, **kwargs):
        """Rendered command text only; None when nothing applies."""
        return self.expand_ex(obj, **kwargs)[1]

    def execute(self, obj, **kwargs):
        """
        Execute commands
        """
        ac, commands = self.expand_ex(obj, **kwargs)
        if commands is None:
            return None
        # Execute rendered commands
        if ac.config_mode:
            return obj.scripts.configure(commands=commands)
        else:
            return obj.scripts.commands(commands=commands)

    def clean_args(self, obj, **kwargs):
        """Validate and coerce kwargs against the action's parameter definitions.

        :raises ValueError: on a missing required parameter or a value that
                            cannot be coerced to the declared type.
        """
        args = {}
        for p in self.params:
            # NOTE(review): ``not p.default`` treats falsy defaults (0, "") the
            # same as "no default" — confirm that is intended.
            if p.name not in kwargs and p.is_required and not p.default:
                raise ValueError("Required parameter '%s' is missed" % p.name)
            v = kwargs.get(p.name, p.default)
            if v is None:
                continue
            if p.type == "int":
                # Integer type
                try:
                    v = int(v)
                except ValueError:
                    raise ValueError(
                        "Invalid integer in parameter '%s': '%s'" % (p.name, v))
            elif p.type == "float":
                # Float type
                try:
                    v = float(v)
                except ValueError:
                    raise ValueError("Invalid float in parameter '%s': '%s'" % (p.name, v))
            elif p.type == "interface":
                # Interface name, normalized per the object's profile
                try:
                    v = obj.get_profile().convert_interface_name(v)
                except Exception:
                    raise ValueError(
                        "Invalid interface name in parameter '%s': '%s'" % (p.name, v))
            elif p.type == "ip":
                # IP address
                try:
                    v = IP.prefix(v)
                except ValueError:
                    raise ValueError("Invalid ip in parameter '%s': '%s'" % (p.name, v))
            elif p.type == "vrf":
                # Accept a VRF instance, a numeric id, or a name.
                if isinstance(v, VRF):
                    pass
                elif isinstance(v, six.integer_types):
                    try:
                        v = VRF.objects.get(id=v)
                    except VRF.DoesNotExist:
                        raise ValueError(
                            "Unknown VRF in parameter '%s': '%s'" % (p.name, v))
                elif isinstance(v, six.string_types):
                    try:
                        v = VRF.objects.get(name=v)
                    except VRF.DoesNotExist:
                        raise ValueError(
                            "Unknown VRF in parameter '%s': '%s'" % (p.name, v))
                else:
                    raise ValueError("Unknown VRF in parameter '%s': '%s'" % (p.name, v))
            args[str(p.name)] = v
        return args
class Image(EmbeddedDocument):
    """A single image, referenced by its storage path."""
    # Location of the image file.
    path = StringField(required=True)
class Target(Document):
    """
    This class represents a target system. It stores facts about the system,
    any sessions, as well as additional settings like groups. Its status is
    represented as the best status of all sessions associated with this target.
    """

    meta = {
        "collection": COLLECTION_TARGETS,
        "indexes": [
            {"fields": ["name"], "unique": True},
            {"fields": ["uuid"], "unique": True},
        ],
    }

    name = StringField(required=True, null=False, max_length=MAX_STR_LEN, unique=True)
    uuid = StringField(required=True, null=False, max_length=MAX_BIGSTR_LEN, unique=True)
    public_ips = ListField(
        StringField(required=True, null=False, max_length=MAX_STR_LEN),
        required=False)
    facts = DictField(null=False)

    # Cache of the most recent non-archived session list (set by `sessions`).
    _session_cache = None

    @staticmethod
    def get_by_name(name):
        """Query for the target matching *name* (case-insensitive)."""
        return Target.objects.get(name__iexact=name)  # pylint: disable=no-member

    @staticmethod
    def get_by_uuid(uuid):
        """Query for the target matching *uuid* (case-insensitive)."""
        return Target.objects.get(uuid__iexact=uuid)  # pylint: disable=no-member

    @staticmethod
    def list_targets(params):
        """List all targets, fetching facts only when requested."""
        # Attempt to limit the data retrieved
        fields = ["name", "uuid", "public_ips"]
        if params.get("include_facts", False):
            fields += ["facts"]
        return Target.objects().only(*fields)  # pylint: disable=no-member

    @property
    def sessions(self):
        """
        Non-archived sessions associated with this target. Sessions that have
        not been seen past their threshold are archived as a side effect.
        """
        sessions = list(
            Session.objects(target_name=self.name, archived=False)  # pylint: disable=no-member
        )
        # BUGFIX: the original removed items from `sessions` while iterating it,
        # which skips the element following each removal, leaving some stale
        # sessions unarchived. Build the surviving list instead.
        active = []
        for session in sessions:
            # NOTE(review): the modifier scales the timestamp+interval sum as a
            # whole — confirm it was not meant to scale only the interval part.
            threshold = session.timestamp + (session.interval + session.interval_delta)
            threshold *= SESSION_ARCHIVE_MODIFIER
            if time.time() > threshold:
                session.archive()
            else:
                active.append(session)
        self._session_cache = active
        return active

    @property
    def credentials(self):
        """All valid credentials for this target."""
        return list(
            Credential.objects(valid=True, target_name=self.name)  # pylint: disable=no-member
        )

    @property
    def status(self):
        """
        Target status: the best of all session statuses — 'active' wins
        outright, then 'missing', otherwise 'inactive'.
        """
        best_status = SESSION_STATUSES.get("inactive", "inactive")
        active = SESSION_STATUSES.get("active", "active")
        missing = SESSION_STATUSES.get("missing", "missing")
        for session in self.sessions:  # pylint: disable=not-an-iterable
            if session.status == active:
                return active
            if session.status == missing:
                best_status = missing
        return best_status

    @property
    def lastseen(self):
        """
        Last seen time of the target, the minimum of its session timestamps;
        -1 when the target has never been seen.
        """
        sessions = self.sessions
        if sessions:
            return min(session.timestamp for session in sessions)  # pylint: disable=not-an-iterable
        return -1

    def document(self, include_status=True, include_facts=False, include_sessions=False):
        """Filtered JSON document representation of the target."""
        doc = {
            "name": self.name,
            "uuid": self.uuid,
            "public_ips": self.public_ips,
        }
        if include_status:
            doc["status"] = self.status
            doc["lastseen"] = self.lastseen
        if include_facts:
            doc["facts"] = self.facts
        if include_sessions:
            # Reuse the cached list when available to avoid a second query.
            sessions = self._session_cache if self._session_cache else self.sessions
            doc["sessions"] = [session.document for session in sessions]
        return doc

    def set_facts(self, facts):
        """Merge *facts* into the stored facts dictionary and save."""
        for key, value in facts.items():
            self.facts[key] = value  # pylint: disable=unsupported-assignment-operation
        self.save()

    def add_public_ip(self, public_ip):
        """Associate a public ip with the target (idempotent)."""
        if public_ip not in self.public_ips:  # pylint: disable=unsupported-membership-test
            self.public_ips.append(public_ip)  # pylint: disable=no-member
            self.save()

    def remove(self):
        """Remove this document from the database and perform related cleanup."""
        self.delete()
class ConnectionType(Document):
    """
    Inventory connection type: defines connector genders, compatibility
    groups, and an extend-based type hierarchy.
    """
    meta = {
        "collection": "noc.connectiontypes",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["extend", "data", "c_group"],
        "json_collection": "inv.connectiontypes",
        "json_unique_fields": ["name"]
    }

    name = StringField(unique=True)
    is_builtin = BooleanField(default=False)
    description = StringField()
    # Type extends another type, if not null
    extend = PlainReferenceField("self", required=False)
    # List of available genders
    genders = StringField(
        choices=[
            "s",    # Genderless connection
            "ss",   # Genderless connection, 2 or more objects
            "m",    # Only male type
            "f",    # Only female type
            "mmf",  # female, 1 or more males
            "mf",   # male-female
            "mff"   # male, 2 or more females
        ],
        default="mf")
    # ModelData
    data = DictField(default={})
    # Compatible group:
    # a connection is compatible with the opposite gender of the same type
    # and with all types sharing any c_group.
    c_group = ListField(StringField())
    uuid = UUIDField(binary=True)

    # Which gender mates with which (multi-connector genders not listed).
    OPPOSITE_GENDER = {"s": "s", "m": "f", "f": "m"}

    category = ObjectIdField()

    def __unicode__(self):
        # NOTE(review): Python 2-style; Python 3 uses __str__ instead.
        return self.name

    @property
    def json_data(self):
        """Dict representation for JSON collection export."""
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "description": self.description,
            "genders": self.genders,
            "c_group": self.c_group
        }
        if self.extend:
            r["extend__name"] = self.extend.name
        return r

    def to_json(self):
        """Serialize json_data with a stable key order."""
        return to_json(self.json_data,
                       order=["name", "$collection", "uuid", "description"])

    def get_json_path(self):
        # "A | B | C" -> "A/B/C.json"
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    def get_effective_data(self):
        """
        Calculate effective data
        :return:
        """
        raise NotImplementedError

    def get_superclasses(self):
        """All ancestors along the extend chain, nearest first."""
        s = []
        c = self
        while c:
            c = c.extend
            if c:
                s += [c]
        return s

    def get_subclasses(self):
        """All descendants (recursive) that extend this type."""
        s = []
        for c in ConnectionType.objects.filter(extend=self.id):
            s += [c] + c.get_subclasses()
        return s

    def get_inheritance_path(self, other):
        """Chain of types linking *self* and *other* along extend links,
        ancestor-first. NOTE(review): when the types are unrelated, the
        downward walk's partial chain is returned — confirm callers expect
        that rather than an empty list."""
        s = []
        # Upward direction
        c = self
        while c:
            s.insert(0, c)
            if other.id == c.id:
                return s
            c = c.extend
        # Not found, try downward direction
        s = []
        c = other
        while c:
            s.insert(0, c)
            if self.id == c.id:
                return s
            c = c.extend
        return s

    def get_by_c_group(self):
        """Other connection types sharing at least one c_group with self."""
        c_group = self.c_group
        if not c_group:
            return []
        r = []
        for ct in ConnectionType.objects.filter(c_group__in=c_group):
            if ct.id != self.id:
                r += [ct]
        return r

    def get_compatible_types(self, gender):
        """Ids of all connection types a *gender* connector of this type can
        mate with: self, super/subclasses, and c_group members that allow the
        opposite gender."""
        r = []
        og = self.OPPOSITE_GENDER[gender]
        # Add self type if opposite gender allowed
        if og in self.genders:
            r += [self.id]
        if gender in ["m", "s"]:
            # Add superclasses
            for c in self.get_superclasses():
                if og in c.genders:
                    r += [c.id]
        if gender in ["f", "s"]:
            # Add subclasses
            for c in self.get_subclasses():
                if og in c.genders:
                    r += [c.id]
        if self.c_group:
            for c in self.get_by_c_group():
                if og in c.genders:
                    r += [c.id]
        return r
class TaxEDoc(EmbeddedDocument):
    """Embedded taxonomy entry (id, superkingdom, and name)."""
    # Taxonomy id. NOTE(review): stored as a float — confirm ids fit losslessly.
    tid = FloatField()
    superkingdom = StringField()
    name = StringField()
class DataUpload(EmbeddedDocument):
    """Record of one data upload: who, when, what, and any errors."""
    name = StringField()
    # Callable default: evaluated when each document is created.
    timestamp = DateTimeField(default=datetime.datetime.now)
    uploader = StringField()
    # Error messages collected during processing.
    errors = ListField(StringField())
    properties = ListField(StringField())
class AnnotationPipelineResult(EmbeddedDocument):
    """Outcome of one annotation pipeline run, with its inputs and results."""
    name = StringField(required=True)
    # Callable default: evaluated when each document is created.
    timestamp = DateTimeField(required=False, default=datetime.datetime.now)
    # Result version; defaults to 0.
    version = IntField(default=0)
    inputs = DictField()
    results = DictField()
class StrainProp(EmbeddedDocument):
    """A strain property definition with selectable options."""
    name = StringField(required=True)
    description = StringField(default="")
    category = StringField(default="")
    # Allowed values for this property.
    options = ListField(StringField(), default=[])
    # Owner of the property definition.
    user = StringField(default="demo")
class Projects(Document):
    """An MPContribs project (dataset) with ownership, approval workflow, and
    email/SNS notifications driven by mongoengine signal hooks."""

    __project_regex__ = "^[a-zA-Z0-9_]{3,31}$"
    project = StringField(
        min_length=3,
        max_length=30,
        regex=__project_regex__,
        primary_key=True,
        help_text=f"project name/slug (valid format: `{__project_regex__}`)",
    )
    is_public = BooleanField(required=True, default=False, help_text="public/private project")
    title = StringField(
        min_length=5,
        max_length=30,
        required=True,
        unique=True,
        help_text="short title for the project/dataset",
    )
    long_title = StringField(
        min_length=5,
        max_length=55,
        help_text="optional full title for the project/dataset",
    )
    authors = StringField(required=True,
                          help_text="comma-separated list of authors"
                          # TODO set regex to enforce format
                          )
    description = StringField(
        min_length=5,
        max_length=1500,
        required=True,
        help_text="brief description of the project",
    )
    urls = MapField(URLField(null=True), required=True, help_text="list of URLs for references")
    other = DictField(help_text="other information", null=True)
    owner = EmailField(required=True, unique_with="project", help_text="owner / corresponding email")
    is_approved = BooleanField(required=True, default=False, help_text="project approved?")
    meta = {
        "collection": "projects",
        "indexes": ["is_public", "owner", "is_approved"]
    }

    @classmethod
    def post_save(cls, sender, document, **kwargs):
        """Signal hook: on create, notify admins and set up an SNS topic; on
        approval, notify the owner; on any field change, invalidate derived
        notebooks/cards."""
        admin_email = current_app.config["MAIL_DEFAULT_SENDER"]
        admin_topic = current_app.config["MAIL_TOPIC"]
        if kwargs.get("created"):
            # Signed token lets admins approve the application via link.
            ts = current_app.config["USTS"]
            email_project = [document.owner, document.project]
            token = ts.dumps(email_project)
            scheme = "http" if current_app.config["DEBUG"] else "https"
            link = url_for("projects.applications", token=token, _scheme=scheme, _external=True)
            subject = f'New project "{document.project}"'
            hours = int(current_app.config["USTS_MAX_AGE"] / 3600)
            html = render_template("admin_email.html", doc=document, link=link, hours=hours)
            send_email(admin_topic, subject, html)
            # Per-project SNS topic; owner auto-subscribed by email.
            resp = sns_client.create_topic(
                Name=f"mpcontribs_{document.project}",
                Attributes={"DisplayName": f"MPContribs {document.title}"},
            )
            sns_client.subscribe(TopicArn=resp["TopicArn"], Protocol="email", Endpoint=document.owner)
        else:
            set_keys = document._delta()[0].keys()
            if "is_approved" in set_keys and document.is_approved:
                subject = f'Your project "{document.project}" has been approved'
                if current_app.config["DEBUG"]:
                    portal = "http://localhost:" + os.environ["PORTAL_PORT"]
                else:
                    portal = "https://" + os.environ["PORTAL_CNAME"]
                html = render_template(
                    "owner_email.html",
                    approved=True,
                    admin_email=admin_email,
                    host=portal,
                )
                # Publish to the project's own topic (swap the ARN suffix).
                topic_arn = ":".join(
                    admin_topic.split(":")[:-1] + ["mpcontribs_" + document.project])
                send_email(topic_arn, subject, html)
            if set_keys:
                # import here to avoid circular
                from mpcontribs.api.contributions.document import Contributions
                from mpcontribs.api.notebooks.document import Notebooks
                from mpcontribs.api.cards.document import Cards
                # NOTE(review): any changed field wipes all derived notebooks
                # and cards for the project — confirm that is intended.
                contributions = Contributions.objects.only("pk").filter(
                    project=document.project)
                Notebooks.objects(contribution__in=contributions).delete()
                Cards.objects(contribution__in=contributions).delete()

    @classmethod
    def post_delete(cls, sender, document, **kwargs):
        """Signal hook: notify the owner and remove the project's SNS topic."""
        admin_email = current_app.config["MAIL_DEFAULT_SENDER"]
        admin_topic = current_app.config["MAIL_TOPIC"]
        subject = f'Your project "{document.project}" has been deleted'
        html = render_template("owner_email.html", approved=False, admin_email=admin_email)
        topic_arn = ":".join(
            admin_topic.split(":")[:-1] + ["mpcontribs_" + document.project])
        send_email(topic_arn, subject, html)
        sns_client.delete_topic(TopicArn=topic_arn)

    @classmethod
    def pre_save_post_validation(cls, sender, document, **kwargs):
        """Signal hook: sanitize `other` and enforce URL count/label rules."""
        document.other = validate_data(document.other)
        if len(document.urls) > 5:
            raise ValidationError(
                {"error": f"too many URL references (max. 5)"})
        for label in document.urls.keys():
            len_label = len(label)
            if len_label < 3 or len_label > 8:
                raise ValidationError({
                    "error": f"length of URL label {label} should be 3-8 characters"
                })
            for char in label:
                # invalidChars is a module-level constant defined elsewhere.
                if char in invalidChars:
                    raise ValidationError(
                        {"error": f"invalid character '{char}' in {label}"})
class Role(Document, RoleMixin):
    """Named role; RoleMixin (defined elsewhere) supplies role behavior."""
    name = StringField(max_length=255)
    description = StringField(max_length=255)

    def __repr__(self):
        return '<Role %r>' % self.name
class Code(Document):
    """Authorization code record with its access token and expiry."""
    access_token = StringField()
    # Token lifetime — presumably seconds; confirm against the issuing code.
    expires_in = IntField()
    code = StringField()
class MetricType(Document):
    """Performance-management metric type (e.g. ``Interface | Load | In``).

    Maps a metric name onto a database field name/type within a MetricScope
    and validates raw values via :meth:`clean_value`.
    """

    meta = {
        "collection": "noc.metrictypes",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "pm.metrictypes",
        "json_depends_on": ["pm.metricscopes"],
        "json_unique_fields": ["name"],
    }

    # Metric type name, i.e. Interface | Load | In
    name = StringField(unique=True)
    # Global ID
    uuid = UUIDField(binary=True)
    # Metric scope reference
    scope = PlainReferenceField(MetricScope)
    # Database field name
    field_name = StringField()
    # Database field type
    field_type = StringField(choices=[
        ("UInt8", "UInt8"),
        ("Int8", "Int8"),
        ("UInt16", "UInt16"),
        ("Int16", "Int16"),
        ("UInt32", "UInt32"),
        ("Int32", "Int32"),
        ("UInt64", "UInt64"),
        ("Int64", "Int64"),
        ("Float32", "Float32"),
        ("Float64", "Float64"),
        ("String", "String"),
    ])
    # Text description
    description = StringField(required=False)
    # Measure name, like 'kbit/s' (Grafana-compatible)
    measure = StringField()
    # Optional required capability
    required_capability = PlainReferenceField(Capability)
    # Object id in BI, used for counter context hashing
    bi_id = LongField(unique=True)
    # category = ObjectIdField()

    # 60 s TTL lookup caches, guarded by module-level id_lock.
    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @property
    def json_data(self):
        """Dict representation used for JSON collection export."""
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "scope__name": self.scope.name,
            "field_name": self.field_name,
            "field_type": self.field_type,
            "description": self.description,
            "measure": self.measure,
        }
        if self.required_capability:
            r["required_capability__name"] = self.required_capability.name
        return r

    def to_json(self):
        """Serialize json_data with a stable key order."""
        return to_json(
            self.json_data,
            order=[
                "name",
                "$collection",
                "uuid",
                "scope__name",
                "field_name",
                "field_type",
                "description",
                "measure",
                "vector_tag",
            ],
        )

    def get_json_path(self):
        # "A | B | C" -> "A/B/C.json"
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Cached lookup by document id; None when absent."""
        return MetricType.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"), lock=lambda _: id_lock)
    def get_by_name(cls, name):
        """Cached lookup by name; None when absent."""
        return MetricType.objects.filter(name=name).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        """Cached lookup by BI id; None when absent."""
        return MetricType.objects.filter(bi_id=id).first()

    def on_save(self):
        # Ensure storage scopes reflect any field changes (deferred 30 s).
        call_later("noc.core.clickhouse.ensure.ensure_all_pm_scopes",
                   scheduler="scheduler", delay=30)

    def clean_value(self, value):
        """Coerce *value* to this metric's field_type.

        :raises ValueError: when the value cannot be converted or is out of
                            range for the declared type.
        """
        return getattr(self, "clean_%s" % self.field_type)(value)

    # The per-type cleaners below delegate to a single range-checking helper.
    # The original's ``try: int(value) except ValueError: raise`` was a no-op
    # re-raise, so plain int() is behavior-identical.
    # NOTE(review): the signed minima are symmetric (-127, -32767, ...) rather
    # than two's-complement minima (-128, -32768, ...); preserved as-is —
    # confirm the asymmetry is intentional.

    @staticmethod
    def _clean_int(value, min_value, max_value):
        """Convert to int and check inclusive bounds; ValueError on failure."""
        v = int(value)
        if v < min_value or v > max_value:
            raise ValueError("Value out of range")
        return v

    @staticmethod
    def clean_UInt8(value):
        return MetricType._clean_int(value, 0, 255)

    @staticmethod
    def clean_Int8(value):
        return MetricType._clean_int(value, -127, 127)

    @staticmethod
    def clean_UInt16(value):
        return MetricType._clean_int(value, 0, 65535)

    @staticmethod
    def clean_Int16(value):
        return MetricType._clean_int(value, -32767, 32767)

    @staticmethod
    def clean_UInt32(value):
        return MetricType._clean_int(value, 0, 4294967295)

    @staticmethod
    def clean_Int32(value):
        return MetricType._clean_int(value, -2147483647, 2147483647)

    @staticmethod
    def clean_UInt64(value):
        return MetricType._clean_int(value, 0, 18446744073709551615)

    @staticmethod
    def clean_Int64(value):
        return MetricType._clean_int(value, -9223372036854775807, 9223372036854775807)

    @staticmethod
    def clean_Float32(value):
        return float(value)

    @staticmethod
    def clean_Float64(value):
        return float(value)

    @staticmethod
    def clean_String(value):
        return str(value)
class SeqCollection(Document):
    """A sequence collection (genome/assembly) together with its annotation
    pipelines, strains, druggability parameters, statistics and uploads."""

    meta = {
        'allow_inheritance': True,
        'strict': False,
        'collection': "sequence_collection"
    }

    name = StringField(max_length=50, required=True)
    type = StringField(required=False)
    ncbi_assembly = StringField(required=False)
    description = StringField(required=False, default="")
    organism = StringField(max_length=100)
    pathways = ListField(EmbeddedDocumentField(PathwaySumary), default=[])
    kegg = ListField(EmbeddedDocumentField(PathwaySumary), default=[])
    ec_index = BooleanField()
    go_index = BooleanField()
    auth = StringField()
    version = IntField(default=0)
    pipelines = ListField(EmbeddedDocumentField(AnnotationPipelineResult), default=[])
    druggabilityParams = ListField(
        EmbeddedDocumentField(SeqColDruggabilityParam), default=[])
    strains = ListField(EmbeddedDocumentField(Strain), default=[])
    strainProjects = ListField(EmbeddedDocumentField(StrainProject), default=[])
    strainsProps = ListField(EmbeddedDocumentField(StrainProp), default=[])
    tax = EmbeddedDocumentField(TaxEDoc)
    statistics = ListField(EmbeddedDocumentField(Metric))
    uploads = ListField(EmbeddedDocumentField(DataUpload), default=[])

    def __init__(self, **kwargs):
        """Initialize the document and the transient sequence cache."""
        # BUGFIX: was ``super(Document, self).__init__(**kwargs)``, which skips
        # Document in the MRO and bypasses its initialization.
        super(SeqCollection, self).__init__(**kwargs)
        # Transient, not persisted.
        self._sequences = []

    def druggabilityParam(self, name, uploader="demo"):
        """Return druggability params matching *name* and *uploader*."""
        return [
            x for x in self.druggabilityParams
            if x.name == name and x.uploader == uploader
        ]

    def has_druggability_param(self, name, uploader="demo"):
        """True when a param named *name* from *uploader* already exists."""
        return bool(self.druggabilityParam(name, uploader))

    def add_drugability_props_to_genome(self, name, description, target, _type,
                                        options=None, user="******"):
        """Append a SeqColDruggabilityParam unless one named *name* exists.

        _type: SeqColDruggabilityParamTypes
        """
        # NOTE(review): the existence check uses the default uploader ("demo"),
        # not *user* — confirm that is intended.
        if not self.has_druggability_param(name):
            dp = SeqColDruggabilityParam(name=name, description=description,
                                         target=target, type=_type,
                                         uploader=user)
            if options:
                dp.options = options
            self.druggabilityParams.append(dp)
class VPNProfile(Document):
    """Profile shared by VPNs/VRFs of a given type: workflow, naming template,
    labels, and external-system integration."""

    meta = {"collection": "vpnprofiles", "strict": False, "auto_create_index": False}

    name = StringField(unique=True)
    description = StringField()
    type = StringField(
        choices=[
            ("vrf", "VRF"),
            ("vxlan", "VxLAN"),
            ("vpls", "VPLS"),
            ("vll", "VLL"),
            ("evpn", "EVPN"),
            ("ipsec", "IPSec"),
            ("gre", "GRE"),
            ("ipip", "IP-IP"),
        ],
        default="vrf",
    )
    workflow = PlainReferenceField(Workflow)
    # Template.subject to render VPN/VRF.name
    name_template = ForeignKeyField(Template)
    # style = ForeignKeyField(Style)
    # For vrf type -- default prefix profile
    # Used to create AFI root prefixes
    default_prefix_profile = PlainReferenceField("ip.PrefixProfile")
    # Labels
    labels = ListField(StringField())
    effective_labels = ListField(StringField())
    # Integration with external NRI and TT systems
    # Reference to remote system object has been imported from
    remote_system = PlainReferenceField(RemoteSystem)
    # Object id in remote system
    remote_id = StringField()
    # Object id in BI
    bi_id = LongField(unique=True)

    # 60 s TTL lookup caches, guarded by module-level id_lock.
    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        """Cached lookup by document id; None when absent."""
        return VPNProfile.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        """Cached lookup by BI id; None when absent."""
        return VPNProfile.objects.filter(bi_id=id).first()

    def clean(self):
        # vrf profiles need a prefix profile to create AFI root prefixes.
        if self.type == "vrf" and not self.default_prefix_profile:
            raise ValidationError("default_prefix_profile must be set for vrf type")

    @classmethod
    def can_set_label(cls, label):
        """Whether *label* may be attached to VPN profiles."""
        if label.enable_vpnprofile:
            return True
        return False
class Role(Document):
    """A named role, stored in the 'role' collection."""
    meta = {'collection': 'role'}
    name = StringField()
class oohplanrequest(Document):
    """Request document: a required 'wish' string plus an optional filter list."""
    wish = StringField(required=True)
    filters = ListField(default=[], required=False)
class Employee(Document):
    """An employee with references to a Department and a Role."""
    meta = {'collection': 'employee'}
    name = StringField()
    # Callable default: evaluated when each document is created.
    hired_on = DateTimeField(default=datetime.now)
    department = ReferenceField(Department)
    role = ReferenceField(Role)
class TestDoc(Document):
    """Minimal document with a single required name field."""
    name = StringField(required=True)
class Department(Document):
    """A department, stored in the 'department' collection."""
    meta = {'collection': 'department'}
    name = StringField()
class RoleModel(Document):
    """A role with a unique name and display label, in the 'role' collection."""
    meta = {'collection': 'role'}
    # NOTE(review): string Id alongside Mongo's own _id — confirm its purpose.
    Id = StringField()
    name = StringField(unique=True)
    label = StringField()
class CronTab(Document):
    """
    Scheduler task activated by a crontab expression.

    On save the matching scheduler job is created or re-scheduled; on delete
    it is removed (see ensure_job).
    """

    meta = {
        # FIX: the key was misspelled "collections"; mongoengine only honors
        # "collection", so documents silently landed in a default-named
        # collection
        "collection": "crontabs",
        "strict": False,
        "auto_create_index": False,
    }

    name = StringField(unique=True)
    is_active = BooleanField(default=True)
    description = StringField()
    # Handler to execute
    handler = StringField()
    # Crontab fields, seconds through years
    seconds_expr = StringField(default="0")
    minutes_expr = StringField(default="*")
    hours_expr = StringField(default="*")
    days_expr = StringField(default="*")
    months_expr = StringField(default="*")
    weekdays_expr = StringField(default="*")
    years_expr = StringField(default="*")
    # @todo: notification group
    # @todo: log settings

    SCHEDULER = "scheduler"
    JCLS = "noc.services.scheduler.jobs.cron.CronJob"

    def __str__(self):
        # FIX: was __unicode__ (Python 2 legacy, never invoked on Python 3);
        # sibling documents in this codebase define __str__
        return self.name

    def clean(self):
        """Reject documents with an invalid crontab expression or handler."""
        try:
            self.get_entry()
        except ValueError as e:
            raise ValidationError("Invalid crontab expression: %s" % e)
        if not self.get_handler():
            raise ValidationError("Invalid handler")

    @property
    def crontab_expression(self):
        """
        Returns the full 7-field crontab expression, falling back to
        each field's default when unset.
        :return: str
        """
        return " ".join([
            self.seconds_expr or "0",
            self.minutes_expr or "*",
            self.hours_expr or "*",
            self.days_expr or "*",
            self.months_expr or "*",
            self.weekdays_expr or "*",
            self.years_expr or "*"
        ])

    def get_entry(self):
        """
        Parsed crontab entry for this document.
        :return: crontab.CronTab instance
        """
        return crontab.CronTab(self.crontab_expression)

    def get_next(self):
        """
        Get next run
        :return: Next datetime or None
        """
        if not self.is_active:
            return None
        entry = self.get_entry()
        # NOTE(review): relies on the crontab entry supporting next();
        # some versions of the crontab package expose .next() instead of
        # the iterator protocol — confirm against the pinned version
        delta = next(entry)
        if not delta:
            return None
        return datetime.datetime.now() + datetime.timedelta(seconds=delta)

    def get_handler(self):
        """
        Get callable from handler path.
        :return: callable or falsy when unresolvable
        """
        return get_handler(self.handler)

    def run(self):
        """
        Called by scheduler job; executes the configured handler if resolvable.
        :return:
        """
        handler = self.get_handler()
        if handler:
            handler()

    @classmethod
    def get_scheduler(cls):
        return Scheduler(cls.SCHEDULER)

    def on_save(self):
        self.ensure_job()

    def on_delete(self):
        # Force the removal branch in ensure_job
        self.is_active = False
        self.ensure_job()

    def ensure_job(self):
        """
        Create or remove the scheduler job to match is_active and the
        next computed run time.
        :return:
        """
        scheduler = self.get_scheduler()
        if self.is_active:
            ts = self.get_next()
            if ts:
                scheduler.submit(jcls=self.JCLS, key=self.id, ts=ts)
                return
        scheduler.remove_job(jcls=self.JCLS, key=self.id)
class Column(EmbeddedDocument):
    """Per-column display state embedded in a Datalab's order list."""

    # Index of the step (module) the field originates from
    stepIndex = IntField()
    # Field name within that step
    field = StringField()
    visible = BooleanField(default=True)
    pinned = BooleanField(default=False)
class Technology(Document):
    """
    Technology

    Abstraction to restrict ResourceGroup links
    """
    meta = {
        "collection": "technologies",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "inv.technologies",
        "json_unique_fields": ["name"],
    }
    # Group | Name
    name = StringField(unique=True)
    uuid = UUIDField(binary=True)
    description = StringField()
    service_model = StringField()
    client_model = StringField()
    single_service = BooleanField(default=False)
    single_client = BooleanField(default=False)
    allow_children = BooleanField(default=False)
    # Object id in BI
    bi_id = LongField(unique=True)

    # Class-level TTL caches backing the cached lookups below
    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _name_cache = cachetools.TTLCache(maxsize=100, ttl=60)
    _bi_id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        # FIX: was __unicode__ (Python 2 legacy, never invoked on Python 3);
        # sibling documents in this codebase define __str__
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        # Cached lookup by primary id; returns None when not found
        return Technology.objects.filter(id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_bi_id_cache"), lock=lambda _: id_lock)
    def get_by_bi_id(cls, id):
        # Cached lookup by BI id; returns None when not found
        return Technology.objects.filter(bi_id=id).first()

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_name_cache"), lock=lambda _: id_lock)
    def get_by_name(cls, name):
        # Cached lookup by unique name; returns None when not found
        return Technology.objects.filter(name=name).first()

    def get_json_path(self):
        """Relative JSON-collection path derived from the "Group | Name" name."""
        p = [quote_safe_path(n.strip()) for n in self.name.split("|")]
        return os.path.join(*p) + ".json"

    def to_json(self):
        """Serialize for the JSON collection; optional fields only when set."""
        r = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "single_service": self.single_service,
            "single_client": self.single_client,
            "allow_children": self.allow_children
        }
        if self.description:
            r["description"] = self.description
        if self.service_model:
            r["service_model"] = self.service_model
        if self.client_model:
            r["client_model"] = self.client_model
        return to_json(r, order=[
            "name",
            "$collection",
            "uuid",
            "description",
            "service_model",
            "client_model",
            "single_service",
            "single_client",
            "allow_children"
        ])
class Module(EmbeddedDocument):
    """A single Datalab build step: datasource, computed, or form."""

    type = StringField(choices=("datasource", "computed", "form"), required=True)
    datasource = EmbeddedDocumentField(DatasourceModule)
    # a Form model object ID will be passed in
    form = StringField(null=True)
    computed = EmbeddedDocumentField(ComputedModule)
class Link(Document):
    """
    Network links.
    Always contains a list of 2*N references.
    2 - for fully resolved links
    2*N for unresolved N-link portchannel
    N, N > 2 - broadcast media
    """

    meta = {
        "collection": "noc.links",
        "strict": False,
        "auto_create_index": False,
        "indexes": ["interfaces", "linked_objects", "linked_segments"],
    }

    # Optional link name
    name = StringField()
    # Optional description
    description = StringField()
    # Optional shape
    shape = StringField()
    # List of interfaces
    interfaces = PlainReferenceListField("inv.Interface")
    # Link type, detected automatically
    type = StringField(
        choices=[
            # 2 managed objects, 2 linked interfaces
            ("p", "Point-to-Point"),
            # 2 managed objects, even number of linked interfaces (>2)
            ("a", "Point-to-Point Aggregated"),
            # >2 managed objects, one uplink
            ("m", "Point-to-Multipoint"),
            # >2 managed objects, no dedicated uplink
            ("M", "Multipoint-to-Multipoint"),
            # Unknown
            ("u", "Unknown"),
        ],
        default="u",
    )
    # List of linked objects
    linked_objects = ListField(IntField())
    # List of linked segments
    linked_segments = ListField(ObjectIdField())
    # Name of discovery method or "manual"
    discovery_method = StringField()
    # Timestamp of first discovery
    first_discovered = DateTimeField(default=datetime.datetime.now)
    # Timestamp of last confirmation
    last_seen = DateTimeField()
    # L2 path cost
    l2_cost = IntField(default=1)
    # L3 path cost
    l3_cost = IntField(default=1)

    def __str__(self):
        if self.interfaces:
            return "(%s)" % ", ".join(smart_text(i) for i in self.interfaces)
        else:
            return "Stale link (%s)" % self.id

    def iter_changed_datastream(self, changed_fields=None):
        """Yield ("managedobject", id) for each linked object when the datastream is enabled."""
        if config.datastream.enable_managedobject:
            for mo_id in self.linked_objects:
                yield "managedobject", mo_id

    def clean(self):
        """Recompute denormalized object/segment id lists and link type before save."""
        self.linked_objects = list(
            sorted(set(i.managed_object.id for i in self.interfaces)))
        self.linked_segments = list(
            sorted(set(i.managed_object.segment.id for i in self.interfaces)))
        self.type = self.get_type()

    def contains(self, iface):
        """
        Check link contains interface
        :return: boolean
        """
        return iface in self.interfaces

    @property
    def is_ptp(self) -> bool:
        """
        Check link is point-to-point link
        :return:
        """
        return self.type == "p" or self.type == "a"

    @property
    def is_lag(self) -> bool:
        """
        Check link is unresolved LAG
        :return:
        """
        # NOTE(review): condition is identical to is_ptp; given the docstring,
        # this may have been intended as `self.type == "a"` only — confirm
        return self.type == "p" or self.type == "a"

    @property
    def is_broadcast(self) -> bool:
        """
        Check link is broadcast media
        :return:
        """
        # NOTE(review): because is_ptp and is_lag currently share one
        # condition, this reduces to type not in ("p", "a")
        return not self.is_ptp and not self.is_lag

    @property
    def is_loop(self) -> bool:
        """
        Check link is looping to same object
        :return:
        """
        return len(self.linked_objects) == 1

    @property
    def interface_ids(self):
        """
        Returns list of interface ids, avoiding dereference
        :return:
        """
        def q(i):
            # Raw _data entries may be either dereferenced documents or bare ids
            if hasattr(i, "id"):
                return i.id
            return i

        return [q(iface) for iface in self._data.get("interfaces", [])]

    def other(self, interface):
        """
        Return other interfaces of the link
        :param interface:
        :return:
        """
        return [i for i in self.interfaces if i.id != interface.id]

    def other_ptp(self, interface):
        """
        Return other interface of ptp link
        :param interface:
        :return:
        """
        return self.other(interface)[0]

    def touch(self, method=None):
        """
        Touch last_seen
        """
        now = datetime.datetime.now()
        op = {"last_seen": now}
        self.last_seen = now
        if method:
            self.discovery_method = method
            op["discovery_method"] = method
        # Do not save to prevent rebuilding topology
        # NOTE(review): Collection.update is the legacy pymongo API, removed
        # in pymongo 4 (update_one) — confirm pinned driver version
        self._get_collection().update({"_id": self.id}, {"$set": op})
        # self.save()

    @classmethod
    def object_links(cls, object):
        # All links touching the given managed object
        return Link.objects.filter(linked_objects=object.id)

    @classmethod
    def object_links_count(cls, object):
        return Link.objects.filter(linked_objects=object.id).count()

    def on_save(self):
        # Rebuild topology only when the interface set changed
        # (or when change tracking is unavailable)
        if not hasattr(
                self, "_changed_fields") or "interfaces" in self._changed_fields:
            self.update_topology()

    def on_delete(self):
        self.update_topology()

    @property
    def managed_objects(self):
        """
        List of connected managed objects
        """
        # Local import to avoid a circular dependency with sa.models
        from noc.sa.models.managedobject import ManagedObject

        return list(
            ManagedObject.objects.filter(id__in=list(self.linked_objects)))

    @property
    def segments(self):
        """
        List of segments connected by link
        :return:
        """
        # Local import to avoid a circular dependency
        from noc.inv.models.networksegment import NetworkSegment

        return list(NetworkSegment.objects.filter(id__in=self.linked_segments))

    def update_topology(self):
        # Propagate topology recalculation to every connected object
        for mo in self.managed_objects:
            mo.update_topology()

    def get_type(self):
        """
        Detect link type
        :return: Link type as value for .type
        """
        n_objects = len(self.linked_objects)
        n_interfaces = len(self.interfaces)
        if n_objects == 2 and n_interfaces == 2:
            return "p"  # Point-to-point
        if n_objects == 2 and n_interfaces > 2 and n_interfaces % 2 == 0:
            d = defaultdict(int)  # object -> count
            for i in self.interfaces:
                d[i.managed_object.id] += 1
            k = list(d)
            # Aggregated only when both sides contribute equally many interfaces
            if d[k[0]] == d[k[1]]:
                return "a"  # Point-to-Point aggregated
        if n_objects > 2:
            # Preserve an existing point-to-multipoint classification
            if self.type == "m":
                return "m"
            else:
                return "M"
        return "u"
class ConfDBQuery(Document):
    """
    Named, parameterized ConfDB query with capability flags that control
    where it may be applied (filtering, validation, classification).
    """

    meta = {
        "collection": "confdbqueries",
        "strict": False,
        "auto_create_index": False,
        "json_collection": "cm.confdbqueries",
        "json_unique_fields": ["name"],
    }

    name = StringField(unique=True)
    uuid = UUIDField(binary=True)
    description = StringField()
    source = StringField()
    params = ListField(EmbeddedDocumentField(ConfDBQueryParam))
    allow_object_filter = BooleanField(default=False)
    allow_interface_filter = BooleanField(default=False)
    allow_object_validation = BooleanField(default=False)
    allow_interface_validation = BooleanField(default=False)
    allow_object_classification = BooleanField(default=False)
    allow_interface_classification = BooleanField(default=False)
    require_raw = BooleanField(default=False)

    _id_cache = cachetools.TTLCache(maxsize=100, ttl=60)

    def __str__(self):
        return self.name

    @classmethod
    @cachetools.cachedmethod(operator.attrgetter("_id_cache"), lock=lambda _: id_lock)
    def get_by_id(cls, id):
        # Cached lookup by primary id; returns None when not found
        return ConfDBQuery.objects.filter(id=id).first()

    def get_json_path(self):
        """Relative JSON-collection path derived from the pipe-separated name."""
        parts = [quote_safe_path(token.strip()) for token in self.name.split("|")]
        return "%s.json" % os.path.join(*parts)

    def query(self, engine, **kwargs):
        """
        Run query against ConfDB engine

        :param engine: ConfDB engine
        :param kwargs: Optional arguments
        :return: yields result contexts
        """
        # Clean every declared parameter, falling back to its default
        q_params = dict(kwargs)
        for param in self.params:
            raw = q_params.get(param.name, param.default)
            q_params[param.name] = param.get_parameter().clean(raw)
        yield from engine.query(self.source, **q_params)

    def any(self, engine, **kwargs):
        """
        Run query against ConfDB engine and return True if any result found

        :param engine: ConfDB engine
        :param kwargs: Optional arguments
        :return: True if any result found
        """
        return engine.any(self.source, **kwargs)

    def to_json(self):
        """Serialize for the JSON collection; description only when set."""
        data = {
            "name": self.name,
            "$collection": self._meta["json_collection"],
            "uuid": self.uuid,
            "source": self.source,
            "params": [param.to_json() for param in self.params],
            "allow_object_filter": self.allow_object_filter,
            "allow_interface_filter": self.allow_interface_filter,
            "allow_object_validation": self.allow_object_validation,
            "allow_interface_validation": self.allow_interface_validation,
            "allow_object_classification": self.allow_object_classification,
            "allow_interface_classification": self.allow_interface_classification,
            "require_raw": self.require_raw,
        }
        if self.description:
            data["description"] = self.description
        return to_json(
            data,
            order=[
                "name",
                "$collection",
                "uuid",
                "description",
                "source",
                "params",
                "allow_object_filter",
                "allow_interface_filter",
                "allow_object_validation",
                "allow_interface_validation",
                "allow_object_classification",
                "allow_interface_classification",
                "require_raw",
            ],
        )
class Role(Document):
    """
    This class represents a role, which consists of a list of API calls
    that are permitted, plus the usernames holding the role.
    """

    meta = {
        'collection': COLLECTION_ROLES,
        'indexes': [{
            'fields': ['name'],
            'unique': True
        }]
    }

    name = StringField(required=True, null=False, unique=True, max_length=MAX_STR_LEN)
    description = StringField(required=False, max_length=MAX_BIGSTR_LEN)
    allowed_api_calls = ListField(
        StringField(required=True, null=False, max_length=MAX_STR_LEN),
        required=True,
        null=False)
    users = ListField(
        StringField(required=True, null=False, max_length=MAX_STR_LEN))

    @staticmethod
    def list_roles():
        """ Return a list of role objects. """
        return Role.objects()  # pylint: disable=no-member

    @staticmethod
    def get_role(role_name):
        """ Fetch a role by name. """
        return Role.objects.get(name=role_name)  # pylint: disable=no-member

    @property
    def document(self):
        """ JSON-safe view of this role's queryable fields. """
        return dict(
            name=self.name,
            description=self.description,
            allowed_api_calls=self.allowed_api_calls,
            users=self.users,
        )

    def add_member(self, username):
        """ Add a user to this role if it exists. """
        member = User.get_user(username)
        if member.username in self.users:  # pylint: disable=unsupported-membership-test
            # TODO: Raise exception
            return
        self.users.append(member.username)  # pylint: disable=no-member
        self.save()

    def remove_member(self, username):
        """ Remove a user from this role. """
        if username not in self.users:  # pylint: disable=unsupported-membership-test
            # TODO: Raise exception if user not in list
            return
        self.users.remove(username)  # pylint: disable=no-member
        self.save()

    def remove(self):
        """
        Remove this document from the database, and perform any
        related cleanup.
        """
        self.delete()