def import_check_perms():
    accepted_row = []
    forbidden_row = []
    if get_model_id(m) == 'ip.Address':
        accepted_prefixes = get_model(
            'ip.PrefixAccess').objects.filter(
            user=request.user, can_change=True).values_list('prefix', 'vrf_id')
        csv_reader = csv.DictReader(request.FILES['file'])
        keys = None
        for row in csv_reader:
            if not keys:
                keys = row.keys()
            for prefix in accepted_prefixes:
                if self.address_in_network(row['address'], prefix[0])\
                        and get_model('ip.VRF').objects.get(id=prefix[1]).name == row['vrf']:
                    accepted_row.append(row)
                    if row['address'] in forbidden_row:
                        forbidden_row.remove(row['address'])
                else:
                    forbidden_row.append(row['address'])
        forbidden_ip = list(set(forbidden_row))
        new_csv_file = StringIO.StringIO()
        dict_writer = csv.DictWriter(new_csv_file, keys)
        dict_writer.writeheader()
        dict_writer.writerows(accepted_row)
        check_msg = ", \n\nskipped because of PrefixAccess - %d IP: \n%s" % (
            len(forbidden_ip), "\n".join(forbidden_ip))
    else:
        new_csv_file = request.FILES["file"]
        check_msg = ""
    return new_csv_file, check_msg

def handle_migrate(self, dry_run=False, migration=None, profile=None, profiles=None, *args, **kwargs):
    if profile not in self.PROFILE_MAP:
        self.die("Invalid profile %s. Possible profiles:\n%s" % (profile, "\n".join(self.PROFILE_MAP)))
    wfm = WFMigration.objects.filter(name=migration).first()
    if not wfm:
        self.die("Invalid migration %s" % migration)
    pmodel = get_model(profile)
    imodel = get_model(self.PROFILE_MAP[profile])
    for pid in profiles:
        p = pmodel.get_by_id(pid)
        if not p:
            self.die("Profile %s is not found" % pid)
        self.print("Migrating profile %s" % p)
        tr = wfm.get_translation_map(p.workflow)
        if not tr:
            self.print("No translations")
            continue
        for ostate in tr:
            c = imodel.objects.filter(state=ostate.id).count()
            self.print(" %s -> %s: %d records" % (ostate, tr[ostate], c))
            if c and not dry_run:
                for o in imodel.objects.filter(state=ostate.id):
                    o.set_state(tr[ostate])

def ensure_pending_models():
    """
    Django's ForeignKey declared with a string reference is resolved
    when the appropriate model class is imported.
    :return:
    """
    for m in list(pending_lookups):
        get_model(".".join(m))  # Ensure model loading

def ensure_models(cls):
    """
    Load all models referenced by ManagedObjectSelector
    to fully initialize Django models
    :return:
    """
    if hasattr(cls, "_ensured_models"):
        return
    for model_id in cls._REFERRED_MODELS:
        get_model(model_id)
    setattr(cls, "_ensured_models", True)

def get_model(self, datastream):
    if isinstance(datastream, tuple):
        return tuple(self.get_model(ds) for ds in datastream)
    model_id = self.MODELS.get(datastream)
    if not model_id:
        self.die("Unsupported datastream")
    if isinstance(model_id, tuple):
        model = tuple(get_model(mid) for mid in model_id)
    else:
        model = get_model(model_id)
    if not model:
        self.die("Invalid model")
    return model

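# Hypothetical sketch, not taken from the NOC sources: MODELS is assumed to map a
# datastream name to a model id, or to a tuple of model ids for composite
# datastreams. The names below are illustrative only.
EXAMPLE_MODELS = {
    "managedobject": "sa.ManagedObject",
    "alarm": ("fm.ActiveAlarm", "fm.ArchivedAlarm"),
}
# With such a mapping, get_model("managedobject") resolves to a single model class,
# get_model("alarm") to a tuple of classes, and an unknown datastream name aborts
# the command via self.die().
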
def reset_model_labels(cls, model_id: str, labels: List[str]):
    """
    Unset labels from the effective_labels field on model instances
    :param model_id:
    :param labels:
    :return:
    """
    from django.db import connection
    from pymongo import UpdateMany

    model = get_model(model_id)
    if is_document(model):
        coll = model._get_collection()
        coll.bulk_write([
            UpdateMany(
                {"effective_labels": {"$in": labels}},
                {"$pull": {"effective_labels": {"$in": labels}}},
            )
        ])
    else:
        sql = f"""
        UPDATE {model._meta.db_table}
        SET effective_labels=array(
            SELECT unnest(effective_labels)
            EXCEPT
            SELECT unnest(%s::varchar[])
        )
        WHERE effective_labels && %s::varchar[]
        """
        cursor = connection.cursor()
        cursor.execute(sql, [labels, labels])

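# Hypothetical usage sketch, not from the NOC sources: assuming reset_model_labels
# is exposed as a classmethod on main.Label, removing a label from every record of
# a model could look like this. The label name is illustrative.
Label = get_model("main.Label")
Label.reset_model_labels("sa.ManagedObject", ["example-label"])
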
def handle(self, host=None, port=None, *args, **options):
    connect()
    db = get_db()
    collections = set(db.list_collection_names())
    for model_id in iter_model_id():
        model = get_model(model_id)
        if not model:
            self.die("Invalid model: %s" % model_id)
        if not is_document(model):
            continue
        # Rename collections when necessary
        legacy_collections = model._meta.get("legacy_collections", [])
        for old_name in legacy_collections:
            if old_name in collections:
                new_name = model._meta["collection"]
                self.print("[%s] Renaming %s to %s" % (model_id, old_name, new_name))
                db[old_name].rename(new_name)
                break
        # Ensure only documents with auto_create_index == False
        if model._meta.get("auto_create_index", True):
            continue
        # Index model
        self.index_model(model_id, model)
    # Index datastreams
    self.index_datastreams()
    # Index GridVCS
    self.index_gridvcs()
    # Index mongo cache
    self.index_cache()
    # Index datasource cache
    self.index_datasource_cache()
    # @todo: Detect changes
    self.print("OK")

def iter_collections(cls):
    from noc.models import COLLECTIONS, get_model

    for c in COLLECTIONS:
        cm = get_model(c)
        cn = cm._meta["json_collection"]
        cls._MODELS[cn] = cm
        yield Collection(cn)

def get_model_references():
    """
    Build model reference map
    :return: [(model id, [(remote model, remote field), ..], ..]
    """
    from noc.lib.nosql import PlainReferenceField, ForeignKeyField
    from noc.core.model.fields import DocumentReferenceField
    from django.db.models import ForeignKey

    def add_ref(model, ref_model, ref_field):
        model_id = get_model_id(model)
        refs[model_id] += [(ref_model, ref_field)]

    refs = defaultdict(list)  # model -> [(ref model, ref field)]
    for model_id in iter_model_id():
        model = get_model(model_id)
        if not model:
            continue
        if is_document(model):
            # mongoengine document
            for fn in model._fields:
                f = model._fields[fn]
                if isinstance(f, PlainReferenceField):
                    add_ref(f.document_type, model_id, fn)
                elif isinstance(f, ForeignKeyField):
                    add_ref(f.document_type, model_id, fn)
        else:
            # Django model
            for f in model._meta.fields:
                if isinstance(f, ForeignKey):
                    add_ref(f.rel.to, model_id, f.name)
                elif isinstance(f, DocumentReferenceField):
                    add_ref(f.document, model_id, f.name)
    return [(m, refs[m]) for m in refs]

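# Hypothetical usage sketch, not from the NOC sources: walk the reference map built
# above and print which field of which model points at each model id.
for referenced_id, references in get_model_references():
    for referring_id, field_name in references:
        print("%s is referenced by %s.%s" % (referenced_id, referring_id, field_name))
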
def set_dereference(self):
    if isinstance(self.document, str):
        self.document = get_model(self.document)
        self.field.document = self.document
    if hasattr(self.document, "get_by_id"):
        self.dereference = self.dereference_cached
    else:
        self.dereference = self.dereference_uncached

def test_model_loading(model_id):
    """
    Check model referred by id can be loaded
    :param model_id:
    :return:
    """
    model = get_model(model_id)
    assert model is not None, "Cannot load model %s" % model_id

def g_model(self, model_id):
    try:
        md = get_model(model_id)
        if md and hasattr(md, "name"):
            return md
    except Exception as e:
        logger.info("No model: Error %s", e)
    return None

def test_model_id(model_id):
    """
    Check model has same model_id as referred
    """
    model = get_model(model_id)
    if model:
        real_model_id = get_model_id(model)
        assert real_model_id == model_id

def test_model_meta(model_id):
    model = get_model(model_id)
    assert model
    if is_document(model):
        pytest.skip("Not a model")
    assert model._meta
    assert model._meta.app_label
    assert model._meta.db_table

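# Hypothetical parametrization sketch, not from the NOC sources: the tests above
# receive model_id as a parameter; one way to drive them is to parametrize over
# every registered model id.
import pytest
from noc.models import iter_model_id, get_model


@pytest.mark.parametrize("model_id", list(iter_model_id()))
def test_model_is_loadable(model_id):
    assert get_model(model_id) is not None
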
def iter_models(name):
    nn = "_%s" % name
    c = cfg.get(nn)
    if c is None:
        c = [(get_model(x[0]), x[0], x[1]) for x in cfg[name]]
        cfg[nn] = c
    for model, model_id, field in c:
        yield model, model_id, field

def fix():
    for model_id in BI_SYNC_MODELS:
        model = get_model(model_id)
        print("[%s]" % model_id)
        if is_document(model):
            fix_document(model)
        else:
            fix_model(model)

def setup_cleaners(self):
    for field in self.model._meta.local_fields:
        if isinstance(field, ForeignKey):
            self.add_ref_cleaner(field.name, field.remote_field.model)
        elif isinstance(field, DocumentReferenceField):
            remote = field.document
            if isinstance(remote, str):
                remote = get_model(remote)
            self.add_ref_cleaner(field.name, remote)

def document_type(self):
    if isinstance(self.document_type_obj, six.string_types):
        if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
            self.document_type_obj = self.owner_document
        elif isinstance(self.document_type_obj, six.string_types):
            self.document_type_obj = get_model(self.document_type_obj)
        else:
            self.document_type_obj = get_document(self.document_type_obj)
    return self.document_type_obj

def handle_rebuild(self, *args, **options):
    for model_id in FTS_MODELS:
        self.stdout.write("Indexing %s: " % model_id)
        model = get_model(model_id)
        n = 0
        for o in model.objects.all():
            TextIndex.update_index(model, o)
            n += 1
        self.stdout.write("%d records indexed\n" % n)

def do_mapping(self, scope, local_ids=None, remote_system=None, remote_ids=None):
    """
    Perform mapping
    :param scope: scope name
    :param local_ids: List of local ids
    :param remote_system: Remote system id
    :param remote_ids: List of ids from the remote system
    :return:
    """

    def format_obj(o):
        r = {"scope": scope, "id": str(o.id), "mappings": []}
        if o.remote_system:
            r["mappings"] += [{
                "remote_system": str(o.remote_system.id),
                "remote_id": str(o.remote_id)
            }]
        return r

    # Get model to query
    model = get_model(self.SCOPES[scope])
    if not model:
        return 400, self.error_msg("Invalid scope")
    # Query remote objects
    result = []
    if remote_system and remote_ids:
        rs = RemoteSystem.get_by_id(remote_system)
        if not rs:
            return 404, self.error_msg("Remote system not found")
        if len(remote_ids) == 1:
            qs = model.objects.filter(remote_system=rs.id, remote_id=remote_ids[0])
        else:
            qs = model.objects.filter(remote_system=rs.id, remote_id__in=remote_ids)
        result += [format_obj(o) for o in qs]
    # Query local objects
    seen = set(o["id"] for o in result)  # Skip already collected objects
    local_ids = local_ids or []
    local_ids = [o for o in local_ids if o not in seen]
    if local_ids:
        if len(local_ids) == 1:
            qs = model.objects.filter(id=local_ids[0])
        else:
            qs = model.objects.filter(id__in=local_ids)
        result += [format_obj(o) for o in qs]
    # 404 if no objects found
    if not result:
        return 404, self.error_msg("Not found")
    return 200, list(sorted(result, key=operator.itemgetter("id")))

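# Illustrative response shape derived from format_obj() above (scope name and
# identifiers are made up): a successful call returns an HTTP-style status code and
# a list of objects with their remote-system mappings.
EXAMPLE_MAPPING_RESPONSE = (
    200,
    [
        {
            "scope": "managedobject",
            "id": "123",
            "mappings": [{"remote_system": "remote-system-id", "remote_id": "MO-0001"}],
        }
    ],
)
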
def iter_lazy_labels(instance):
    from noc.main.models.label import MATCH_OPS

    model = get_model("main.Label")
    category = cfg.get("clean_lazy_labels")
    if not (hasattr(instance, "iter_lazy_labels") or category):
        return
    for ops in MATCH_OPS:
        ll = model.objects.filter(name=f"noc::{category}::{instance.name}::{ops}").first()
        if not ll:
            continue
        yield ll

def get_data(self):
    # Servers
    if self.object.technology.service_model:
        services = []
        s_model = get_model(self.object.technology.service_model)
        card = self.get_card_name(self.object.technology.service_model)
        for i in s_model.objects.filter(
                effective_service_groups=str(self.object.id)):
            services += [{"id": i.id, "card": card, "label": smart_text(i)}]
    else:
        services = []
    # Clients
    if self.object.technology.client_model:
        clients = []
        c_model = get_model(self.object.technology.client_model)
        card = self.get_card_name(self.object.technology.client_model)
        for i in c_model.objects.filter(
                effective_client_groups=str(self.object.id)):
            clients += [{"id": i.id, "card": card, "label": smart_text(i)}]
    else:
        clients = []
    # Data
    r = {
        "object": self.object,
        "technology": self.object.technology,
        "allow_services": bool(self.object.technology.service_model),
        "allow_clients": bool(self.object.technology.client_model),
        "services": services,
        "clients": clients,
        "children": [],
    }
    # Append children
    for rg in ResourceGroup.objects.filter(parent=self.object.id).order_by("name"):
        r["children"] += [rg]
    return r

def iter_formats(
    cls,
) -> Iterable[Tuple[str, Callable[[Dict[str, Any]], Iterable[Dict[str, Any]]]]]:
    # Do not load in datastream service
    DataStreamConfig = getattr(cls, "_DataStreamConfig", None)
    if not DataStreamConfig:
        cls._DataStreamConfig = get_model("main.DataStreamConfig")
        DataStreamConfig = cls._DataStreamConfig
    cfg = DataStreamConfig.get_by_name(cls.name)
    if cfg:
        yield from cfg.iter_formats()

def test_document_meta(model_id):
    model = get_model(model_id)
    assert model
    if not is_document(model):
        pytest.skip("Not a document")
    assert model._meta.get("allow_inheritance") is None, \
        "'allow_inheritance' is obsolete and must not be used"
    assert not model._meta.get("strict", True), \
        "Document must be declared as {'strict': False}"
    assert not model._meta.get("auto_create_index", True), \
        "Index autocreation must not be used (Use auto_create_index: False)"

def import_check_perms():
    accepted_row = []
    forbidden_row = []
    if get_model_id(m) == "ip.Address":
        accepted_prefixes = (
            get_model("ip.PrefixAccess")
            .objects.filter(user=request.user, can_change=True)
            .values_list("prefix", "vrf_id")
        )
        csv_reader = csv.DictReader(request.FILES["file"])
        keys = None
        for row in csv_reader:
            if not keys:
                keys = list(row)
            for prefix in accepted_prefixes:
                if (
                    self.address_in_network(row["address"], prefix[0])
                    and get_model("ip.VRF").objects.get(id=prefix[1]).name == row["vrf"]
                ):
                    accepted_row.append(row)
                    if row["address"] in forbidden_row:
                        forbidden_row.remove(row["address"])
                else:
                    forbidden_row.append(row["address"])
        forbidden_ip = list(set(forbidden_row))
        new_csv_file = StringIO()
        dict_writer = csv.DictWriter(new_csv_file, keys)
        dict_writer.writeheader()
        dict_writer.writerows(accepted_row)
        check_msg = ", \n\nskipped because of PrefixAccess - %d IP: \n%s" % (
            len(forbidden_ip),
            "\n".join(forbidden_ip),
        )
    else:
        new_csv_file = StringIO(request.FILES["file"].read().decode())
        check_msg = ""
    return new_csv_file, check_msg

def iter_references():
    for model_id in iter_model_id():
        model = get_model(model_id)
        if not model:
            continue
        if is_document(model):
            # MongoEngine document
            for fn in model._fields:
                f = model._fields[fn]
                if isinstance(f, PlainReferenceField):
                    yield f.document_type, model_id, fn
                elif isinstance(f, ForeignKeyField):
                    yield f.document_type, model_id, fn
        else:
            # Django model
            for f in model._meta.fields:
                if isinstance(f, ForeignKey):
                    yield f.remote_field.model, model_id, f.name
                elif isinstance(f, DocumentReferenceField):
                    f_doc = f.document
                    if not is_document(f_doc):
                        f_doc = get_model(f_doc)
                    yield f_doc, model_id, f.name

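# Hypothetical usage sketch, not from the NOC sources: group referencing fields by
# the model they point to, mirroring get_model_references() above.
from collections import defaultdict
from noc.models import get_model_id

reverse_refs = defaultdict(list)
for referenced_model, model_id, field_name in iter_references():
    reverse_refs[get_model_id(referenced_model)].append((model_id, field_name))
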
def handle_export(self, list_collection=False, export_path=None, export_collections=None,
                  export_model_names=None, export_model_uuids=None):
    MODELS = {}
    for c in COLLECTIONS:
        cm = get_model(c)
        cn = cm._meta["json_collection"]
        MODELS[cn] = cm
    if list_collection is not None:
        if list_collection is True:
            for c in Collection.iter_collections():
                print("%s" % c.name, file=self.stdout)
        else:
            if list_collection not in MODELS:
                print("Collection not found", file=self.stdout)
                return
            objs = MODELS[list_collection].objects.all().order_by('name')
            for o in objs:
                print("uuid:%s name:\"%s\"" % (o.uuid, o.name), file=self.stdout)
    else:
        if not export_path or not export_collections:
            return
        if not os.path.isdir(export_path):
            self.die("Path not found: %s" % export_path)
        for ecname in export_collections:
            if ecname not in MODELS:
                print("Collection not found", file=self.stdout)
                continue
            kwargs = {}
            if export_model_names:
                kwargs['name__in'] = export_model_names
            elif export_model_uuids:
                kwargs['uuid__in'] = export_model_uuids
            objs = MODELS[ecname].objects.filter(**kwargs).order_by('name')
            for o in objs:
                path = os.path.join(export_path, ecname, o.get_json_path())
                print("export \"%s\" to %s" % (o.name, path), file=self.stdout)
                safe_rewrite(path, o.to_json(), mode=0o644)

def api_search(self, request, query):
    r = []
    for qr in TextIndex.search(query):
        model = get_model(qr["model"])
        if not model:
            continue  # Invalid model
        url = model.get_search_result_url(qr["object"])
        r += [{
            "title": str(qr["title"]),
            "card": str(qr["card"]),
            "tags": [str(x) for x in (qr.get("tags", []) or [])],
            "url": url,
            "score": qr["score"]
        }]
    return r

def update_for_model(model):
    from django.db import connection
    from noc.models import get_model

    # Get model data
    name_field = ORDER_MAP_MODELS[model]
    coll = get_model(model)._get_collection()
    data = [
        (model, str(d["_id"]), d[name_field])
        for d in coll.find({}, {"_id": 1, name_field: 1})
    ]
    c = connection.cursor()
    c.execute("DELETE FROM main_ordermap WHERE model = %s", [model])
    c.execute(
        "INSERT INTO main_ordermap(model, ref_id, name) VALUES "
        + ",".join(c.mogrify("(%s,%s,%s)", d) for d in data)
    )

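# Hypothetical sketch, not from the NOC sources: ORDER_MAP_MODELS is assumed to map
# a document model id to the field used for ordering; the entries are illustrative.
EXAMPLE_ORDER_MAP_MODELS = {
    "inv.Platform": "name",
    "inv.Vendor": "name",
}
# update_for_model("inv.Platform") would then rebuild the main_ordermap rows for
# that model from the corresponding MongoDB collection.
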
def handle_expire(self, dry_run=False, model=None, *args, **kwargs):
    model = model or self.EXPIRE_MODELS
    now = datetime.datetime.now()
    for m in model:
        c = get_model(m)
        if not c:
            self.die("Invalid model: %s" % m)
        if not getattr(c, "_has_expired", False):
            self.die("Model %s does not support expiration" % m)
        self.print("Expiring %s:" % m)
        for o in c.objects.filter(expired__lt=now):
            if not o.state.ttl:
                continue
            self.print(" %s" % o)
            if not dry_run:
                o.fire_event("expired")