Example 1
 def migrate(self):
     mdb = self.mongo_db
     # Get default prefix profile
     coll = mdb["prefixprofiles"]
     d = coll.find_one({"name": "default"})
     default_prefix_profile = d["_id"]
     #
     coll = mdb["vpnprofiles"]
     default_id = bson.ObjectId()
     wf = bson.ObjectId("5a01d980b6f529000100d37a")
     profiles = [
         {
             "_id": default_id,
             "name": "default VRF",
             "type": "vrf",
             "description": "Default VRF profile",
             "workflow": wf,
             "default_prefix_profile": default_prefix_profile,
             "bi_id": bson.int64.Int64(bi_hash(default_id)),
         }
     ]
     # Convert styles
     style_profiles = {None: default_id}
     for (style_id,) in self.db.execute("SELECT DISTINCT style_id FROM ip_vrf"):
         if not style_id:
             continue
         p_id = bson.ObjectId()
         p = {
             "_id": p_id,
             "name": "VRF Style %s" % style_id,
             "type": "vrf",
             "description": "Auto-converted for VRF style %s" % style_id,
             "workflow": wf,
             "style": style_id,
             "default_prefix_profile": default_prefix_profile,
             "bi_id": bson.int64.Int64(bi_hash(p_id)),
         }
         style_profiles[style_id] = p_id
         profiles += [p]
     # Insert profiles to database
     coll.insert_many(profiles)
     # Create VRF.profile field
     self.db.add_column(
         "ip_vrf", "profile", DocumentReferenceField("vc.VPNProfile", null=True, blank=True)
     )
     # Migrate profile styles
     for style_id in style_profiles:
         if style_id:
             cond = "style_id = %s" % style_id
         else:
             cond = "style_id IS NULL"
         self.db.execute(
             "UPDATE ip_vrf SET profile = %%s WHERE %s" % cond, [str(style_profiles[style_id])]
         )
     # Make VRF.profile not nullable
     self.db.execute("ALTER TABLE ip_vrf ALTER profile SET NOT NULL")
     # Drop VRF.style
     self.db.delete_column("ip_vrf", "style_id")
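All of these migrations lean on a bi_hash helper that turns an arbitrary identifier (an ObjectId or an integer primary key) into a stable, non-negative 64-bit integer for the bi_id field. A minimal stand-in for experimentation, assuming a SHA-512 based mapping rather than the project's exact implementation:

import hashlib


def bi_hash(v):
    # Hash the stringified value, keep the first 64 bits, and clear the
    # sign bit so the result fits a signed Int64 column
    digest = hashlib.sha512(str(v).encode("utf-8")).hexdigest()
    return int(digest[:16], 16) & 0x7FFFFFFFFFFFFFFF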
Example 2
 def migrate(self):
     coll = self.mongo_db["addressprofiles"]
     default_id = bson.ObjectId()
     wf = bson.ObjectId("5a01d980b6f529000100d37a")
     profiles = [
         {
             "_id": default_id,
             "name": "default",
             "description": "Default address profile",
             "workflow": wf,
             "bi_id": bson.int64.Int64(bi_hash(default_id)),
         }
     ]
     # Convert styles
     style_profiles = {}
     for (style_id,) in self.db.execute("SELECT DISTINCT style_id FROM ip_address"):
         if not style_id:
             style_profiles[None] = default_id
             continue
         p_id = bson.ObjectId()
         p = {
             "_id": p_id,
             "name": "Style %s" % style_id,
             "description": "Auto-converted for style %s" % style_id,
             "workflow": wf,
             "style": style_id,
             "bi_id": bson.int64.Int64(bi_hash(p_id)),
         }
         style_profiles[style_id] = p_id
         profiles += [p]
     # Insert profiles to database
     coll.insert_many(profiles)
     # Create Address.profile field
     self.db.add_column(
         "ip_address",
         "profile",
         DocumentReferenceField("ip.AddressProfile", null=True, blank=True),
     )
     # Migrate profile styles
     for style_id in style_profiles:
         if style_id:
             cond = "style_id = %s" % style_id
         else:
             cond = "style_id IS NULL"
         self.db.execute(
             "UPDATE ip_address SET profile = %%s WHERE %s" % cond,
             [str(style_profiles[style_id])],
         )
     # Make Address.profile not nullable
     self.db.execute("ALTER TABLE ip_address ALTER profile SET NOT NULL")
     # Drop Address.style
     self.db.delete_column("ip_address", "style_id")
Example 3
 def migrate(self):
     # Get existing termination groups
     tg_data = self.db.execute(
         "SELECT id, name, description, remote_system, remote_id, tags FROM sa_terminationgroup"
     )
     if not tg_data:
         return  # Nothing to convert
     bulk = []
     # Create root node for migrated termination groups
     root_id = bson.ObjectId()
     bulk += [
         InsertOne({
             "_id": root_id,
             "name": "Converted T.G.",
             "parent": None,
             "description": "Created from former termination groups",
             "technology":
             bson.ObjectId("5b6d6819d706360001a0b716"),  # Group
             "bi_id": bson.Int64(bi_hash(root_id)),
         })
     ]
     # Attach termination groups
     for id, name, description, remote_system, remote_id, tags in tg_data:
         new_id = bson.ObjectId()
         bulk += [
             InsertOne({
                 "_id": new_id,
                 "name": name,
                 "parent": root_id,
                 "description": description,
                 # May be changed by phone migration
                 "technology": bson.ObjectId("5b6d6be1d706360001f5c04e"),  # Network | IPoE Termination
                 "remote_system": bson.ObjectId(remote_system) if remote_system else None,
                 "remote_id": remote_id,
                 "bi_id": bson.Int64(bi_hash(new_id)),
                 "_legacy_id": id,  # To be removed in future migrations
             })
         ]
     # Apply groups
     self.mongo_db.resourcegroups.bulk_write(bulk)
Example 4
 def migrate(self):
     MODELS = [
         "sa_administrativedomain",
         "sa_authprofile",
         "sa_managedobject",
         "sa_managedobjectprofile",
         "sa_terminationgroup",
     ]
     # Update postgresql tables
     for table in MODELS:
         rows = self.db.execute("SELECT id FROM %s WHERE bi_id IS NULL" % table)
         values = ["(%d, %d)" % (r[0], bi_hash(r[0])) for r in rows]
         while values:
             chunk, values = values[:PG_CHUNK], values[PG_CHUNK:]
             self.db.execute("""
                 UPDATE %s AS t
                 SET
                   bi_id = c.bi_id
                 FROM (
                   VALUES
                   %s
                 ) AS c(id, bi_id)
                 WHERE c.id = t.id
                 """ % (table, ",\n".join(chunk)))
     # Update mongodb collections
     mdb = self.mongo_db
     for coll_name in ["noc.profiles", "noc.services", "noc.serviceprofiles"]:
         coll = mdb[coll_name]
         updates = []
         for d in coll.find({"bi_id": {"$exists": False}}, {"_id": 1}):
             updates += [
                 UpdateOne({"_id": d["_id"]}, {"$set": {"bi_id": bson.Int64(bi_hash(d["_id"]))}})
             ]
             if len(updates) >= MONGO_CHUNK:
                 coll.bulk_write(updates)
                 updates = []
         if updates:
             coll.bulk_write(updates)
     # Alter bi_id fields and create indexes
     for table in MODELS:
         self.db.execute("ALTER TABLE %s ALTER bi_id SET NOT NULL" % table)
         self.db.create_index(table, ["bi_id"], unique=True)
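PG_CHUNK and MONGO_CHUNK are module-level batch-size constants that the excerpt does not show; they bound the size of a single UPDATE statement and of a single bulk_write() call. Plausible values (assumed, not from the source):

PG_CHUNK = 500  # assumed: id/bi_id pairs per UPDATE ... FROM (VALUES ...) statement
MONGO_CHUNK = 500  # assumed: UpdateOne operations per bulk_write() call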
Example 5
 def forwards(self):
     # Update mongodb collections
     mdb = get_db()
     for coll_name in ["noc.firmwares",
                       "noc.interface_profiles",
                       "noc.networksegments",
                       "noc.networksegmentprofiles",
                       "noc.objects",
                       "noc.platforms",
                       "noc.vendors"]:
         coll = mdb[coll_name]
         updates = []
         for d in coll.find({"bi_id": {"$exists": False}}, {"_id": 1}):
             updates += [
                 UpdateOne({"_id": d["_id"]}, {"$set": {"bi_id": bson.Int64(bi_hash(d["_id"]))}})
             ]
             if len(updates) >= MONGO_CHUNK:
                 coll.bulk_write(updates)
                 updates = []
         if updates:
             coll.bulk_write(updates)
Example 6
 def extract(self):
     o = Object._get_collection().with_options(
         read_preference=ReadPreference.SECONDARY_PREFERRED
     )
     for obj in o.find(
         {},
         {"_id": 1, "bi_id": 1, "name": 1, "container": 1, "data": 1},
         no_cursor_timeout=True,
     ):
         # Object.data is a list of {interface, attr, value} documents here,
         # so the whole field is fetched and filtered for the address text
         address = [
             a.get("value", "")
             for a in obj.get("data", [])
             if a and a.get("interface") == "address" and a.get("attr") == "text"
         ]
         yield (
             obj["bi_id"],
             obj["_id"],
             obj.get("name", ""),
             bi_hash(obj["container"]) if obj.get("container") else "",
             address[0] if address else "",
         )
Example 7
 def forwards(self):
     coll = get_db()["prefixprofiles"]
     default_id = bson.ObjectId()
     wf = bson.ObjectId("5a01d980b6f529000100d37a")
     profiles = [
         {
             "_id": default_id,
             "name": "default",
             "description": "Default prefix profile",
             "workflow": wf,
             "bi_id": bson.int64.Int64(bi_hash(default_id)),
         }
     ]
     # Convert styles
     style_profiles = {}
     for (style_id,) in db.execute("SELECT DISTINCT style_id FROM ip_prefix"):
         if not style_id:
             style_profiles[None] = default_id
             continue
         p_id = bson.ObjectId()
         p = {
             "_id": p_id,
             "name": "Style %s" % style_id,
             "description": "Auto-converted for style %s" % style_id,
             "workflow": wf,
             "style": style_id,
             "bi_id": bson.int64.Int64(bi_hash(p_id))
         }
         style_profiles[style_id] = p_id
         profiles += [p]
     # Insert profiles to database
     coll.insert_many(profiles)  # Collection.insert() is gone from recent PyMongo
     # Create Prefix.profile field
     db.add_column(
         "ip_prefix", "profile", DocumentReferenceField("ip.PrefixProfile", null=True, blank=True)
     )
     # Migrate profile styles
     for style_id in style_profiles:
         if style_id:
             cond = "style_id = %s" % style_id
         else:
             cond = "style_id IS NULL"
         db.execute(
             "UPDATE ip_prefix SET profile = %%s WHERE %s" % cond, [str(style_profiles[style_id])]
         )
     # Make Prefix.profile not nullable
     db.execute("ALTER TABLE ip_prefix ALTER profile SET NOT NULL")
     # Drop Prefix.style
     db.delete_column("ip_prefix", "style_id")
Example 8
 def get_data(self):
     # type: () -> Dict[str, Any]
     r = {
         "object": self.object,
         "paths": [],
         "link_sets": 0,
         "if_hash": {},
         "error": None,
         "ajax_query_key": None,
     }  # type: Dict[str, Any]
     mo = self.object.managed_object
     target_level = (mo.object_profile.level // 10 + 1) * 10
     try:
         finder = KSPFinder(
             mo,
             ManagedObjectLevelGoal(target_level),
             constraint=self.get_constraint(),
             n_shortest=self.N_PATHS,
         )
         for path in finder.iter_shortest_paths():
             items = []  # type: List[Dict[str, Any]]
             ingress_links = [[self.object]]  # type: List[List[Interface]]
             for pi in path:
                 item = {
                     "object": pi.start,
                     "ingress": ingress_links,
                     "egress": [],
                 }  # type: Dict[str, Any]
                 ingress_links = []
                 for link in pi.links:
                     egress, ingress = self.split_interfaces(pi.start, link.interfaces)
                     ingress_links += [ingress]
                     item["egress"] += [egress]
                 if item["ingress"] == item["egress"]:
                     item["ingress"] = []
                 r["link_sets"] = max(r["link_sets"], len(item["egress"]))
                 items += [item]
             items += [{"object": pi.end, "ingress": ingress_links, "egress": []}]
             r["paths"] += [items]
     except ValueError as e:
         r["error"] = str(e)
         return r
     # Build interface hashes
     to_collect = set()  # type: Set[Tuple[int, int, str]]
     for path in r["paths"]:
         for item in path:
             for direction in ("ingress", "egress"):
                 for link in item[direction]:
                     for iface in link:
                         ifname = iface.name
                         if ifname not in r["if_hash"]:
                             r["if_hash"][ifname] = bi_hash(ifname)
                         to_collect.add(
                             (iface.managed_object.id, iface.managed_object.bi_id, ifname)
                         )
     # @todo: Encrypt
     r["ajax_query_key"] = self.encode_query(to_collect)
     return r
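encode_query and decode_query are not shown in the excerpt. A minimal stand-in pair, assuming plain base64-encoded JSON (the @todo above suggests the real key is meant to be encrypted):

import base64
import json


def encode_query(items):
    # Stand-in: pack the (mo_id, mo_bi_id, ifname) tuples as base64 JSON;
    # a production key is expected to be signed or encrypted
    return base64.urlsafe_b64encode(json.dumps(sorted(items)).encode()).decode()


def decode_query(key):
    return [tuple(x) for x in json.loads(base64.urlsafe_b64decode(key.encode()))]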
Example 9
def fix_document(model):
    coll = model._get_collection()
    bulk = []
    for d in coll.find({"bi_id": {"$exists": False}}, {"_id": 1}):
        bi_id = bi_hash(d["_id"])
        bulk += [UpdateOne({"_id": d["_id"]}, {"$set": {"bi_id": bson.Int64(bi_id)}})]
    if bulk:
        print("    Update %d items" % len(bulk))
        coll.bulk_write(bulk)
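A minimal driver sketch for fix_document, assuming the caller supplies the MongoEngine document classes to fix:

def fix(models):
    # models: iterable of MongoEngine document classes, each exposing
    # _get_collection() as assumed by fix_document() above
    for model in models:
        print("Fixing %s" % model.__name__)
        fix_document(model)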
Example 10
 def migrate(self):
     # Update mongodb collections
     mdb = self.mongo_db
     for coll_name in ["noc.alarmclasses"]:
         coll = mdb[coll_name]
         updates = []
         for d in coll.find({"bi_id": {"$exists": False}}, {"_id": 1}):
             updates += [
                 UpdateOne({"_id": d["_id"]}, {"$set": {"bi_id": bson.Int64(bi_hash(d["_id"]))}})
             ]
             if len(updates) >= MONGO_CHUNK:
                 coll.bulk_write(updates)
                 updates = []
         if updates:
             coll.bulk_write(updates)
Example 11
 def extract(self):
     o = Object._get_collection().with_options(
         read_preference=ReadPreference.SECONDARY_PREFERRED
     )
     for obj in o.find(
         {},
         {"_id": 1, "bi_id": 1, "name": 1, "container": 1, "data.address.text": 1},
         no_cursor_timeout=True,
     ):
         data = obj.get("data", {})
         yield (
             obj["bi_id"],
             obj["_id"],
             obj.get("name", ""),
             bi_hash(obj["container"]) if obj.get("container") else "",
             data["address"].get("text", "") if data and "address" in data else "",
         )
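Note: this variant reads the older dict-shaped Object.data, where the address text lives at data["address"]["text"]; Example 6 above targets the later schema in which data is a list of {interface, attr, value} documents.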
Example 12
 def forwards(self):
     # Update mongodb collections
     mdb = get_db()
     for coll_name in ["noc.metrictypes"]:
         coll = mdb[coll_name]
         updates = []
         for d in coll.find({"bi_id": {"$exists": False}}, {"_id": 1}):
             updates += [
                 UpdateOne({"_id": d["_id"]}, {"$set": {"bi_id": bson.Int64(bi_hash(d["_id"]))}})
             ]
             if len(updates) >= MONGO_CHUNK:
                 coll.bulk_write(updates)
                 updates = []
         if updates:
             coll.bulk_write(updates)
Example 13
 def migrate(self):
     table = "project_project"
     rows = self.db.execute("SELECT id FROM %s WHERE bi_id IS NULL" % table)
     values = ["(%d, %d)" % (r[0], bi_hash(r[0])) for r in rows]
     while values:
         chunk, values = values[:PG_CHUNK], values[PG_CHUNK:]
         self.db.execute(
             """
             UPDATE %s AS t
             SET
               bi_id = c.bi_id
             FROM (
               VALUES
               %s
             ) AS c(id, bi_id)
             WHERE c.id = t.id
             """
             % (table, ",\n".join(chunk))
         )
     self.db.execute("ALTER TABLE %s ALTER bi_id SET NOT NULL" % table)
Example 14
 def get_ajax_data(self, **kwargs):
     # Parse query params
     query = self.decode_query(
         self.handler.get_argument("key")
     )  # type: List[Tuple[int, int, str]]
     # Get metrics
     from_ts = datetime.datetime.now() - datetime.timedelta(seconds=1800)
     from_ts = from_ts.replace(microsecond=0)
     interface_sql = """
       SELECT
         managed_object,
         path[4] AS iface,
         argMax(load_in, ts) AS load_in,
         argMax(load_out, ts) AS load_out,
         argMax(packets_in, ts) AS packets_in,
         argMax(packets_out, ts) AS packets_out,
         argMax(errors_in, ts) AS errors_in,
         argMax(errors_out, ts) AS errors_out
       FROM interface
       WHERE
         date >= toDate('%s')
         AND ts >= toDateTime('%s')
         AND (%s)
       GROUP BY managed_object, iface
     """ % (
         from_ts.date().isoformat(),
         from_ts.isoformat(sep=" "),
         " OR ".join(
             "(managed_object=%d AND path[4]='%s')" % (q[1], q[2].replace("'", "''"))
             for q in query
         ),
     )
     # Get data
     metrics = []  # type: List[Tuple[str, str, str, str]]
     ch = ch_connection()
     try:
         for (
             mo,
             iface,
             load_in,
             load_out,
             packets_in,
             packets_out,
             errors_in,
             errors_out,
         ) in ch.execute(post=interface_sql):
             if_hash = str(bi_hash(iface))
             metrics += [
                 # (mo, if_hash, "speed", self.humanize_metric(speed)),
                 (mo, if_hash, "load_in", self.humanize_metric(load_in)),
                 (mo, if_hash, "load_out", self.humanize_metric(load_out)),
                 (mo, if_hash, "packets_in", self.humanize_metric(packets_in)),
                 (mo, if_hash, "packets_out", self.humanize_metric(packets_out)),
                 (mo, if_hash, "errors_in", self.humanize_metric(errors_in)),
                 (mo, if_hash, "errors_out", self.humanize_metric(errors_out)),
             ]
     except ClickhouseError:
         pass
     # Set defaults
     m_index = set()  # type: Set[Tuple[int, str]]
     for mo_bi_id, iface, _, _ in metrics:
         m_index.add((int(mo_bi_id), iface))
     interface_metrics = {
         "speed",
         "load_in",
         "load_out",
         "packets_in",
         "packets_out",
         "errors_in",
         "errors_out",
     }
     for _, mo_bi_id, iface in query:
         if (int(mo_bi_id), str(bi_hash(iface))) not in m_index:
             for metric in interface_metrics:
                 metrics += [(str(mo_bi_id), str(bi_hash(iface)), metric, "-")]
     # managed object id -> bi id
     mo_map = {q[0]: q[1] for q in query}  # type: Dict[int, int]
     # Get interface statuses
     for doc in Interface._get_collection().find(
         {"$or": [{"managed_object": q[0], "name": q[2]} for q in query]},
         {
             "_id": 0,
             "managed_object": 1,
             "name": 1,
             "admin_status": 1,
             "oper_status": 1,
             "in_speed": 1,
             "out_speed": 1,
             "full_duplex": 1,
         },
     ):
         mo = str(mo_map[doc["managed_object"]])
         if_hash = str(bi_hash(doc["name"]))
         status = 0
         if "admin_status" in doc and doc["admin_status"]:
             status = 2 if doc["oper_status"] else 1
         duplex = "-"
         if "full_duplex" in doc:
             duplex = "Full" if doc["full_duplex"] else "Half"
         speed = "-"
         if "in_speed" in doc:
             speed = self.humanize_metric(doc["in_speed"] * 1000)
         metrics += [
             (mo, if_hash, "speed", speed),
             (mo, if_hash, "duplex", duplex),
             (mo, if_hash, "status", status),
         ]
     # Get current object statuses
     obj_statuses = ObjectStatus.get_statuses(list(mo_map))
     statuses = {str(mo_map[mo_id]): obj_statuses.get(mo_id, True) for mo_id in obj_statuses}
     return {"metrics": metrics, "statuses": list(statuses.items())}