def _handle(self, *args, **options):
    if len(args) < 1:
        self._usage()
    r = args[0].split(".")
    if len(r) != 2:
        self._usage()
    app, model = r
    load_models()
    m = apps.get_model(app, model)
    if not m:
        return self._usage()
    #
    try:
        resolve = {
            "fail": IR_FAIL,
            "skip": IR_SKIP,
            "update": IR_UPDATE
        }[options["resolve"]]
    except KeyError:
        raise CommandError("Invalid resolve option: %s" % options["resolve"])
    # Begin import
    for f in args[1:]:
        print("Importing %s" % f)
        with open(f) as f:
            count, error = csv_import(
                m, f,
                resolution=resolve,
                delimiter=options["delimiter"]
            )
            if count is None:
                raise CommandError(error)
            else:
                print("... %d rows imported/updated" % count)
def handle(self, *args, **options):
    if len(args) < 1:
        print("USAGE: %s <model> <object id> [.. <object id>]" % sys.argv[0])
        sys.exit(1)
    m = args[0].replace("-", "_")
    connect()
    if m not in self.models:
        raise CommandError("Invalid model '%s'. Valid models are: %s" % (m, ", ".join(self.models)))
    objects = []
    getter = getattr(self, "get_%s" % m)
    wiper = getattr(self, "wipe_%s" % m)
    # Get objects
    for o_id in args[1:]:
        o = getter(o_id)
        if not o:
            # Not found
            raise CommandError("Object '%s' is not found" % o_id)
        objects += [o]
    # Wipe objects
    from noc.core.debug import error_report
    with bulk_datastream_changes():
        for o in objects:
            with self.log("Wiping '%s':" % unicode(o), True):
                try:
                    wiper(o)
                except KeyboardInterrupt:
                    raise CommandError("Interrupted. Wiping is not complete")
                except Exception:
                    error_report()
def handle(self, *args, **options):
    #
    parsers = []
    # Read config
    config = SafeConfigParser()
    for p in self.get_parsers():
        config.read(os.path.join("etc", "address", "%s.defaults" % p))
        config.read(os.path.join("etc", "address", "%s.conf" % p))
        if config.getboolean(p, "enabled"):
            m = __import__("noc.gis.parsers.address.%s" % p, {}, {}, "*")
            for l in dir(m):
                a = getattr(m, l)
                if inspect.isclass(a) and issubclass(a, AddressParser) and a != AddressParser:
                    parsers += [a]
        else:
            print("Parser '%s' is not enabled. Skipping.." % p)
    # Initialize parsers
    parsers = [p(config, options) for p in parsers]
    # Download
    if options["download"]:
        for p in parsers:
            print("Downloading", p.name)
            if not p.download():
                raise CommandError("Failed to download %s" % p.name)
    else:
        print("Skipping downloads")
    # Sync
    try:
        for p in parsers:
            print("Syncing", p.name)
            p.sync()
    except Exception:
        error_report()
def handle(self, *args, **options):
    ctr = options.get("countries", [])
    print(ctr)
    print(options)
    # Check countries
    for c in ctr:
        if c not in self.HEADERS or c not in self.DATA:
            raise CommandError("Unsupported country: %s" % c)
    #
    header = ["LEVEL%d" % d for d in range(self.LEVELS)]
    header += [
        "STREET",
        "HOUSE_ADDR",
        "NUM",
        "NUM2",
        "NUM_LETTER",
        "BUILD",
        "BUILD_LETTER",
        "STRUCT",
        "STRUCT2",
        "STRUCT_LETTER",
        "POSTAL_CODE",
    ]
    for c in ctr:
        header += self.HEADERS[c]
    writer = csv.writer(sys.stdout)
    writer.writerow(header)
    for d in Division.get_top():
        self.dump_division(writer, d, ctr, [])
def handle_get(self, objects):
    ol = []
    for o_id in objects:
        o = self.get_object(o_id)
        if not o:
            raise CommandError("Object not found: %s" % o_id)
        ol += [o]
    for o in ol:
        print(self.get_value(o))
def handle(self, *args, **options):
    try:
        self._handle(*args, **options)
    except CommandError as why:
        raise CommandError(why)
    except SystemExit:
        pass
    except Exception:
        error_report()
def create_file(self, path, data):
    self.print(" Writing file %s ..." % path)
    try:
        with open(path, "w") as f:
            f.write(data)
        self.print("done")
    except OSError as e:
        self.print("failed:", e)
        raise CommandError("Failed to write file")
def create_dir(self, path):
    if os.path.isdir(path):
        return
    self.print(" Creating directory %s ..." % path)
    try:
        os.mkdir(path)
        self.print("done")
    except OSError as e:
        self.print("failed:", e)
        raise CommandError("Failed to create directory")
def handle(self, *args, **options):
    #
    se_db_updated = self.update_se_db()
    # Prepare options
    opts = []
    if se_db_updated:
        opts += ["-a"]
    docset = set(args)
    # Prepare environment
    env = os.environ.copy()
    env["PYTHONPATH"] = ":".join(sys.path)
    env["PATH"] = os.path.abspath(os.path.join("contrib", "bin")) + ":" + env["PATH"]
    # Rebuild all documentation
    for conf in glob.glob("share/docs/*/*/conf.py"):
        d, f = os.path.split(conf)
        dn = d.split(os.sep)
        if docset and dn[-1] not in docset:
            continue
        if dn[-1] == "code":
            self.update_code_toc(d)
        target = os.path.abspath(
            os.path.join(d, "..", "..", "..", "..", "static", "doc", dn[-2], dn[-1]))
        doctrees = os.path.join(target, "doctrees")
        html = os.path.join(target, "html")
        for p in [doctrees, html]:
            if not os.path.isdir(p):
                try:
                    os.makedirs(p)
                except OSError:
                    raise CommandError("Unable to create directory: %s" % p)
        cmd = ["sphinx-build"]
        cmd += opts
        cmd += [
            "-b", "html",
            "-d", doctrees,
            "-D", "latex_paper_size=a4",
            ".", html
        ]
        try:
            subprocess.call(cmd, cwd=d, env=env)
        except OSError:
            raise CommandError("sphinx-build not found")
def handle_add(self, *args, **options):
    """
    Add link
    :param args:
    :param options:
    :return:
    """
    if len(args) != 2:
        raise CommandError("Usage: ./noc link --add <iface1> <iface2>")
    i1 = Interface.get_interface(args[0])
    if not i1:
        raise CommandError("Invalid interface: %s" % args[0])
    i2 = Interface.get_interface(args[1])
    if not i2:
        raise CommandError("Invalid interface: %s" % args[1])
    try:
        i1.link_ptp(i2)
    except ValueError as why:
        raise CommandError(str(why))
def handle(self, *args, **options):
    # Check expression
    if len(args) != 1:
        raise CommandError("No expression given")
    expression = args[0]
    # Process profile
    profile = None
    if options["profile"]:
        profile = Profile.get_by_name(options["profile"])
        if not profile:
            raise CommandError("Invalid profile: %s" % options["profile"])
    # Create output
    try:
        out = open(options["output"], "w")
    except IOError as e:
        raise CommandError(str(e))
    # Build
    self.build_prefix_list(out, expression, options["name"], profile)
    # Finalize
    out.close()
def handle_add(self, *args, **options):
    if "username" not in options:
        raise CommandError("Username is not set")
    if "email" not in options:
        raise CommandError("Email is not set")
    if "pwgen" in options:
        passwd = "".join(random.choice(self.pwset) for _ in range(self.PWLEN))
    else:
        passwd = None
    if not passwd:
        raise CommandError("Password is not set")
    permissions = set()
    if not options.get("template"):
        raise CommandError("template permission not set")
    for t in options["template"]:
        if t not in self.TEMPLATES:
            raise CommandError("Invalid template '%s'" % t)
        permissions.update(self.TEMPLATES[t])
    if not permissions:
        raise CommandError("No permissions set")
    # Create user
    u = User(username=options["username"], email=options["email"], is_active=True)
    u.set_password(passwd)
    u.save()
    for p in permissions:
        try:
            perm = Permission.objects.get(name=p)
        except Permission.DoesNotExist:
            perm = Permission(name=p)
            perm.save()
        perm.users.add(u)
    print(passwd)
def get_manifest(self):
    if os.path.exists(".hg"):
        # Repo found
        proc = subprocess.Popen(
            ["hg", "locate"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE
        )
        stdout, stderr = proc.communicate()
        mf = stdout.splitlines()
    elif os.path.exists("MANIFEST"):
        with open("MANIFEST") as f:
            mf = f.read().splitlines()
    else:
        raise CommandError("Cannot find manifest")
    return mf
def handle_mirror(self, *args):
    mirror = config.path.config_mirror_path
    if not mirror:
        raise CommandError("No mirror path set")
    mirror = os.path.realpath(mirror)
    self.out("Mirroring")
    if self.repo == "sa.managedobject.config":
        for o in ManagedObject.objects.filter(is_managed=True):
            v = self.get_value(o)
            if v:
                if self.split == "pool":
                    mpath = os.path.realpath(
                        os.path.join(mirror, unicode(o.pool.name), unicode(o)))
                else:
                    mpath = os.path.realpath(os.path.join(mirror, unicode(o)))
                if mpath.startswith(mirror):
                    self.out(" mirroring %s" % o)
                    safe_rewrite(mpath, v)
                else:
                    self.out(" !!! mirror path violation for %s" % o)
    self.out("Done")
def handle(self, *args, **options):
    # Template variables
    vars = {"year": str(datetime.datetime.now().year), "model": None}
    # Detect templateset
    templateset = "application"
    if options["model"]:
        templateset = "modelapplication"
        vars["model"] = options["model"]
    if options["report"]:
        templateset = {"simple": "simplereport"}[options["report"]]
    # Check templateset
    ts_root = os.path.join("templates", "newapp", templateset)
    if not os.path.isdir(ts_root):
        raise CommandError("Inconsistent template set %s" % templateset)
    # Get installed modules
    modules = set([a[4:] for a in INSTALLED_APPS if a.startswith("noc.")])
    # Fill templates
    for app in args:
        self.print("Creating skeleton for %s" % app)
        m, a = app.split(".", 1)
        if "." in a:
            raise CommandError("Application name must be in form <module>.<app>")
        if m not in modules:
            raise CommandError("Invalid module: %s" % m)
        # Fill template variables
        tv = vars.copy()
        tv["module"] = m
        tv["app"] = a
        # Initialize model if necessary
        if tv["model"]:
            tv["requires"] = ["NOC.%s.%s.Model" % (m, tv["model"].lower())]
            tv["modelimport"] = "noc.%s.models.%s" % (m, a)
            models = __import__(tv["modelimport"], {}, {}, tv["model"])
            model = getattr(models, tv["model"])
            if model is None:
                tv["modelimport"] = "noc.%s.models" % m
                models = __import__(tv["modelimport"], {}, {}, "*")
                model = getattr(models, tv["model"])
            if issubclass(model, Model):
                # Model
                fields = [{"type": "int", "name": "id"}]
                for f in model._meta.fields:
                    if f.name == "id":
                        continue
                    fc = f.__class__.__name__
                    if fc in ("ForeignKey", "OneToOneField"):
                        # Foreign key
                        fr = f.rel.to
                        rc = "%s.%s" % (fr.__module__.split(".")[1], fr.__name__.lower())
                        fd = {
                            "type": "int",
                            "name": f.name,
                            "label": unicode(f.verbose_name),
                            "blank": f.null,
                            "widget": "%s.LookupField" % rc
                        }
                        fields += [fd]
                        fd = {
                            "type": "string",
                            "name": "%s__label" % f.name,
                            "persist": False
                        }
                        fields += [fd]
                        tv["requires"] += ["NOC.%s.LookupField" % rc]
                    else:
                        fd = {
                            "type": self.model_map[fc][0],
                            "name": f.name,
                            "label": unicode(f.verbose_name),
                            "blank": f.null,
                            "widget": self.model_map[fc][1]
                        }
                        if f.default != NOT_PROVIDED and not callable(f.default):
                            fd["default"] = f.default
                        fields += [fd]
                tv["base_class"] = "ExtModelApplication"
            else:
                # Document
                fields = [{"type": "string", "name": "id"}]
                for n, f in model._fields.items():
                    if n == "id":
                        continue
                    fc = f.__class__.__name__
                    if fc == "ForeignKeyField":
                        ft = "int"
                    else:
                        ft = self.document_ext_type[fc]
                    fd = {
                        "type": ft,
                        "name": n,
                        "label": unicode(n),
                        "blank": not f.required
                    }
                    if f.default:
                        fd["default"] = f.default
                    fields += [fd]
                tv["base_class"] = "ExtDocApplication"
            tv["fields"] = fields
        # Format fields for models
        if "fields" in tv:
            # Model fields
            fields = []
            for f in tv["fields"]:
                ff = [("name", f["name"]), ("type", f["type"])]
                if "default" in f and f["type"] != "auto":
                    ff += [("defaultValue", f["default"])]
                if "persist" in f:
                    ff += [("persist", f["persist"])]
                fields += [ff]
            tv["js_fields"] = self.to_js(fields, 1)
            # Form fields
            form_fields = []
            for f in [x for x in tv["fields"] if "widget" in x]:
                ff = [("name", f["name"]), ("xtype", f["widget"])]
                if f["widget"] == "checkboxfield":
                    ff += [("boxLabel", f["label"])]
                else:
                    ff += [("fieldLabel", f["label"])]
                ff += [("allowBlank", f["blank"])]
                form_fields += [ff]
            tv["js_form_fields"] = self.to_js(form_fields, 1)
        # Check application does not exist
        app_root = os.path.join("services", "web", "apps", m, a)
        if os.path.exists(app_root):
            raise CommandError("Application %s already exists" % app)
        # Create apps/__init__.py if missed
        ui_root = os.path.join("ui", "web", m, a)
        # Create application directory
        self.create_dir(app_root)
        # Fill templates
        for dirpath, dirnames, files in os.walk(ts_root):
            dp = dirpath.split(os.sep)[3:]  # strip templates/newapp/<ts>/
            # Create directories
            for fn in files:
                if fn == "DELETE":
                    continue
                # Fill template
                with open(os.path.join(dirpath, fn)) as f:
                    template = f.read()
                content = Template(template).render(Context(tv))
                content = self.compact(content)
                # Write template
                if fn.endswith(".js.j2"):
                    pp = [ui_root] + dp[:-1]
                    dn = os.path.join(*pp)
                else:
                    pp = [app_root] + dp
                    dn = os.path.join(*pp)
                self.create_dir(dn)
                self.create_file(os.path.join(dn, fn[:-3]), content)
def handle(self, *args, **options):
    # Check AFI
    afi = options["afi"].lower()
    if afi.startswith("ipv"):
        afi = afi[3:]
    elif afi.startswith("ip"):
        afi = afi[2:]
    if afi not in ("4", "6"):
        raise CommandError("Invalid AFI: Must be one of 4, 6")
    # Check graphviz options
    ext = None
    if options["output"]:
        ext = os.path.splitext(options["output"])[-1]
        if ext in self.GV_FORMAT:
            # @todo: Check graphviz
            pass
        elif ext != ".dot":
            raise CommandError("Unknown output format")
    if options["layout"] not in self.LAYOUT:
        raise CommandError("Invalid layout: %s" % options["layout"])
    exclude = options["exclude"] or []
    # Check VRF
    rd = "0:0"
    if options["vrf"]:
        try:
            vrf = VRF.objects.get(name=options["vrf"])
            rd = vrf.rd
        except VRF.DoesNotExist:
            if is_rd(options["vrf"]):
                rd = options["vrf"]
            else:
                raise CommandError("Invalid VRF: %s" % options["vrf"])
    self.mo_cache = {}
    self.fi_cache = {}
    self.rd_cache = {}
    self.p_power = defaultdict(int)
    out = ["graph {"]
    out += [" node [fontsize=12];"]
    out += [" edge [fontsize=8];"]
    out += [" overlap=scale;"]
    # out += [" splines=true;"]
    objects = set()
    prefixes = set()
    interfaces = list(self.get_interfaces(afi, rd, exclude=exclude))
    if options["core"]:
        interfaces = [si for si in interfaces if self.p_power[si.prefix] > 1]
    for si in interfaces:
        o_id = "o_%s" % si.object
        p_id = "p_%s" % si.prefix.replace(".", "_").replace(":", "__").replace("/", "___")
        if si.object not in objects:
            objects.add(si.object)
            o = self.get_object(si.object)
            if not o:
                continue
            out += [" %s [shape=box;style=filled;label=\"%s\"];" % (o_id, o.name)]
        if si.prefix not in prefixes:
            prefixes.add(si.prefix)
            out += [" %s [shape=ellipse;label=\"%s\"];" % (p_id, si.prefix)]
        out += [" %s -- %s [label=\"%s\"];" % (o_id, p_id, si.interface)]
    out += ["}"]
    data = "\n".join(out)
    if ext is None:
        print(data)
    elif ext == ".dot":
        with open(options["output"], "w") as f:
            f.write(data)
    else:
        # Pass to graphviz
        with tempfile.NamedTemporaryFile(suffix=".dot") as f:
            f.write(data)
            f.flush()
            subprocess.check_call([
                options["layout"],
                "-T%s" % self.GV_FORMAT[ext],
                "-o%s" % options["output"],
                f.name
            ])
def import_zone(self, path, zone_profile, address_profile, dry_run=False, force=False, clean=False):
    self.print("Loading zone file '%s'" % path)
    self.print("Parsing zone file using BIND parser")
    with open(path) as f:
        rrs = self.iter_bind_zone_rr(f)
        try:
            soa = next(rrs)
        except StopIteration:
            raise CommandError("Unable to parse zone file from %s" % path)
        zone = self.from_idna(soa.zone)
        z = DNSZone.get_by_name(zone)
        if z:
            self.print("Using existing zone '%s'" % zone)
        else:
            self.print("Creating zone '%s'" % zone)
            z = DNSZone(name=zone, profile=zone_profile)
            clean = False  # Nothing to clean
        if z.profile.id != zone_profile.id:
            self.print("Setting profile to '%s'" % zone_profile.name)
            z.profile = zone_profile
        # Apply changes
        if dry_run:
            z.clean()  # Set type
        else:
            z.save()
        # Clean zone when necessary
        if clean:
            self.print("Cleaning zone")
            for rr in DNSZoneRecord.objects.filter(zone=z):
                self.print("Removing %s %s" % (rr.type, rr.name))
                if not dry_run:
                    rr.delete()
        # Populate zone
        vrf = VRF.get_global()
        zz = zone + "."
        lz = len(zz)
        if z.is_forward:
            zp = None
        elif z.is_reverse_ipv4:
            # Calculate prefix for reverse zone
            zp = ".".join(reversed(zone[:-13].split("."))) + "."
        elif z.is_reverse_ipv6:
            raise CommandError("IPv6 reverse import is not implemented")
        else:
            raise CommandError("Unknown zone type")
        for rr in rrs:
            name = rr.name
            if name.endswith(zz):
                name = name[:-lz]
            if name.endswith("."):
                name = name[:-1]
            # rr = None
            # Skip zone NS
            if rr.type == "NS" and not name:
                continue
            if rr.type in ("A", "AAAA"):
                self.create_address(
                    zone,
                    vrf,
                    rr.rdata,
                    "%s.%s" % (name, zone) if name else zone,
                    address_profile,
                    dry_run=dry_run,
                    force=force,
                )
            elif rr.type == "PTR":
                if "." in name:
                    address = zp + ".".join(reversed(name.split(".")))
                else:
                    address = zp + name
                self.create_address(
                    zone,
                    vrf,
                    address,
                    rr.rdata,
                    address_profile,
                    dry_run=dry_run,
                    force=force,
                )
            else:
                zrr = DNSZoneRecord(
                    zone=z,
                    name=name,
                    type=rr.type,
                    ttl=rr.ttl,
                    priority=rr.priority,
                    content=rr.rdata,
                )
                self.print("Creating %s %s" % (rr.type, rr.name))
                if not dry_run:
                    zrr.save()
def handle(self, *args, **options):
    self.repo = options.get("repo")
    self.split = options.get("split")
    if not self.repo or self.repo not in ["sa.managedobject.config"]:
        raise CommandError("Invalid repo")
    return getattr(self, "handle_%s" % options["cmd"])(args)
def handle(self, *args, **options):
    connect()
    if options["verbosity"] >= 2:
        self.logger.setLevel(logging.DEBUG)
    else:
        self.logger.setLevel(logging.INFO)
    for h in self.logger.handlers:
        h.setFormatter(logging.Formatter("%(asctime)s [%(levelname)s] %(message)s"))
    if not options["routerdb"]:
        raise CommandError("No routerdb given")
    if not options["cloginrc"]:
        raise CommandError("No cloginrc given")
    if not options["hosts"]:
        options["hosts"] = ["/etc/hosts"]
    if not options["repo"]:
        raise CommandError("No CVS repository")
    repo_prefix = options.get("repoprefix") or ""
    if not options["object_profile"]:
        raise CommandError("No object profile set")
    try:
        object_profile = ManagedObjectProfile.objects.get(
            name=options["object_profile"].strip())
    except ManagedObjectProfile.DoesNotExist:
        raise CommandError("Invalid object profile: %s" % options["object_profile"])
    if not options["domain"]:
        raise CommandError("No administrative domain set")
    try:
        domain = AdministrativeDomain.objects.get(name=options["domain"].strip())
    except AdministrativeDomain.DoesNotExist:
        raise CommandError("Invalid administrative domain: %s" % options["domain"])
    if not options["pool"]:
        raise CommandError("No pool set")
    try:
        pool = Pool.objects.get(name=options["pool"].strip())
    except Pool.DoesNotExist:
        raise CommandError("Invalid pool: %s" % options["pool"])
    shard_member = 0
    shard_members = 0
    if options.get("shard"):
        shard = options["shard"]
        if "/" not in shard:
            raise CommandError("Shard must be <member>/<members>")
        shard_member, shard_members = [int(x) for x in shard.split("/")]
    tags = []
    if options["tags"]:
        for t in options["tags"]:
            tags += [x.strip() for x in t.split(",")]
    self.dry_run = bool(options["dry_run"])
    #
    if not os.path.isdir(self.TMP):
        os.mkdir(self.TMP)
    #
    revisions = self.index_cvs(options["repo"])
    # Read configs
    hosts = self.parse_hosts(options["hosts"])
    rdb = self.parse_routerdb(options["routerdb"])
    login, ldefaults = self.parse_cloginrc(options["cloginrc"])
    # Process data
    n = 0
    count = len(rdb)
    for name in sorted(rdb):
        if shard_members:
            if n % shard_members != shard_member:
                n += 1
                continue  # Processed by other shard
        self.logger.debug("[%s/%s] Processing host %s", n, count, name)
        n += 1
        profile = Profile.get_by_name(rdb[name])
        address = hosts.get(name)
        if not address:
            # @todo: Resolve
            self.logger.info("Cannot resolve address for %s. Skipping", name)
            continue
        ld = login.get(name, ldefaults)
        if not ld:
            self.logger.info("No credentials for %s. Skipping", name)
            continue
        user = ld.get("user")
        password = ld.get("password")
        if "method" in ld and "ssh" in ld["method"]:
            method = "ssh"
        else:
            method = "telnet"
        self.logger.info(
            "Managed object found: %s (%s, %s://%s@%s/)",
            name, profile.name, method, user, address,
        )
        if not self.dry_run:
            try:
                mo = ManagedObject.objects.get(Q(name=name) | Q(address=address))
                self.logger.info("Already in the database")
            except ManagedObject.DoesNotExist:
                self.logger.info("Creating managed object %s", name)
                mo = ManagedObject(
                    name=name,
                    object_profile=object_profile,
                    administrative_domain=domain,
                    pool=pool,
                    scheme=SSH if method == "ssh" else TELNET,
                    address=address,
                    profile=profile,
                    user=user,
                    password=password,
                    trap_source_ip=address,
                    tags=tags,
                )
                mo.save()
        if name not in revisions:
            self.logger.error("Cannot find config for %s", name)
            continue
        if not self.dry_run:
            self.import_revisions(options["repo"], repo_prefix, mo, name, revisions[name])
def import_zone(
    self,
    path=None,
    axfr=False,
    zone_profile=None,
    address_profile=None,
    transfer_zone=None,
    nameserver=None,
    source_address=None,
    dry_run=False,
    force=False,
    clean=False,
):
    self.print("Loading zone file '%s'" % path)
    self.print("Parsing zone file using BIND parser")
    if path:
        with open(path) as f:
            rrs = self.iter_bind_zone_rr(f)
            try:
                soa = next(rrs)
            except StopIteration:
                raise CommandError("Unable to parse zone file from %s" % path)
            zone = self.from_idna(soa.zone)
            z = self.dns_zone(zone, zone_profile, dry_run, clean)
            # Populate zone
            vrf = VRF.get_global()
            zz = zone + "."
            lz = len(zz)
            if z.is_forward:
                zp = None
            elif z.is_reverse_ipv4:
                # Calculate prefix for reverse zone
                zp = ".".join(reversed(zone[:-13].split("."))) + "."
            elif z.is_reverse_ipv6:
                raise CommandError("IPv6 reverse import is not implemented")
            else:
                raise CommandError("Unknown zone type")
            for rr in rrs:
                name = rr.name
                if name.endswith(zz):
                    name = name[:-lz]
                if name.endswith("."):
                    name = name[:-1]
                # rr = None
                # Skip zone NS
                if rr.type == "NS" and not name:
                    continue
                if rr.type in ("A", "AAAA"):
                    self.create_address(
                        zone,
                        vrf,
                        rr.rdata,
                        "%s.%s" % (name, zone) if name else zone,
                        address_profile,
                        dry_run=dry_run,
                        force=force,
                    )
                elif rr.type == "PTR":
                    if "." in name:
                        address = zp + ".".join(reversed(name.split(".")))
                    else:
                        address = zp + name
                    self.create_address(
                        zone,
                        vrf,
                        address,
                        rr.rdata,
                        address_profile,
                        dry_run=dry_run,
                        force=force,
                    )
                else:
                    zrr = DNSZoneRecord(
                        zone=z,
                        name=name,
                        type=rr.type,
                        ttl=rr.ttl,
                        priority=rr.priority,
                        content=rr.rdata,
                    )
                    self.print("Creating %s %s" % (rr.type, rr.name))
                    if not dry_run:
                        zrr.save()
    if axfr:
        data = self.load_axfr(nameserver, transfer_zone, source_address)
        zone = self.from_idna(transfer_zone)
        z = self.dns_zone(zone, zone_profile, dry_run, clean)
        # Populate zone
        vrf = VRF.get_global()
        zz = zone + "."
        lz = len(zz)
        if z.is_forward:
            zp = None
        elif z.is_reverse_ipv4:
            # Calculate prefix for reverse zone
            zp = ".".join(reversed(zone[:-13].split("."))) + "."
        elif z.is_reverse_ipv6:
            raise CommandError("IPv6 reverse import is not implemented")
        else:
            raise CommandError("Unknown zone type")
        for row in data:
            row = row.strip()
            if row == "" or row.startswith(";"):
                continue
            row = row.split()
            if len(row) != 5 or row[2] != "IN" or row[3] not in ("A", "AAAA", "PTR"):
                continue
            if row[3] in ("A", "AAAA"):
                name = row[0]
                if name.endswith(zz):
                    name = name[:-lz]
                if name.endswith("."):
                    name = name[:-1]
                self.create_address(
                    zone,
                    vrf,
                    row[4],
                    "%s.%s" % (name, zone) if name else zone,
                    address_profile,
                    dry_run=dry_run,
                    force=force,
                )
            if row[3] == "PTR":
                name = row[4]
                if name.endswith(zz):
                    name = name[:-lz]
                if name.endswith("."):
                    name = name[:-1]
                # @todo: IPv6
                if "." in row[0]:
                    address = ".".join(reversed(row[0].split(".")[:-3]))
                else:
                    address = zp + name
                fqdn = row[4]
                if fqdn.endswith("."):
                    fqdn = fqdn[:-1]
                self.create_address(
                    zone,
                    vrf,
                    address,
                    fqdn,
                    address_profile,
                    dry_run=dry_run,
                    force=force,
                )