def __init__(self, *args, **kwargs): ExtApplication.__init__(self, *args, **kwargs) # # Parse themes self.default_theme = config.get("customization", "default_theme") self.themes = {} # id -> {name: , css:} for o in config.options("themes"): if o.endswith(".name"): theme_id = o[:-5] nk = "%s.name" % theme_id ek = "%s.enabled" % theme_id if (config.has_option("themes", nk) and config.has_option("themes", ek) and config.getboolean("themes", ek)): self.themes[theme_id] = { "id": theme_id, "name": config.get("themes", nk).strip(), "css": "/static/pkg/extjs/packages/ext-theme-%s/build/resources/ext-theme-%s-all.css" % (theme_id, theme_id), "js": "/static/pkg/extjs/packages/ext-theme-%s/build/ext-theme-%s.js" % (theme_id, theme_id) } # Login restrictions self.restrict_to_group = self.get_group( config.get("authentication", "restrict_to_group")) self.single_session_group = self.get_group( config.get("authentication", "single_session_group")) self.mutual_exclusive_group = self.get_group( config.get("authentication", "mutual_exclusive_group")) self.idle_timeout = config.getint("authentication", "idle_timeout")
def forwards(self):
    """
    Migration: seed default PM (performance management) objects.

    Creates a default PMDB, PMStorage and PMProbe document only when
    the PM database collection is still empty, so the migration is
    idempotent across re-runs.
    """
    db = get_db()
    if db.noc.pm.db.count() == 0:
        ## Create PMDB
        db.noc.pm.db.insert({
            "name": "default",
            "database": db.name,
            "host": db.connection.host,
            "port": db.connection.port,
            "user": config.get("nosql_database", "user"),
            "password": config.get("nosql_database", "password")
        })
        ## Create PMStorage
        db_id = db.noc.pm.db.find()[0]["_id"]
        db.noc.pm.storage.insert({
            "db": db_id,
            "name": "default",
            "collection": "noc.ts.default",
            # Keep raw samples for one day (seconds)
            "raw_retention": 86400
        })
        ## Create PMProbe
        db.noc.pm.probe.insert({
            "name": "default",
            "is_active": True
        })
def backup_mongo(self):
    """
    Backup mongodb database.

    Dumps the configured MongoDB database with mongodump into a
    timestamped directory under the backup dir, then archives the
    dump into a .tar.gz and removes the directory.

    :returns: True on success, False on failure
    """
    now = datetime.datetime.now()
    f_out = "noc-mongo-%04d-%02d-%02d-%02d-%02d" % (
        now.year, now.month, now.day, now.hour, now.minute)
    out = os.path.join(config.get("path", "backup_dir"), f_out)
    try:
        os.mkdir(out)
    except OSError as why:
        # BUG FIX: the handler previously bound the exception to "e"
        # but formatted the undefined name "why", raising NameError
        self.error("Cannot create directory %s: %s" % (out, why))
        return False
    cmd = [config.get("path", "mongodump"),
           "-d", config.mongo.db,
           "-o", out,
           "-h", config.mongo_connection_args["url"]]
    if config.mongo.user:
        cmd += ["-u", config.mongo.user]
    if config.mongo.password:
        cmd += ["-p", config.mongo.password]
    self.info("Dumping MongoDB database into %s" % out)
    retcode = self.subprocess_call(cmd)
    if retcode:
        self.error("dump failed. Removing broken dump %s" % out)
        self.safe_unlink(out)
        return False
    self.info("Archiving dump")
    r = self.tar(out + ".tar.gz", [config.mongo.db], cwd=out)
    # Remove the unarchived dump directory in either case
    self.safe_unlink(out)
    return r
def backup_repo(self):
    """
    Backup repo.

    Archives every non-hidden entry of the CM repository root into a
    timestamped .tar.gz in the backup directory.
    """
    stamp = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M")
    repo_root = config.get("cm", "repo")
    repo_out = os.path.join(config.get("path", "backup_dir"),
                            "noc-repo-%s.tar.gz" % stamp)
    self.info("dumping repo into %s" % repo_out)
    entries = [f for f in os.listdir(repo_root) if not f.startswith(".")]
    self.tar(repo_out, entries, cwd=repo_root)
    return True
def __init__(self):
    """
    Read Active Directory authentication settings from the
    [authentication] section of the global config.
    """
    super(NOCADBackend, self).__init__()
    self.server = config.get("authentication", "ad_server")
    self.bind_method = config.get("authentication", "ad_bind_method")
    self.bind_dn = config.get("authentication", "ad_bind_dn")
    self.bind_password = config.get("authentication", "ad_bind_password")
    self.users_base = config.get("authentication", "ad_users_base")
    self.users_filter = config.get("authentication", "ad_users_filter")
    self.required_group = config.get("authentication", "ad_required_group")
    self.required_filter = config.get("authentication", "ad_required_filter")
    # Backward-compatible alias: the attribute was historically
    # misspelled as "requred_filter"; keep it so existing callers work
    self.requred_filter = self.required_filter
    self.superuser_group = config.get("authentication", "ad_superuser_group")
    self.superuser_filter = config.get("authentication", "ad_superuser_filter")
def convert_link(cls, kb_entry, link, text=None):
    """
    Render a wiki-style link as an HTML anchor.

    Supported forms: KB<n> (KB entry by id), TT<n> (trouble ticket via
    configured URL template), attach:/attachment: (entry attachments),
    anything else (resolved as KB subject, or used verbatim).

    :param kb_entry: KB entry the link belongs to
    :param link: raw link text
    :param text: optional visible anchor text (defaults to link)
    :returns: unicode HTML anchor
    """
    if text is None:
        text = link
    if link.startswith("KB") and is_int(link[2:]):
        # Numeric KB entry reference
        return u"<a href='/kb/view/%s/'>%s</a>" % (link[2:], text)
    if link.startswith("TT"):
        # Trouble ticket: expand configured URL template
        tt = {"tt": link[2:]}
        tt_url = config.get("tt", "url", tt) % tt
        return u"<a href='%s'>%s</a>" % (tt_url, text)
    if link.startswith("attach:"):
        if text == link:
            text = link[7:]
        return u"<a href='/kb/view/%d/attachment/%s/'>%s</a>" % (
            kb_entry.id, link[7:], text)
    if link.startswith("attachment:"):
        if text == link:
            text = link[11:]
        return u"<a href='/kb/%d/attachment/%s/'>%s</a>" % (
            kb_entry.id, link[11:], text)
    # Free-form link: try to resolve as another KB entry's subject first
    try:
        le = kb_entry.__class__.objects.get(subject=link)
        return u"<a href='/kb/view/%s/'>%s</a>" % (le.id, text)
    except kb_entry.__class__.DoesNotExist:
        return u"<a href='%s'>%s</a>" % (link, text)
def __init__(self, job, enabled=True, to_save=False):
    """
    Initialize the prefix discovery report.

    Loads the prefix change-state map from config and prepares the
    accumulator for newly discovered prefixes.
    """
    super(PrefixReport, self).__init__(job, enabled=enabled,
                                       to_save=to_save)
    change_state = config.get("prefix_discovery", "change_state")
    self.prefix_state_map = self.get_state_map(change_state)
    self.new_prefixes = []
def __init__(self, tick_callback=None, polling_method=None, controller=None,
             write_delay=True, metrics_prefix=None):
    """
    Socket factory constructor.

    :param tick_callback: optional callable invoked on factory ticks
    :param polling_method: poller implementation name; when None it is
        read from config ("main"/"polling_method"), falling back to
        "select" if settings are unavailable
    :param controller: reference to the controlling daemon
    :param write_delay: when False, a control PipeSocket is created
        (presumably to wake the poller on writes — confirm with poller code)
    :param metrics_prefix: prefix for metric names; defaults to "noc."
    """
    if not metrics_prefix:
        metrics_prefix = "noc."
    metrics_prefix += "socketfactory"
    self.metrics = MetricsHub(
        metrics_prefix,
        "sockets.count", "sockets.register", "sockets.unregister",
        "loops", "ticks", "handle.reads", "handle.closed_reads",
        "handle.writes")
    # Socket registries
    self.sockets = {}  # fileno -> socket
    self.socket_name = {}  # socket -> name
    self.name_socket = {}  # name -> socket
    self.new_sockets = []  # list of (socket,name)
    self.tick_callback = tick_callback
    self.to_shutdown = False
    self.register_lock = RLock()  # Guard for register/unregister operations
    self.controller = controller  # Reference to controlling daemon
    if polling_method is None:
        # Read settings if available
        try:
            from noc.settings import config
            polling_method = config.get("main", "polling_method")
        except ImportError:
            polling_method = "select"
    self.poller = get_poller(polling_method)
    # Performance data
    self.cnt_polls = 0  # Number of polls
    self.write_delay = write_delay
    if not self.write_delay:
        self.control = PipeSocket(self)
def execute(self):
    """
    Periodic task: notify about soon-to-expire and already expired
    DNS domains, based on DNSZone.paid_till.
    """
    def format_table(l):
        # Render (name, date) pairs as an aligned two-column text table
        mw=max([len(n) for n,d in l])
        m="%%%ds | %%s"%mw
        out=[m%("Domain","Expiration date")]
        for n,d in l:
            out+=[m%(n,DateFormat(d).format(date_format))]
        return "\n".join(out)
    #
    from noc.main.models import SystemNotification
    from noc.dns.models import DNSZone
    date_format=config.get("main","date_format")
    now=datetime.date.today()
    ## Check expired soon domains
    days=config.getint("dns","warn_before_expired_days")
    # Domains whose paid_till falls within (tomorrow .. now + days)
    soon_expired=list([(z.name,z.paid_till) for z in DNSZone.objects.filter(paid_till__isnull=False,paid_till__range=[now+datetime.timedelta(days=1),now+datetime.timedelta(days=days)]).order_by("paid_till")])
    if soon_expired:
        SystemNotification.notify("dns.domain_expiration_warning",
            subject="%d domains to be expired in %d days"%(len(soon_expired),days),
            body="Following domains are to be expired in %d days:\n"%days+format_table(soon_expired)
        )
    ## Check expired domains
    expired=list([(z.name,z.paid_till) for z in DNSZone.objects.filter(paid_till__isnull=False,paid_till__lte=now).order_by("paid_till")])
    if expired:
        SystemNotification.notify("dns.domain_expired",
            subject="%d domains are expired"%(len(expired)),
            body="Following domains are expired:\n"+format_table(expired)
        )
    return True
def __init__(self, job, enabled=True, to_save=False,
             allow_prefix_restrictions=False):
    """
    Initialize the IP discovery report.

    Loads the address change-state map and the FQDN/description
    solution hooks from config, and prepares accumulators for newly
    discovered addresses, collisions and locked ranges.
    """
    super(IPReport, self).__init__(job, enabled=enabled, to_save=to_save)
    change_state = config.get("ip_discovery", "change_state")
    self.ip_state_map = self.get_state_map(change_state)
    self.new_addresses = []
    self.collisions = []
    # VRF -> [(from ip, to ip)]
    self.locked_ranges = {}
    self.allow_prefix_restrictions = allow_prefix_restrictions
    # Initialize solutions
    self.get_fqdn = get_solution(config.get("ip_discovery", "get_fqdn"))
    self.get_description = get_solution(
        config.get("ip_discovery", "get_description"))
def cmd_out(self, cmd, check=True):
    """
    Run a VCS command and return its stdout.

    :param cmd: list of arguments appended to the configured vcs binary
    :param check: when True, verify the repository before running
    :returns: captured stdout as a string
    """
    if check:
        self.check_repository()
    p = subprocess.Popen([config.get("cm", "vcs_path")] + cmd,
                         stdout=subprocess.PIPE, cwd=self.repo)
    # communicate() reads stdout to EOF *and* waits for the child,
    # so the process is reaped (the old stdout.read() left a zombie)
    d = p.communicate()[0]
    return d
def initialize(cls, scheduler):
    """
    Class-level setup for interface discovery jobs.

    When running under a daemon scheduler, loads the optional
    get_interface_profile solution from config.
    """
    super(InterfaceDiscoveryJob, cls).initialize(scheduler)
    cls.get_interface_profile = None
    if not scheduler.daemon:
        return
    # Compile classification rules
    sol = config.get("interface_discovery", "get_interface_profile")
    if sol:
        cls.get_interface_profile = staticmethod(get_solution(sol))
def __init__(self):
    """
    Read LDAP authentication settings from the [authentication]
    section of the global config.
    """
    super(NOCLDAPBackend, self).__init__()
    self.server = config.get("authentication", "ldap_server")
    self.bind_method = config.get("authentication", "ldap_bind_method")
    self.bind_dn = config.get("authentication", "ldap_bind_dn")
    self.bind_password = config.get("authentication", "ldap_bind_password")
    self.users_base = config.get("authentication", "ldap_users_base")
    self.users_filter = config.get("authentication", "ldap_users_filter")
    self.required_group = config.get("authentication", "ldap_required_group")
    self.required_filter = config.get("authentication", "ldap_required_filter")
    # Backward-compatible alias: the attribute was historically
    # misspelled as "requred_filter"; keep it so existing callers work
    self.requred_filter = self.required_filter
    self.superuser_group = config.get("authentication", "ldap_superuser_group")
    self.superuser_filter = config.get("authentication", "ldap_superuser_filter")
    self.start_tls = config.getboolean("authentication", "ldap_start_tls")
def api_about(self, request):
    """
    Return installation information for the About dialog:
    version, installation name, system UUID and copyright line.
    """
    cp = CPClient()
    current_year = datetime.date.today().year
    return {
        "version": get_version(),
        "installation": config.get("customization", "installation_name"),
        "system_id": cp.system_uuid,
        "copyright": "2007-%d, The NOC Project" % current_year
    }
def tar(self, archive, files, cwd=None):
    """
    Create TAR archive compressed with gzip.

    Runs "tar cf - <files> | gzip > archive" using the configured
    binaries.

    :param archive: output file path
    :param files: list of files to archive; no-op when empty
    :param cwd: working directory for tar
    :returns: True when both tar and gzip exited cleanly, False on
        failure, None when files is empty
    """
    if not files:
        return
    tar_cmd = [config.get("path", "tar"), "cf", "-"] + files
    gzip_cmd = [config.get("path", "gzip")]
    self.debug(("cd %s &&" % cwd if cwd else ".") + " ".join(tar_cmd) +
               " | " + " ".join(gzip_cmd))
    with open(archive, "w") as f:
        try:
            p1 = subprocess.Popen(tar_cmd, cwd=cwd, stdout=subprocess.PIPE)
            p2 = subprocess.Popen(gzip_cmd, stdin=p1.stdout, stdout=f)
        except OSError as why:
            self.error("Failed to tar: %s" % why)
            return False
        # Close our copy of the pipe write end so tar receives SIGPIPE
        # if gzip exits early
        p1.stdout.close()
        # Check both sides of the pipeline: previously only gzip's exit
        # code was examined and tar was never reaped (zombie process,
        # and a failing tar went unnoticed)
        return p2.wait() == 0 and p1.wait() == 0
def backup_postgres(self):
    """
    Backup postgresql database.

    Builds a pg_dump command from config, writes a temporary .pgpass
    file with the credentials, runs the dump and cleans up.

    :returns: True on success, False on failure
    """
    def pgpass_quote(s):
        # Escape backslash and colon per the .pgpass file format
        return s.replace("\\", "\\\\").replace(":", "\\:")

    now = datetime.datetime.now()
    # host, port, database, user, password
    pgpass = ["*", "*", "*", "*", ""]
    out = "noc-db-%04d-%02d-%02d-%02d-%02d.dump" % (
        now.year, now.month, now.day, now.hour, now.minute)
    out = os.path.join(config.get("path", "backup_dir"), out)
    # Build pg_dump command and options
    cmd = [config.get("path", "pg_dump"), "-Fc"]
    cmd += ["-f", out]
    if config.pg.user:
        cmd += ["-U", config.pg.user]
        pgpass[3] = config.pg.user
    if config.pg.password:
        pgpass[4] = config.pg.password
    cmd += ["-h", config.pg_connection_args["host"]]
    pgpass[0] = config.pg_connection_args["host"]
    if config.pg_connection_args["port"]:
        cmd += ["-p", str(config.pg_connection_args["port"])]
        # Coerce to str: the port may be an int, while pgpass_quote()
        # and the join below require string items
        pgpass[1] = str(config.pg_connection_args["port"])
    cmd += [settings.DATABASES["default"]["NAME"]]
    pgpass[2] = settings.DATABASES["default"]["NAME"]
    # Create temporary .pgpass
    pgpass_data = ":".join([pgpass_quote(x) for x in pgpass])
    pgpass_path = os.path.join(os.getcwd(), "local", "cache", "pgpass",
                               ".pgpass")
    safe_rewrite(pgpass_path, pgpass_data, mode=0o600)
    env = os.environ.copy()
    env["PGPASSFILE"] = pgpass_path
    # Launch pg_dump
    self.info("Dumping PostgreSQL database into %s" % out)
    self.debug(" ".join(cmd))
    try:
        retcode = self.subprocess_call(cmd, env=env)
    finally:
        # Always remove the credentials file; previously it was left
        # behind when the dump failed
        self.safe_unlink(pgpass_path)
    if retcode != 0:
        self.error("dump failed. Removing broken dump %s" % out)
        self.safe_unlink(out)
        return False
    return True
def get_data(self, **kwargs):
    """
    Build the backup report dataset: backup files from the backup
    directory with their modification time and size, ordered by
    modification time.
    """
    data = []
    bd = config.get("path", "backup_dir")
    if os.path.isdir(bd):
        r = []
        for f in os.listdir(bd):
            if not f.startswith("noc-"):
                continue
            if not (f.endswith(".dump") or f.endswith(".tar.gz")):
                continue
            s = os.stat(os.path.join(bd, f))
            r.append([f,
                      datetime.datetime.fromtimestamp(s[stat.ST_MTIME]),
                      s[stat.ST_SIZE]])
        # key= sort works on both Python 2 and 3, unlike the old
        # cmp-style "sorted(r, lambda x, y: cmp(...))"
        data = sorted(r, key=lambda x: x[1])
    # Rows carry three fields; the header previously listed only two,
    # leaving the modification-time column unlabeled
    return self.from_dataset(title=self.title,
                             columns=["File", "Last Modified", "Size"],
                             data=data)
def setup_processor(request):
    """
    Django template context processor exposing installation branding
    settings (name, logo, favicon) under the "setup" key.
    """
    favicon_url = config.get("customization", "favicon_url")
    favicon_mime = None
    if favicon_url.endswith(".png"):
        favicon_mime = "image/png"
    elif favicon_url.endswith((".jpg", ".jpeg")):
        favicon_mime = "image/jpeg"
    setup = {
        "installation_name": config.get("customization",
                                        "installation_name"),
        "logo_url": config.get("customization", "logo_url"),
        "logo_width": config.get("customization", "logo_width"),
        "logo_height": config.get("customization", "logo_height"),
        "favicon_url": favicon_url,
        "favicon_mime": favicon_mime,
    }
    return {"setup": setup}
def handle(self, *args, **options): db_name = config.get("database", "name") # Check PostGIS is enabled if not check_postgis(): raise CommandError("PostGIS is not installed. "\ "Install PostGIS into '%s' database" % db_name) # Check spatial references are loaded if not check_srs(): raise CommandError("Spatial references not loaded. "\ "Load spatial_ref_sys.sql into "\ "'%s' database" % db_name) # Check osm2pgsql tool present options["osm2pgsql"] = search_path("osm2pgsql") if not options["osm2pgsql"]: raise CommandError("osm2pgsql not found. "\ "Install osm2pgsql and ensure "\ "it is in system $PATH") # Check --file or --bbox option is set if not options["file"] and not options["bbox"]: raise CommandError("Set either --file or --bbox") # Process if options["file"]: # Process existing file rbbox = self.process_file(options["file"], **options) else: # Download and process file # Check --bbox try: bbox = [float(b) for b in options["bbox"].split(",")] except ValueError, why: raise CommandError("Invalid bounding box format: %s" % why) if len(bbox) != 4: raise CommandError("Invalid bounding box format") if not ((-180 <= bbox[0] <= 180) and (-180 <= bbox[2] <= 180)): raise CommandError("Invalid bounding box:"\ "Latitude must be between -180 and 180") if not ((-90 <= bbox[1] <= 90) and (-90 <= bbox[3] <= 90)): raise CommandError("Invalid bounding box:"\ "Longiture must be between -90 and 90") bbox = [min(bbox[0], bbox[2]), min(bbox[1], bbox[3]), max(bbox[0], bbox[2]), max(bbox[1], bbox[3])] # Download and process file with temporary_file() as p: print "Requesting OSM data" url = self.OSM_API_URL + ",".join([str(b) for b in bbox]) print url u = urlopen(url) with open(p, "w") as f: f.write(u.read()) rbbox = self.process_file(p, **options)
def initialize(cls, scheduler):
    """
    Class-level setup for the prefix report.

    When prefixes are being saved, resolves the optional custom
    discovery pyRule named in config and caches it on the class.
    """
    super(PrefixReport, cls).initialize(scheduler)
    cls.p_custom_pyrule = None
    if not cls.save_prefix:
        return
    rule_name = config.get("prefix_discovery", "custom_pyrule")
    matches = list(PyRule.objects.filter(name=rule_name,
                                         interface="IGetDiscoveryCustom"))
    if matches:
        scheduler.info("Enabling prefix discovery custom pyRule '%s'"
                       % rule_name)
        cls.p_custom_pyrule = matches[0]
    else:
        scheduler.error(
            "Prefix discovery custom pyRule '%s' is not found. Ignoring."
            % rule_name)
def forwards(self):
    """
    Migration: add a "last_modified" column to every CM object table
    and backfill it from the mtime of the corresponding file in the
    CM repository (when a repo root is configured).
    """
    repo_root = config.get("cm", "repo")
    for ot in TYPES:
        db.add_column(
            "cm_%s" % ot,
            "last_modified",
            models.DateTimeField("Last Modified", blank=True, null=True))
        if repo_root:
            repo = os.path.join(repo_root, TYPES[ot])
            for id, repo_path in db.execute(
                    "SELECT id,repo_path FROM cm_%s" % ot):
                path = os.path.join(repo, repo_path)
                if os.path.exists(path):
                    # Use the repo file's modification time as the
                    # initial last_modified value
                    lm = datetime.datetime.fromtimestamp(
                        os.stat(path)[stat.ST_MTIME])
                    # %%s keeps a literal %s placeholder after the
                    # table-name substitution
                    db.execute(
                        "UPDATE cm_%s SET last_modified=%%s WHERE id=%%s" % ot,
                        [lm, id])
def handle_mirror(self):
    """
    Mirror GridVCS repository contents into a filesystem directory.

    For the managed-object config repo, writes each object's current
    value under the mirror root, refusing paths that escape it.

    :raises CommandError: when no mirror path is configured
    """
    mirror = config.get("gridvcs", "mirror.%s" % self.repo) or None
    if not mirror:
        raise CommandError("No mirror path set")
    mirror = os.path.realpath(mirror)
    self.out("Mirroring")
    if self.repo == "sa.managedobject.config":
        for o in ManagedObject.objects.filter(is_managed=True):
            v = self.get_value(o)
            if v:
                mpath = os.path.realpath(os.path.join(mirror, unicode(o)))
                # Guard against path traversal outside the mirror root
                if mpath.startswith(mirror):
                    self.out(" mirroring %s" % o)
                    safe_rewrite(mpath, v)
                else:
                    # BUG FIX: the format string had no %s placeholder,
                    # so "..." % o raised TypeError instead of logging
                    self.out(" !!! mirror path violation for %s" % o)
    self.out("Done")
def backup_etc(self):
    """
    Backup etc/.

    Archives all non-hidden *.conf files in etc/ plus everything in
    etc/ssh/ into a timestamped .tar.gz in the backup directory.

    :returns: result of self.tar, or False when listing files fails
    """
    stamp = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M")
    etc_out = os.path.join(config.get("path", "backup_dir"),
                           "noc-etc-%s.tar.gz" % stamp)
    self.info("dumping etc/ into %s" % etc_out)
    try:
        files = [os.path.join("etc", f)
                 for f in os.listdir("etc")
                 if f.endswith(".conf") and not f.startswith(".")]
        ssh_dir = os.path.join("etc", "ssh")
        files += [os.path.join(ssh_dir, f)
                  for f in os.listdir(ssh_dir)
                  if not f.startswith(".")]
    except OSError as why:
        self.error("Failed to get list of files: %s" % why)
        return False
    return self.tar(etc_out, files)
def check_paths(self):
    """
    Verify all executables and directories are exists.

    Checks the backup directory is writable and that every external
    binary (pg_dump, mongodump, tar, gzip) exists and is executable.

    :returns: True when everything checks out, False otherwise
    """
    self.info("Checking paths")
    # Check backup dir is writable
    b_dir = config.path.backup_dir
    if not os.access(b_dir, os.W_OK):
        self.error("%s is not writable" % b_dir)
        return False
    # Check binaries
    for p in ("pg_dump", "mongodump", "tar", "gzip"):
        path = config.get("path", p)
        if not os.path.exists(path):
            self.error("%s is not found" % path)
            return False
        if not os.access(path, os.R_OK | os.X_OK):
            self.error("Permission denied: %s" % path)
            # BUG FIX: previously this branch fell through and the
            # method still returned True despite the error
            return False
    return True
def setup(cls):
    """
    Load i18n collection translation settings from config.

    Options look like "collections.<name>" (comma-separated language
    list) or "collections.<name>.allow_fuzzy" (boolean); "<name>" of
    "global" maps to the None key.
    """
    from noc.settings import config
    for opt in config.options("i18n"):
        if not opt.startswith("collections."):
            continue
        cn = opt[12:]  # strip the "collections." prefix
        if cn.endswith(".allow_fuzzy"):
            # BUG FIX: the original used "cn = opt[:-12]", which left
            # the "collections." prefix on the key, so ALLOW_FUZZY
            # entries never matched their TRANSLATIONS counterparts
            cn = cn[:-12]  # strip the ".allow_fuzzy" suffix
            if cn == "global":
                cn = None
            cls.ALLOW_FUZZY[cn] = config.getboolean("i18n", opt)
        else:
            if cn == "global":
                cn = None
            tr = [
                x.strip() for x in config.get("i18n", opt).split(",")
            ]
            # English is always available as a fallback
            if "en" not in tr:
                tr += ["en"]
            cls.TRANSLATIONS[cn] = tr
def view_desktop(self, request):
    """
    Render application root template
    """
    cp = CPClient()
    # Collect all Ext/Model applications registered on the site
    ext_apps = [a for a in self.site.apps
                if isinstance(self.site.apps[a], ExtApplication) or
                isinstance(self.site.apps[a], ModelApplication)]
    apps = [a.split(".") for a in sorted(ext_apps)]
    # Prepare settings
    favicon_url = config.get("customization", "favicon_url")
    # Derive favicon MIME type from the file extension
    if favicon_url.endswith(".png"):
        favicon_mime = "image/png"
    elif favicon_url.endswith(".jpg") or favicon_url.endswith(".jpeg"):
        favicon_mime = "image/jpeg"
    else:
        favicon_mime = None
    # Branding/feature settings passed to the desktop template
    setup = {
        "system_uuid": cp.system_uuid,
        "installation_name": config.get("customization",
                                        "installation_name"),
        "logo_url": config.get("customization", "logo_url"),
        "logo_width": config.get("customization", "logo_width"),
        "logo_height": config.get("customization", "logo_height"),
        "branding_color": config.get("customization", "branding_color"),
        "branding_background_color": config.get(
            "customization", "branding_background_color"),
        "favicon_url": favicon_url,
        "favicon_mime": favicon_mime,
        "debug_js": config.getboolean("main", "debug_js"),
        "install_collection": config.getboolean("develop",
                                                "install_collection"),
        "enable_gis_base_osm": config.getboolean("gis", "enable_osm"),
        "enable_gis_base_google_sat": config.getboolean(
            "gis", "enable_google_sat"),
        "enable_gis_base_google_roadmap": config.getboolean(
            "gis", "enable_google_roadmap"),
        "trace_extjs_events": config.getboolean("main",
                                                "trace_extjs_events"),
        "preview_theme": self.get_preview_theme(request)
    }
    theme = self.get_theme(request)
    return self.render(
        request, "desktop.html",
        apps=apps,
        setup=setup,
        theme=theme,
        theme_css=self.themes[theme]["css"],
        theme_js=self.themes[theme]["js"]
    )
def prepare_classification():
    """
    Initialize the module-level interface classification hook.

    Sets _get_interface_profile either from the pyRule named in config
    or, failing that, from the active InterfaceClassificationRule
    objects compiled into a classificator.
    """
    global _get_interface_profile
    pyrule_name = config.get("interface_discovery",
                             "classification_pyrule")
    if pyrule_name:
        # Use pyRule
        matches = list(PyRule.objects.filter(
            name=pyrule_name, interface="IInterfaceClassification"))
        if matches:
            _get_interface_profile = matches[0]
        # A missing pyRule is silently ignored
    elif InterfaceClassificationRule.objects.filter(
            is_active=True).count():
        # Compile active classification rules into a classificator
        _get_interface_profile = \
            InterfaceClassificationRule.get_classificator()
def get_backend():
    """
    Get current authentication backend's instance
    """
    method = config.get("authentication", "method")
    if method == "local":
        import localbackend
        backend_class = localbackend.NOCLocalBackend
    elif method == "http":
        import httpbackend
        backend_class = httpbackend.NOCHTTPBackend
    elif method == "ldap":
        import ldapbackend
        backend_class = ldapbackend.NOCLDAPBackend
    elif method == "ad":
        import adbackend
        backend_class = adbackend.NOCADBackend
    elif method == "pyrule":
        import pyrulebackend
        backend_class = pyrulebackend.NOCPyRuleBackend
    else:
        raise ValueError("Invalid authentication method '%s'" % method)
    return backend_class()
def process_file(self, path, osm2pgsql=None, **kwargs): """ Upload OSM XML to database :returns: Bounding box :rtype: tuple """ # Check file is exists if not os.access(path, os.R_OK): raise CommandError("Cannot read file '%s'" % path) # Run osm2pgsql args = [osm2pgsql, "-m", "-k", "-p", "gis_osm", "-G", "-S", "share/osm2pgsql/default.style", "-d", config.get("database", "name")] if config.get("database", "user"): args += ["-U", config.get("database", "user")] if config.get("database", "password"): args += ["-W"] #, config.get("database", "password")] if config.get("database", "host"): args += ["-H", config.get("database", "host")] if config.get("database", "port"): args += ["-P", config.get("database", "port")] args += [path] print "Importing OSM data from file '%s'" % path subprocess.check_call(args) # Calculate and return bounding box with open(path) as f: d = f.read(4096) match = self.rx_bounds.search(d) if match: return parse_osm_bounds(match.group(1)) match = self.rx_bound_box.search(d) if match: b = match.group(1).split(",") return tuple(float(x) for x in (b[1], b[0], b[3], b[2])) else: raise CommandError("Cannot find bounding box")
def get_seconds(section, option):
    """
    Read a time-interval config option and convert it to seconds.

    Supported suffixes: h (hours), d (days), w (weeks), m (months,
    30 days), y (years, 365 days). A bare number is taken as seconds.

    :param section: config section name
    :param option: config option name
    :returns: interval length in seconds
    :raises ValueError: when the option value is not a valid interval
    """
    v = config.get(section, option)
    m = 1
    if v.endswith("h"):
        v = v[:-1]
        m = 3600
    elif v.endswith("d"):
        v = v[:-1]
        m = 24 * 3600
    elif v.endswith("w"):
        v = v[:-1]
        m = 7 * 24 * 3600
    elif v.endswith("m"):
        v = v[:-1]
        m = 30 * 24 * 3600
    elif v.endswith("y"):
        v = v[:-1]
        m = 365 * 24 * 3600
    try:
        v = int(v)
    except ValueError:
        # BUG FIX: the original raised a plain string, which is itself
        # a TypeError at runtime; raise a proper exception instead
        raise ValueError(
            "Invalid expiration option in %s:%s" % (section, option))
    return v * m