def handle_mirror(self, split=False, path=None, *args, **options):
    from noc.sa.models.managedobject import ManagedObject
    from noc.main.models.pool import Pool

    mirror = os.path.realpath(path)
    self.print("Mirroring to %s" % path)
    if self.repo == "config":
        for o_id, address, pool in self.progress(
            ManagedObject.objects.filter().values_list("id", "address", "pool")
        ):
            pool = Pool.get_by_id(pool)
            data = self.vcs.get(self.clean_id(o_id))
            if data:
                if split:
                    # Place each object under its pool subdirectory
                    mpath = os.path.realpath(
                        os.path.join(mirror, str(pool), str(address))
                    )
                else:
                    mpath = os.path.realpath(os.path.join(mirror, str(address)))
                if mpath.startswith(mirror):
                    safe_rewrite(mpath, data)
                else:
                    self.print("  !!! mirror path violation for %s" % address)
    self.print("Done")
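
# The startswith() check above is the traversal guard: a hostile "address"
# value containing "../" would otherwise escape the mirror root. Note that a
# bare startswith() still accepts sibling paths such as "/mirror-evil" when
# the root is "/mirror"; a stricter containment test (hypothetical helper,
# not part of the original code) could look like this:

import os

def is_inside(base, target):
    """Return True if target resolves to base itself or a path under it."""
    base = os.path.realpath(base)
    target = os.path.realpath(target)
    return target == base or target.startswith(base + os.sep)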
def test_read_write(start, tail, expected):
    fn = os.path.join("/tmp", "noc-test-fu-%d" % time.time())
    safe_rewrite(fn, start)
    safe_append(fn, tail)
    data = read_file(fn)
    os.unlink(fn)
    assert data == expected
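
# For reference, a minimal sketch of the write-then-rename pattern that
# safe_rewrite() is assumed to implement (names and signature here are
# illustrative, not the actual NOC helper):

import os

def atomic_rewrite(path, text, mode=None):
    """Write text to path atomically via a temporary file."""
    d = os.path.dirname(path)
    if d and not os.path.isdir(d):
        os.makedirs(d)
    tmp = path + ".tmp"
    with open(tmp, "w") as f:
        f.write(text)
        f.flush()
        os.fsync(f.fileno())
    if mode is not None:
        os.chmod(tmp, mode)
    os.rename(tmp, path)  # atomic on POSIX within a single filesystem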
def install(cls, data):
    """
    Write data to the proper path
    :param data:
    :return:
    """
    c = Collection(data["$collection"])
    data = c.dereference(data)
    o = c.model(**data)
    # Format JSON
    json_data = o.to_json()
    # Write
    path = os.path.join(cls.PREFIX, c.name, o.get_json_path())
    if "uuid" not in data:
        raise ValueError("Invalid JSON: No UUID")
    c.stdout.write("[%s|%s] Installing %s\n" % (c.name, data["uuid"], path))
    safe_rewrite(path, json_data, mode=0o644)
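
# Illustrative call (collection name and values are hypothetical): install()
# expects a decoded JSON document carrying "$collection" and "uuid" keys:
#
#     Collection.install({
#         "$collection": "fm.alarmclasses",
#         "uuid": "12345678-1234-5678-1234-567812345678",
#         "name": "Example | Alarm",
#     })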
def save_state(self):
    """
    Save current state
    """
    if not self.new_state_path:
        return
    self.logger.info(
        "Summary: %d new, %d changed, %d removed",
        self.c_add, self.c_change, self.c_delete
    )
    self.logger.info(
        "Errors deleting referred objects: %s",
        "\n".join(self.reffered_errors)
    )
    t = time.localtime()
    archive_path = os.path.join(
        self.archive_dir,
        "import-%04d-%02d-%02d-%02d-%02d-%02d.csv.gz" % tuple(t[:6])
    )
    self.logger.info("Moving %s to %s", self.new_state_path, archive_path)
    if self.new_state_path.endswith(".gz"):
        # Already compressed: simply move the file
        shutil.move(self.new_state_path, archive_path)
    else:
        # Compress the file
        self.logger.info("Compressing")
        with open(self.new_state_path, "r") as s:
            with gzip.open(archive_path, "w") as d:
                d.write(s.read())
        os.unlink(self.new_state_path)
    self.logger.info("Saving mappings to %s", self.mappings_path)
    mdata = "\n".join(
        "%s,%s" % (k, self.mappings[k]) for k in sorted(self.mappings)
    )
    safe_rewrite(self.mappings_path, mdata)
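
# Note: the compression branch above reads the whole state file into memory,
# which is fine for modest CSVs. A streaming variant (sketch, equivalent
# behavior) would copy in chunks instead:
#
#     with open(self.new_state_path, "rb") as s:
#         with gzip.open(archive_path, "wb") as d:
#             shutil.copyfileobj(s, d)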
def handle_export(self, list_collection=None, export_path=None,
                  export_collections=None, export_model_names=None,
                  export_model_uuids=None):
    MODELS = {}
    for c in COLLECTIONS:
        cm = get_model(c)
        cn = cm._meta["json_collection"]
        MODELS[cn] = cm
    if list_collection is not None:
        if list_collection is True:
            # List all known collections
            for c in Collection.iter_collections():
                print("%s" % c.name, file=self.stdout)
        else:
            # List objects of a single collection
            if list_collection not in MODELS:
                print("Collection not found", file=self.stdout)
                return
            objs = MODELS[list_collection].objects.all().order_by("name")
            for o in objs:
                print("uuid:%s name:\"%s\"" % (o.uuid, o.name),
                      file=self.stdout)
    else:
        if not export_path or not export_collections:
            return
        if not os.path.isdir(export_path):
            self.die("Path not found: %s" % export_path)
        for ecname in export_collections:
            if ecname not in MODELS:
                print("Collection not found", file=self.stdout)
                continue
            kwargs = {}
            if export_model_names:
                kwargs["name__in"] = export_model_names
            elif export_model_uuids:
                kwargs["uuid__in"] = export_model_uuids
            objs = MODELS[ecname].objects.filter(**kwargs).order_by("name")
            for o in objs:
                path = os.path.join(export_path, ecname, o.get_json_path())
                print("export \"%s\" to %s" % (o.name, path),
                      file=self.stdout)
                safe_rewrite(path, o.to_json(), mode=0o644)
def backup_postgres(self):
    """
    Backup PostgreSQL database
    """
    def pgpass_quote(s):
        return s.replace("\\", "\\\\").replace(":", "\\:")

    now = datetime.datetime.now()
    # host, port, database, user, password
    pgpass = ["*", "*", "*", "*", ""]
    out = "noc-db-%04d-%02d-%02d-%02d-%02d.dump" % (
        now.year, now.month, now.day, now.hour, now.minute)
    out = os.path.join(config.get("path", "backup_dir"), out)
    # Build pg_dump command and options
    cmd = [config.get("path", "pg_dump"), "-Fc"]
    cmd += ["-f", out]
    if config.pg.user:
        cmd += ["-U", config.pg.user]
        pgpass[3] = config.pg.user
    if config.pg.password:
        pgpass[4] = config.pg.password
    cmd += ["-h", config.pg_connection_args["host"]]
    pgpass[0] = config.pg_connection_args["host"]
    if config.pg_connection_args["port"]:
        cmd += ["-p", str(config.pg_connection_args["port"])]
        pgpass[1] = str(config.pg_connection_args["port"])
    cmd += [settings.DATABASES["default"]["NAME"]]
    pgpass[2] = settings.DATABASES["default"]["NAME"]
    # Create temporary .pgpass
    pgpass_data = ":".join([pgpass_quote(x) for x in pgpass])
    pgpass_path = os.path.join(os.getcwd(), "local", "cache",
                               "pgpass", ".pgpass")
    safe_rewrite(pgpass_path, pgpass_data, mode=0o600)
    env = os.environ.copy()
    env["PGPASSFILE"] = pgpass_path
    # Launch pg_dump
    self.info("Dumping PostgreSQL database into %s" % out)
    self.debug(" ".join(cmd))
    retcode = self.subprocess_call(cmd, env=env)
    if retcode != 0:
        self.error("dump failed. Removing broken dump %s" % out)
        self.safe_unlink(out)
        return False
    # Remove leftover .pgpass
    self.safe_unlink(pgpass_path)
    return True
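
# The generated .pgpass entry follows the libpq format
# host:port:database:user:password, e.g. (illustrative values only):
#
#     db.example.com:5432:noc:noc:secret
#
# Literal ":" and "\" in any field must be escaped, which is what
# pgpass_quote() above takes care of.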
def error_report(reverse=config.traceback.reverse, logger=logger):
    fp = error_fingerprint()
    r = get_traceback(reverse=reverse, fp=fp)
    logger.error(r)
    metrics["errors"] += 1
    if config.sentry.url:
        try:
            raven_client.captureException(fingerprint=[fp])
        except Exception as e:
            logger.error("Failed to send problem report to Sentry: %s", e)
    if ENABLE_CP:
        fp = error_fingerprint()
        path = os.path.join(CP_NEW, fp + ".json")
        if os.path.exists(path):
            # Touch file to mark the error as seen again
            os.utime(path, None)
        else:
            metrics["unique_errors"] += 1
            # @todo: TZ
            # @todo: Installation ID
            c = {
                "ts": datetime.datetime.now().isoformat(),
                "uuid": fp,
                # "installation": None,
                "process": SERVICE_NAME,
                "version": version.version,
                "branch": version.branch,
                "tip": version.changeset,
                "changeset": version.changeset,
                "traceback": r,
            }
            try:
                safe_rewrite(path, ujson.dumps(c))
                if CP_SET_UID:
                    os.chown(path, CP_SET_UID, -1)
                logger.error("Writing CP report to %s", path)
            except OSError as e:
                logger.error("Unable to write CP report: %s", e)
    return r
def handle_mirror(self, *args):
    mirror = config.path.config_mirror_path
    if not mirror:
        raise CommandError("No mirror path set")
    mirror = os.path.realpath(mirror)
    self.out("Mirroring")
    if self.repo == "sa.managedobject.config":
        for o in ManagedObject.objects.filter(is_managed=True):
            v = self.get_value(o)
            if v:
                if self.split == "pool":
                    mpath = os.path.realpath(
                        os.path.join(mirror, unicode(o.pool.name), unicode(o))
                    )
                else:
                    mpath = os.path.realpath(os.path.join(mirror, unicode(o)))
                if mpath.startswith(mirror):
                    self.out("    mirroring %s" % o)
                    safe_rewrite(mpath, v)
                else:
                    self.out("    !!! mirror path violation for %s" % o)
    self.out("Done")
def compile(self, data):
    """
    Compile MIB, upload to database and store MIB file
    :param data: MIB text
    :return:
    """
    if not config.path.smilint or not os.path.exists(config.path.smilint):
        return {
            "status": False,
            "msg": "smilint is missing",
            "error": ERR_MIB_TOOL_MISSED,
        }
    if not config.path.smidump or not os.path.exists(config.path.smidump):
        return {
            "status": False,
            "msg": "smidump is missing",
            "error": ERR_MIB_TOOL_MISSED,
        }
    # Put data to a temporary file
    with temporary_file(data) as tmp_path:
        # Pass MIB through smilint to detect missed modules
        self.logger.debug("Pass MIB through smilint to detect missed modules")
        f = subprocess.Popen(
            [config.path.smilint, "-m", tmp_path],
            stderr=subprocess.PIPE,
            env=self.SMI_ENV,
        ).stderr
        for l in f:
            match = self.rx_module_not_found.search(l.strip())
            if match:
                return {
                    "status": False,
                    "msg": "Required MIB missing: %s" % match.group(1),
                    "code": ERR_MIB_MISSED,
                }
        self.logger.debug("Convert MIB to python module and load")
        # Convert MIB to a python module and load it
        with temporary_file() as py_path:
            subprocess.check_call(
                [config.path.smidump, "-k", "-q",
                 "-f", "python", "-o", py_path, tmp_path],
                env=self.SMI_ENV,
            )
            # Strip non-ASCII characters smidump may leave behind
            with open(py_path) as f:
                p_data = unicode(f.read(), "ascii", "ignore").encode("ascii")
            with open(py_path, "w") as f:
                f.write(p_data)
            m = imp.load_source("mib", py_path)
    # Now we can deduce the module name
    mib_name = m.MIB["moduleName"]
    # Check module dependencies
    depends_on = {}  # MIB Name -> Object ID
    self.logger.debug("Check module dependencies: %s",
                      m.MIB.get("imports", ""))
    if "imports" in m.MIB:
        for i in m.MIB["imports"]:
            if "module" not in i:
                continue
            rm = i["module"]
            if rm in depends_on:
                continue
            md = MIB.get_by_name(rm)
            if md is None:
                return {
                    "status": False,
                    "msg": "Required MIB missing: %s" % rm,
                    "code": ERR_MIB_MISSED,
                }
            depends_on[rm] = md
    # Get MIB latest revision date
    try:
        last_updated = datetime.datetime.strptime(
            sorted([x["date"] for x in m.MIB[mib_name]["revisions"]])[-1],
            "%Y-%m-%d %H:%M",
        )
    except ValueError:
        last_updated = datetime.datetime(year=1970, month=1, day=1)
    self.logger.debug("Extract MIB typedefs")
    # Extract MIB typedefs
    typedefs = {}
    if "typedefs" in m.MIB:
        for t in m.MIB["typedefs"]:
            typedefs[t] = MIB.parse_syntax(m.MIB["typedefs"][t])
    # Check whether the MIB is already uploaded
    mib_description = m.MIB[mib_name].get("description", None)
    mib = MIB.objects.filter(name=mib_name).first()
    if mib is not None:
        # Update existing MIB
        mib.description = mib_description
        mib.last_updated = last_updated
        mib.depends_on = sorted(depends_on)
        mib.typedefs = typedefs
        mib.save()
        # Delete all MIB Data
        mib.clean()
    else:
        # Create MIB
        mib = MIB(
            name=mib_name,
            description=mib_description,
            last_updated=last_updated,
            depends_on=sorted(depends_on),
            typedefs=typedefs,
        )
        mib.save()
    # Upload MIB data
    cdata = []
    for i in ["nodes", "notifications"]:
        if i in m.MIB:
            cdata += [
                {
                    "name": "%s::%s" % (mib_name, node),
                    "oid": v["oid"],
                    "description": v.get("description"),
                    "syntax": v["syntax"]["type"] if "syntax" in v else None,
                }
                for node, v in six.iteritems(m.MIB[i])
            ]
    mib.load_data(cdata)
    # Move file to permanent place
    safe_rewrite(self.get_path(mib_name), data)
    return {"status": True, "mib": mib_name}
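
# Illustrative usage (file name and variable names are hypothetical): feed
# raw MIB text to compile() and inspect the result dict it returns:
#
#     with open("IF-MIB.mib") as f:
#         result = compiler.compile(f.read())
#     if result["status"]:
#         print("Compiled %s" % result["mib"])
#     else:
#         print("Failed: %s" % result["msg"])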