def error_report(reverse=TRACEBACK_REVERSE, logger=logger):
    fp = error_fingerprint()
    r = get_traceback(reverse=reverse, fp=fp)
    logger.error(r)
    if ENABLE_CP:
        fp = error_fingerprint()
        path = os.path.join(CP_NEW, fp + ".json")
        if not os.path.exists(path):
            # @todo: TZ
            # @todo: Installation ID
            c = {
                "ts": datetime.datetime.now().isoformat(),
                "uuid": fp,
                # "installation": None,
                "process": os.path.relpath(sys.argv[0]),
                "branch": get_branch(),
                "tip": get_tip(),
                "traceback": r
            }
            try:
                safe_rewrite(path, json_encode(c))
                if CP_SET_UID:
                    os.chown(path, CP_SET_UID, -1)
                logger.error("Writing CP report to %s", path)
            except OSError, why:
                logger.error("Unable to write CP report: %s", why)
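# A minimal sketch (not part of the original module) showing how a separate
# collector process might consume the crash-point reports written above.
# The CP_NEW directory and the "uuid"/"ts"/"traceback" keys come from the
# dict built in error_report(); iter_cp_reports itself is a hypothetical
# helper name.
import json
import os


def iter_cp_reports(cp_new_dir):
    for fn in os.listdir(cp_new_dir):
        if not fn.endswith(".json"):
            continue
        with open(os.path.join(cp_new_dir, fn)) as f:
            report = json.load(f)
        yield report["uuid"], report["ts"], report["traceback"]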
def handle_manifest(self, *args, **options):
    for r in self.iter_repos(**options):
        sv = defaultdict(set)
        for f in self.iter_repo_files(r):
            tc = BeefTestCase()
            tc.load_beef(f)
            sv[tc.script].add(
                ("%s %s" % (tc.vendor, tc.platform), tc.version))
        # Format manifest
        vp = defaultdict(set)  # vendor -> profile
        ps = defaultdict(set)  # profile -> script
        for s in sv:
            v, p, n = s.split(".")
            pn = "%s.%s" % (v, p)
            vp[v].add(pn)
            ps[pn].add(s)
        o = []
        for v in sorted(vp):
            o += ["# %s" % v]
            for p in sorted(vp[v]):
                o += ["## %s" % p]
                for sn in sorted(ps[p]):
                    o += ["### %s" % sn]
                    vs = defaultdict(set)  # platform -> version
                    for platform, version in sv[sn]:
                        vs[platform].add(version)
                    for platform in sorted(vs):
                        o += [
                            "+ **%s:** %s" % (
                                platform,
                                ", ".join(sorted(vs[platform]))),
                            ""
                        ]
        mf = "\n".join(o)
        path = os.path.join(self.local_repo_path(r), "README.md")
        print "Writing manifest for repo %s" % r.name
        safe_rewrite(path, mf)
def save_beef(self, path, **kwargs):
    def q(s):
        ts = type(s)
        if ts == datetime.datetime:
            return s.isoformat()
        elif ts == dict:
            return dict((k, q(s[k])) for k in s)
        elif ts == list:
            return [q(x) for x in s]
        elif ts == tuple:
            return tuple([q(x) for x in s])
        elif isinstance(s, basestring):
            return str(s).encode("string_escape")
        else:
            return s

    beef = dict((k, getattr(self, k, None)) for k in self.beef_args)
    beef.update(kwargs)
    if not beef.get("date"):
        beef["date"] = datetime.datetime.now()
    if not beef.get("guid"):
        beef["guid"] = str(uuid.uuid4())
    beef = q(beef)
    if os.path.isdir(path):
        path = os.path.join(path, beef["guid"] + ".json")
    safe_rewrite(path, json_encode(beef), mode=0644)
    return path
def write(self, data):
    r = self.get_gridvcs().put(self.id, data)
    if r and self.mirror:
        try:
            safe_rewrite(self.mirror, data)
        except OSError, why:
            logger.error("Cannot mirror file to %s: %s",
                         self.mirror, why)
def _complete_job(self, job, status, tb):
    self.metrics.jobs_time.timer(self.name, job.name, job.key).log(
        job.started, time.time(), status)
    if self.to_log_jobs:
        path = os.path.join(self.log_jobs, job.name, str(job.key))
        safe_rewrite(path, job.get_job_log())
    group = job.get_group()
    if group is not None:
        with self.running_lock:
            self.running_count[group] -= 1
            if not self.running_count[group]:
                del self.running_count[group]
    on_complete = job.on_complete
    t = job.get_schedule(status)
    if t is None:
        # Unschedule job
        self.remove_job(job.name, job.key)
    else:
        # Reschedule job
        t1 = time.time()
        if self.max_faults and status in (Job.S_FAILED, Job.S_EXCEPTION):
            code = None
            if type(tb) == dict:
                code = tb.get("code")
            if code in self.IGNORE_MRT_CODES:
                # Ignore temporary errors
                fc = None
                next_status = self.S_WAIT
            else:
                # Get fault count
                fc = self.get_faults(job.name, job.key) + 1
                if fc >= self.max_faults:
                    # Disable job
                    next_status = self.S_DISABLED
                    self.logger.info(
                        "Disabling job %s(%s) due to %d sequential faults",
                        job.name, job.key, fc)
                else:
                    next_status = self.S_WAIT
        else:
            next_status = self.S_WAIT
            fc = 0
        self.reschedule_job(
            job.name, job.key, t,
            status=next_status,
            last_status=status,
            duration=t1 - job.started,  # @todo: maybe error
            tb=tb,
            update_runs=True,
            faults=fc
        )
    # Reschedule jobs that must be executed on complete
    for job_name, key in on_complete:
        ts = datetime.datetime.now()
        self.reschedule_job(job_name, key, ts, skip_running=True)
def save(self):
    self.logger.info("Updating manifest")
    rows = sorted(
        ([r.name, r.uuid, r.path, r.hash] for r in self.items.values()),
        key=lambda x: x[0])
    rows = [["name", "uuid", "path", "hash"]] + rows
    out = StringIO()
    writer = csv.writer(out)
    writer.writerows(rows)
    safe_rewrite(self.get_collection_path(), out.getvalue(), mode=0644)
    # Update collection cache
    self.logger.info("Updating CollectionCache")
    CollectionCache.merge("%s.%s" % (self.module, self.name),
                          set(self.items))
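# A hedged sketch (assumed helper, not in the original collection code) that
# reads the manifest written by save() back into a dict keyed by uuid; the
# column names match the header row written above.
import csv


def read_manifest(path):
    with open(path) as f:
        reader = csv.DictReader(f)  # columns: name, uuid, path, hash
        return dict((row["uuid"], row) for row in reader)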
def write_xml(self, path):
    """
    Write JUnit-compatible XML output to path
    """
    from xml.dom.minidom import Document

    logging.info("Writing JUnit XML output to '%s'" % path)
    out = Document()
    ts = out.createElement("testsuite")
    out.appendChild(ts)
    ts.setAttribute("tests", str(self.testsRun))
    ts.setAttribute("errors", str(len(self.errors)))
    ts.setAttribute("failures", str(len(self.failures)))
    # ts.setAttribute("name")
    ts.setAttribute("time", str(self.stop_time - self.start_time))
    ts.setAttribute("timestamp", self.timestamp)
    # Append test cases info
    for name, test, status, err in sorted(self.test_results,
                                          key=lambda x: x[0]):
        p = name.split(".")
        tc = out.createElement("testcase")
        ts.appendChild(tc)
        tc.setAttribute("classname", ".".join(p[:-1]))
        tc.setAttribute("name", p[-1])
        tc.setAttribute("time", "%.6f" % self.test_timings[name])
        if status in (self.R_ERROR, self.R_FAILURE):
            e = out.createElement(
                "error" if status == self.R_ERROR else "failure")
            tc.appendChild(e)
            e.setAttribute("type", err[0].__name__)
            e.setAttribute("message", str(err[1]))
            ft = out.createCDATASection(
                "%s: %s" % (err[0].__name__, err[1]) + "\n" +
                format_frames(get_traceback_frames(err[2])) + "\n")
            e.appendChild(ft)
    # Append system-out and system-err
    so = out.createElement("system-out")
    o = out.createCDATASection(self.stdout.get())
    so.appendChild(o)
    ts.appendChild(so)
    se = out.createElement("system-err")
    o = out.createCDATASection(self.stderr.get())
    se.appendChild(o)
    ts.appendChild(se)
    r = out.toprettyxml(indent=" " * 4)
    if path == "-":
        print r
    else:
        safe_rewrite(path, r)
def handle_mirror(self):
    mirror = config.get("gridvcs", "mirror.%s" % self.repo) or None
    if not mirror:
        raise CommandError("No mirror path set")
    mirror = os.path.realpath(mirror)
    self.out("Mirroring")
    if self.repo == "sa.managedobject.config":
        for o in ManagedObject.objects.filter(is_managed=True):
            v = self.get_value(o)
            if v:
                mpath = os.path.realpath(os.path.join(mirror, unicode(o)))
                if mpath.startswith(mirror):
                    self.out("    mirroring %s" % o)
                    safe_rewrite(mpath, v)
                else:
                    self.out("    !!! mirror path violation for %s" % o)
    self.out("Done")
def import_objects(self, objects):
    for o in objects:
        if not o.uuid:
            o.uuid = uuid4()
            o.save()
        data = o.to_json()
        mi = CollectionItem(
            name=self.get_name(o),
            uuid=o.uuid,
            path=o.get_json_path(),
            hash=self.get_hash(data))
        self.items[mi.uuid] = mi
        self.logger.info("Importing %s", mi.name)
        safe_rewrite(
            os.path.join(self.module, "collections",
                         self.name, o.get_json_path()),
            data, mode=0644)
    self.save()
def install_item(self, data, load=False):
    if "$collection" in data and data["$collection"] != self.cname:
        self.die("Installing to invalid collection: %s instead of %s" % (
            data["$collection"], self.cname))
    o = self.doc(**self.dereference(self.doc, data))
    self.logger.info("Installing %s", unicode(o))
    if not o.uuid:
        o.uuid = str(uuid4())
        load = False  # Cannot load due to uuid collision
    dd = o.to_json()
    mi = CollectionItem(
        name=self.get_name(o),
        uuid=o.uuid,
        path=o.get_json_path(),
        hash=self.get_hash(dd))
    self.items[mi.uuid] = mi
    safe_rewrite(
        os.path.join(self.module, "collections",
                     self.name, o.get_json_path()),
        dd, mode=0644)
    if load:
        # Save to database
        self.update_item(mi)
def import_files(self, paths):
    for p in paths:
        if not os.path.exists(p):
            raise ValueError("File does not exist: %s" % p)
        with open(p) as f:
            try:
                data = json_decode(f.read())
            except ValueError, why:
                self.die("Failed to read JSON file '%s': %s" % (p, why))
        if not isinstance(data, dict):
            self.die("Invalid JSON file: %s" % p)
        doc = self.doc(**self.dereference(self.doc, data))
        mi = CollectionItem(
            name=doc.name,
            uuid=doc.uuid,
            path=doc.get_json_path(),
            hash=self.get_hash(data))
        self.items[mi.uuid] = mi
        self.logger.info("Importing %s", doc.name)
        safe_rewrite(
            os.path.join(self.module, "collections",
                         self.name, doc.get_json_path()),
            data, mode=0644)
def load(cls, path, force=False):
    """
    Load MIB from file
    :param path: MIB path
    :param force: Load anyway
    :return: MIB object
    """
    if not os.path.exists(path):
        raise ValueError("File not found: %s" % path)
    # Build SMIPATH variable for smidump
    # to exclude locally installed MIBs
    smipath = ["share/mibs", "local/share/mibs"]
    # Pass MIB through smilint to detect missed modules
    f = subprocess.Popen(
        [config.get("path", "smilint"), "-m", path],
        stderr=subprocess.PIPE,
        env={"SMIPATH": ":".join(smipath)}).stderr
    for l in f:
        match = rx_module_not_found.search(l.strip())
        if match:
            raise MIBRequiredException("Uploaded MIB", match.group(1))
    # Convert MIB to python module and load
    with temporary_file() as p:
        subprocess.check_call(
            [config.get("path", "smidump"), "-k", "-q",
             "-f", "python", "-o", p, path],
            env={"SMIPATH": ":".join(smipath)})
        # Add coding string
        with open(p) as f:
            data = unicode(f.read(), "ascii", "ignore").encode("ascii")
        with open(p, "w") as f:
            f.write(data)
        m = imp.load_source("mib", p)
    mib_name = m.MIB["moduleName"]
    # Check module dependencies
    depends_on = {}  # MIB Name -> Object ID
    if "imports" in m.MIB:
        for i in m.MIB["imports"]:
            if "module" not in i:
                continue
            rm = i["module"]
            if rm in depends_on:
                continue
            md = MIB.objects.filter(name=rm).first()
            if md is None:
                raise MIBRequiredException(mib_name, rm)
            depends_on[rm] = md
    # Get MIB latest revision date
    try:
        last_updated = datetime.datetime.strptime(
            sorted([x["date"] for x in m.MIB[mib_name]["revisions"]])[-1],
            "%Y-%m-%d %H:%M")
    except:
        last_updated = datetime.datetime(year=1970, month=1, day=1)
    # Extract MIB typedefs
    typedefs = {}
    if "typedefs" in m.MIB:
        for t in m.MIB["typedefs"]:
            typedefs[t] = cls.parse_syntax(m.MIB["typedefs"][t])
    # Check whether MIB is already uploaded
    mib_description = m.MIB[mib_name].get("description", None)
    mib = MIB.objects.filter(name=mib_name).first()
    if force and mib:
        # Delete MIB to force update
        MIBData.objects.filter(mib=mib.id).delete()
        mib.clean()
        mib.delete()
        mib = None
    if mib is not None:
        # Skip same version
        if mib.last_updated >= last_updated:
            return mib
        mib.description = mib_description
        mib.last_updated = last_updated
        mib.depends_on = sorted(depends_on)
        mib.typedefs = typedefs
        mib.save()
        # Delete all MIB Data
        mib.clean()
    else:
        # Create MIB
        mib = MIB(name=mib_name,
                  description=mib_description,
                  last_updated=last_updated,
                  depends_on=sorted(depends_on),
                  typedefs=typedefs)
        mib.save()
    # Upload MIB data
    data = []
    for i in ["nodes", "notifications"]:
        if i in m.MIB:
            data += [
                {
                    "name": "%s::%s" % (mib_name, node),
                    "oid": v["oid"],
                    "description": v.get("description"),
                    "syntax": v["syntax"]["type"] if "syntax" in v else None
                } for node, v in m.MIB[i].items()
            ]
    mib.load_data(data)
    # Save MIB to cache if not uploaded from cache
    lcd = os.path.join("local", "share", "mibs")
    if not os.path.isdir(lcd):
        # Ensure directory exists
        os.makedirs(os.path.join("local", "share", "mibs"))
    local_cache_path = os.path.join(lcd, "%s.mib" % mib_name)
    cache_path = os.path.join("share", "mibs", "%s.mib" % mib_name)
    if ((os.path.exists(local_cache_path) and
            os.path.samefile(path, local_cache_path)) or
            (os.path.exists(cache_path) and
             os.path.samefile(path, cache_path))):
        return mib
    with open(path) as f:
        data = f.read()
    safe_rewrite(local_cache_path, data)
    return mib
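# Hedged usage sketch for load(): it assumes load is exposed as a classmethod
# on MIB (the decorator is not shown in the excerpt above) and that MIB and
# MIBRequiredException are importable in the calling module; the MIB file
# path below is illustrative.
try:
    mib = MIB.load("share/mibs/IF-MIB.mib", force=True)
except MIBRequiredException, why:
    # Upload the missing dependency named in the exception, then retry
    raise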
def generate_pair(path, bits=1024):
    k = Key.generate("RSA", bits)
    safe_rewrite(path, k.to_string())
    safe_rewrite(path + ".pub", k.public().to_string())
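# Minimal usage sketch for generate_pair(); the output path is illustrative.
# This writes the private key to /tmp/test_rsa and the matching public key
# to /tmp/test_rsa.pub via safe_rewrite().
generate_pair("/tmp/test_rsa", bits=2048)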