async def send(self, method: str = None, body: str = None):
    # @todo: Apply encoding
    self.error = None
    body = body or ""
    method = method or self.method
    h = {
        # "Host": str(u.netloc),
        # "Connection": "close",
        "CSeq": self.cseq,
        "User-Agent": DEFAULT_USER_AGENT,
    }
    if self.auth:
        h["Authorization"] = self.auth.build_digest_header(
            self.get_uri(), method, self.headers["WWW-Authenticate"]["Digest"]
        )
    req = b"%s %s %s\r\n%s\r\n\r\n%s" % (
        smart_bytes(method),
        smart_bytes(self.get_uri()),
        DEFAULT_PROTOCOL,
        b"\r\n".join(b"%s: %s" % (smart_bytes(k), smart_bytes(h[k])) for k in h),
        smart_bytes(body),
    )
    self.logger.debug("Send: %r", req)
    await self.stream.write(req)
    self.cseq += 1
def __new__(mcs, name, bases, attrs):
    n = type.__new__(mcs, name, bases, attrs)
    n.rogue_char_cleaners = n._get_rogue_chars_cleaners()
    #
    if n.command_more:
        warnings.warn(
            "%s: 'command_more' is deprecated and will be removed in NOC 20.3" % n.name,
            RemovedInNOC2003Warning,
        )
    if isinstance(n.pattern_more, str):
        warnings.warn(
            "%s: 'command_more' must be a list of (pattern, command). "
            "Support for textual 'command_more' will be removed in NOC 20.3" % n.name,
            RemovedInNOC2003Warning,
        )
        n.pattern_more = [(n.pattern_more, n.command_more)]
        n.command_more = None
    # Fix binary attributes
    for attr in mcs.BINARY_ATTRS:
        v = getattr(n, attr, None)
        if v is not None and isinstance(v, str):
            warnings.warn(
                "%s: '%s' must be of binary type. Support for text values will be removed in NOC 20.3"
                % (n.name, attr),
                RemovedInNOC2003Warning,
            )
            setattr(n, attr, smart_bytes(v))
    # Fix command_more
    pattern_more = []
    for pattern, cmd in n.pattern_more:
        if not isinstance(pattern, bytes):
            warnings.warn(
                "%s: 'pattern_more' %r pattern must be of binary type. "
                "Support for text values will be removed in NOC 20.2" % (n.name, pattern)
            )
            pattern = smart_bytes(pattern)
        if not isinstance(cmd, bytes):
            warnings.warn(
                "%s: 'pattern_more' %r command must be of binary type. "
                "Support for text values will be removed in NOC 20.2" % (n.name, cmd)
            )
            cmd = smart_bytes(cmd)
        pattern_more += [(pattern, cmd)]
    n.pattern_more = pattern_more
    # Build patterns
    n.patterns = n._get_patterns()
    # Build effective snmp_display_hints for subclasses
    if n.name:
        snmp_display_hints = {}
        for b in bases:
            if issubclass(b, BaseProfile):
                snmp_display_hints.update(b.snmp_display_hints)
        snmp_display_hints.update(attrs.get("snmp_display_hints", {}))
        n.snmp_display_hints = {
            k: snmp_display_hints[k] for k in snmp_display_hints if snmp_display_hints[k]
        }
    return n
def send_message(
    data: Any,
    message_type: str,
    headers: Optional[Dict[str, bytes]],
    sharding_key: int = 0,
):
    """
    Build message and schedule to send to mx service

    :param data:
    :param message_type:
    :param headers:
    :param sharding_key:
    :return:
    """
    msg_headers = {
        MX_MESSAGE_TYPE: smart_bytes(message_type),
        MX_SHARDING_KEY: smart_bytes(sharding_key),
    }
    if headers:
        msg_headers.update(headers)
    svc = get_service()
    n_partitions = get_mx_partitions()
    svc.publish(
        value=orjson.dumps(data),
        stream=MX_STREAM,
        partition=sharding_key % n_partitions,
        headers=msg_headers,
    )
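# Usage sketch (illustrative; the message type, header and payload values are
# assumptions, not taken from the original source):
send_message(
    data={"event": "config_changed", "managed_object": 42},
    message_type="event",
    headers={"From": b"discovery"},
    sharding_key=42,
)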
def get_auth(self, params):
    """
    Web JS
    i.getAuth = function(a, b, c) {
        switch (c = c || j.encryption) {
            case "Basic":
                return Base64.encode(a + ":" + b);
            case "Default":
                return hex_md5(a + ":" + j.random + ":" + hex_md5(a + ":" + j.realm + ":" + b));
            default:
                return b
        }

    :param params: response params dictionary
    :type params: dict
    :return: Password string
    :rtype: str
    """
    if params["encryption"] == "Basic":
        return codecs.encode("%s:%s" % (self.user, self.password), "base64")
    elif params["encryption"] == "Default":
        A1 = (
            hashlib.md5(smart_bytes("%s:%s:%s" % (self.user, params["realm"], self.password)))
            .hexdigest()
            .upper()
        )
        return (
            hashlib.md5(smart_bytes("%s:%s:%s" % (self.user, params["random"], A1)))
            .hexdigest()
            .upper()
        )
    return self.password
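# Usage sketch (illustrative; `client`, the realm and the random challenge are
# assumptions, not taken from the original source).
# "Basic"   -> base64("user:password")
# "Default" -> MD5(user:random:MD5(user:realm:password).upper()).upper()
token = client.get_auth({"encryption": "Default", "realm": "Login to device", "random": "1234"})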
def handle_import(self, storage, path=None, paths=None, *args, **options):
    """
    Import YAML files into beef

    :param storage:
    :param path:
    :param paths:
    :return:
    """
    for import_path in paths:
        self.print("Importing %s ..." % import_path)
        with open(import_path, "r") as f:
            data = yaml.safe_load(f)
        for c in data["cli_fsm"]:
            c["reply"] = [
                codecs.encode(smart_bytes(reply), self.CLI_ENCODING) for reply in c["reply"]
            ]
        for c in data["cli"]:
            c["reply"] = [
                codecs.encode(smart_bytes(reply), self.CLI_ENCODING) for reply in c["reply"]
            ]
        for m in data["mib"]:
            m["value"] = codecs.encode(smart_bytes(m["value"]), self.CLI_ENCODING)
        try:
            beef = Beef.from_json(data)
        except ValueError:
            self.print("Error when importing beef file %s" % import_path)
            continue
        st = self.get_storage(storage, beef=True)
        if not path:
            path = smart_text(self.DEFAULT_BEEF_IMPORT_PATH_TEMPLATE.format(beef))
        beef.save(st, smart_text(path))
def save(self, storage, path):
    """
    Write beef to external storage. Compression depends on the extension.
    The following extensions are supported:

    * .json - JSON without compression
    * .json.gz - JSON with gzip compression
    * .json.bz2 - JSON with bzip2 compression

    :param storage: ExtStorage instance
    :param path: Beef path
    :return: Compressed, Uncompressed sizes
    """
    data = ujson.dumps(self.get_data())
    usize = len(data)
    dir_path = os.path.dirname(path)
    if path.endswith(".gz"):
        data = self.compress_gzip(smart_bytes(data))
    elif path.endswith(".bz2"):
        data = self.compress_bz2(smart_bytes(data))
    csize = len(data)
    try:
        with storage.open_fs() as fs:
            if dir_path and dir_path != "/":
                fs.makedirs(dir_path, recreate=True)
            fs.writebytes(path, bytes(data))
    except storage.Error as e:
        raise IOError(str(e))
    return csize, usize
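# Usage sketch (illustrative; `beef`, `storage` and the path are assumptions,
# not taken from the original source). The path extension selects compression:
csize, usize = beef.save(storage, "full/Cisco.IOS/device1.json.gz")  # gzip-compressed JSON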
def decode_v1(self, data):
    self.version = self.get_or_die(data, "version")
    self.uuid = self.get_or_die(data, "uuid")
    self.spec = self.get_or_die(data, "spec")
    box = self.get_or_die(data, "box")
    self.box = Box(
        profile=self.get_or_die(box, "profile"),
        vendor=self.get_or_die(box, "vendor"),
        platform=self.get_or_die(box, "platform"),
        version=self.get_or_die(box, "version"),
    )
    self.changed = self.get_or_die(data, "changed")
    self.description = data.get("description") or ""
    self.cli_fsm = [
        CLIFSM(
            state=self.get_or_die(d, "state"),
            reply=[smart_bytes(n) for n in self.get_or_die(d, "reply")],
        )
        for d in self.get_or_die(data, "cli_fsm")
    ]
    self.cli = [
        CLI(
            names=[n for n in self.get_or_die(d, "names")],
            request=smart_bytes(self.get_or_die(d, "request")),
            reply=[smart_bytes(n) for n in self.get_or_die(d, "reply")],
        )
        for d in self.get_or_die(data, "cli")
    ]
    self.mib_encoding = self.get_or_die(data, "mib_encoding")
    self.mib = [
        MIB(oid=self.get_or_die(d, "oid"), value=smart_bytes(self.get_or_die(d, "value")))
        for d in self.get_or_die(data, "mib")
    ]
    self._mib_decoder = getattr(self, "mib_decode_%s" % self.mib_encoding)
    self.cli_encoding = self.get_or_die(data, "cli_encoding")
    self._cli_decoder = getattr(self, "cli_decode_%s" % self.cli_encoding)
def is_differ(path, content):
    """
    Check whether the file content differs from the given string
    """
    if os.path.isfile(path):
        with open(path) as f:
            cs1 = hashlib.sha1(smart_bytes(f.read())).digest()
        cs2 = hashlib.sha1(smart_bytes(content)).digest()
        return cs1 != cs2
    return True
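# Usage sketch (illustrative; the path, `new_config` and `write_config` are
# hypothetical, not taken from the original source). Returns True when the file
# is missing or its SHA1 differs from the supplied content:
if is_differ("/etc/noc/generated.conf", new_config):
    write_config("/etc/noc/generated.conf", new_config)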
def __new__(mcs, name, bases, attrs):
    n = type.__new__(mcs, name, bases, attrs)
    n.rogue_char_cleaners = n._get_rogue_chars_cleaners()
    #
    if n.command_more:
        warnings.warn(
            "%s: 'command_more' is deprecated and will be removed in NOC 20.2" % n.name,
            RemovedInNOC2002Warning,
        )
    if isinstance(n.pattern_more, six.string_types):
        warnings.warn(
            "%s: 'command_more' must be a list of (pattern, command). "
            "Support for textual 'command_more' will be removed in NOC 20.2" % n.name,
            RemovedInNOC2002Warning,
        )
        n.pattern_more = [(n.pattern_more, n.command_more)]
        n.command_more = None
    # Fix binary attributes
    for attr in mcs.BINARY_ATTRS:
        v = getattr(n, attr, None)
        if v is not None and isinstance(v, six.text_type):
            warnings.warn(
                "%s: '%s' must be of binary type. Support for text values will be removed in NOC 20.2"
                % (n.name, attr),
                RemovedInNOC2002Warning,
            )
            setattr(n, attr, smart_bytes(v))
    # Fix command_more
    pattern_more = []
    for pattern, cmd in n.pattern_more:
        if not isinstance(pattern, six.binary_type):
            warnings.warn(
                "%s: 'pattern_more' %r pattern must be of binary type. "
                "Support for text values will be removed in NOC 20.2" % (n.name, pattern)
            )
            pattern = smart_bytes(pattern)
        if not isinstance(cmd, six.binary_type):
            warnings.warn(
                "%s: 'pattern_more' %r command must be of binary type. "
                "Support for text values will be removed in NOC 20.2" % (n.name, cmd)
            )
            cmd = smart_bytes(cmd)
        pattern_more += [(pattern, cmd)]
    n.pattern_more = pattern_more
    # Build patterns
    n.patterns = n._get_patterns()
    return n
def get_digest(self, uri, realm):
    """
    Calculate the HA1/HA2 halves of an HTTP Digest response.
    The HTTP method (GET/POST) is taken from self.method.

    :param uri:
    :param realm:
    :return:
    """
    A1 = "%s:%s:%s" % (self.user, realm, self.password)
    A2 = "%s:%s" % (self.method, uri)
    HA1 = hashlib.md5(smart_bytes(A1)).hexdigest()
    HA2 = hashlib.md5(smart_bytes(A2)).hexdigest()
    return HA1, HA2
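# Usage sketch (illustrative; `client`, the uri and the realm are hypothetical,
# not taken from the original source). HA1/HA2 are the standard digest halves:
# HA1 = MD5(user:realm:password), HA2 = MD5(method:uri).
ha1, ha2 = client.get_digest("/cgi-bin/snapshot.cgi", "Login to device")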
def api_job_log(self, request, id):
    o = self.get_object_or_404(ManagedObject, id=id)
    if not o.has_access(request.user):
        return self.response_forbidden("Access denied")
    r = []
    for job in self.job_map:
        key = "discovery-%s-%s" % (job, o.id)
        d = get_db()["noc.joblog"].find_one({"_id": key})
        if d and d["log"]:
            r += [b"\n", smart_bytes(job), b"\n"]
            r += [zlib.decompress(smart_bytes(d["log"]))]
    if r:
        return self.render_plain_text(b"".join(r))
    else:
        return self.render_plain_text("No data")
def from_route(cls, route: MessageRoute) -> "Route":
    """
    Build Route from database config

    :param route:
    :return:
    """
    r = Route(route.name)
    # Compile match section
    match_eq: DefaultDict[str, List[bytes]] = defaultdict(list)
    match_re: DefaultDict[str, List[bytes]] = defaultdict(list)
    match_ne: List[Tuple[str, bytes]] = []
    for match in route.match:
        if match.is_eq:
            match_eq[match.header] += [smart_bytes(match.value)]
        elif match.is_ne:
            match_ne += [(match.header, smart_bytes(match.value))]
        elif match.is_re:
            match_re[match.header] += [smart_bytes(match.value)]
    expr = []
    # Expression for ==
    for header in match_eq:
        if len(match_eq[header]) == 1:
            # ==
            expr += ["headers[%r] == %r" % (header, match_eq[header][0])]
        else:
            # in
            expr += [
                "headers[%r] in (%s)"
                % (header, ", ".join("%r" % x for x in match_eq[header]))
            ]
    # Expression for !=
    for header, value in match_ne:
        expr += ["headers[%r] != %r" % (header, smart_bytes(value))]
    # Expression for regex
    # @todo
    # Compile matching code
    if expr:
        cond_code = " and ".join(expr)
    else:
        cond_code = "True"
    r.match_co = compile(cond_code, "<string>", "eval")
    # Compile transmute part
    r.transmutations = [Transmutation.from_transmute(t) for t in route.transmute]
    # Compile action part
    r.actions = [Action.from_action(a) for a in route.action]
    return r
def get_vpn_id(vpn: Dict[str, Any]) -> str:
    """
    Calculate RFC 2685-compatible VPN ID

    :param vpn: Dict containing the following keys
        * type - VPN type ("VRF", "VPLS", "VLL", "EVPN")
        * vpn_id (optional) - given vpn id
        * rd (optional) - VRF RD
        * name (optional) - Local VPN name
        * rt_export (optional) - List of exported route targets (["xx:yy", ..., "xx:yy"])
    :return:
    """
    vpn_id = vpn.get("vpn_id")
    if vpn_id:
        # Already calculated
        return vpn_id.lower()
    # Generate VPN identity fingerprint
    rt_export = vpn.get("rt_export", [])
    if rt_export:
        identity = ":".join(sorted(rt_export))
    elif vpn.get("rd"):
        if vpn["rd"] == "0:0":
            return "0:0"
        identity = vpn["rd"]
    elif vpn.get("name"):
        identity = vpn["name"]
    else:
        raise ValueError("Cannot calculate VPN id")
    identity = "%s:%s" % (T_MAP.get(vpn["type"], vpn["type"]), identity)
    # RFC 2685 declares VPN ID as <IEEE OUI (3 octets)>:<VPN number (4 octets)>.
    # Use the reserved OUI range 00 00 00 - 00 00 FF,
    # so we have 5 octets to fill the vpn id.
    # Use the last 5 octets of siphash 2-4.
    i_hash = siphash24(SIPHASH_SEED, smart_bytes(identity))
    return "%x:%x" % struct.unpack("!BI", i_hash[3:])
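# Usage sketch (illustrative; the vpn_id and RD values are assumptions, not
# taken from the original source). An explicit vpn_id is passed through
# lowercased; otherwise the id is derived from rt_export, rd or name:
get_vpn_id({"type": "VRF", "vpn_id": "0:BEEF"})  # -> "0:beef"
get_vpn_id({"type": "VRF", "rd": "65000:100"})   # -> deterministic "<oui>:<number>" from siphash 2-4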
def iter_actions(self) -> Iterator[Tuple[str, Dict[str, bytes]]]:
    """
    mx-compatible actions. Yields tuples of `stream`, `headers`

    :return:
    """
    for method, param, _ in self.active_members:
        yield MX_STREAMS[method], {"To": smart_bytes(param)}
def encode(cls, data):
    """
    v1 encoding: bz2 compression

    :param data:
    :return:
    """
    return bz2.compress(smart_bytes(data), 9)
def on_read(self, data, address):
    metrics["syslog_msg_in"] += 1
    cfg = self.service.lookup_config(address[0])
    if not cfg:
        return  # Invalid event source
    # Convert data to valid UTF8
    data = smart_bytes(smart_text(data, errors="ignore"))
    # Parse priority
    priority = 0
    if data.startswith(b"<"):
        idx = data.find(b">")
        if idx == -1:
            return
        try:
            priority = int(data[1:idx])
        except ValueError:
            pass
        data = data[idx + 1:].strip()
    # Get timestamp
    ts = int(time.time())
    #
    self.service.register_message(cfg, ts, data, facility=priority >> 3, severity=priority & 7)
def out(self, s):
    if isinstance(s, str):
        sys.stdout.write(s.encode("utf-8"))
    else:
        sys.stdout.write(smart_bytes(smart_text(s, encoding=self.encoding)))
    sys.stdout.flush()
def _get_rogue_chars_cleaners(cls):
    def get_bytes_cleaner(s):
        def _inner(x):
            return x.replace(s, b"")

        return _inner

    def get_re_cleaner(s):
        def _inner(x):
            return s.sub(b"", x)

        return _inner

    chain = []
    if cls.rogue_chars:
        for rc in cls.rogue_chars:
            if isinstance(rc, six.text_type):
                chain += [get_bytes_cleaner(smart_bytes(rc))]
            elif isinstance(rc, six.binary_type):
                chain += [get_bytes_cleaner(rc)]
            elif hasattr(rc, "sub"):
                chain += [get_re_cleaner(rc)]
            else:
                raise ValueError("Invalid rogue char expression: %r" % rc)
    return chain
def get_publish_request(
    value: bytes,
    stream: Optional[str] = None,
    key: Optional[bytes] = None,
    partition: Optional[int] = None,
    headers: Optional[Dict[str, bytes]] = None,
    ack_inbox: Optional[str] = None,
    correlation_id: Optional[str] = None,
    ack_policy: AckPolicy = AckPolicy.LEADER,
    auto_compress: bool = False,
) -> PublishRequest:
    to_compress = (
        auto_compress
        and config.liftbridge.compression_method
        and config.liftbridge.compression_threshold
        and len(value) >= config.liftbridge.compression_threshold
    )
    if to_compress:
        value = get_compressor(config.liftbridge.compression_method)(value)
    # Publish Request
    req = PublishRequest(value=value, ackPolicy=ack_policy.value)
    if stream:
        req.stream = stream
    if key:
        req.key = key
    if partition:
        req.partition = partition
    if to_compress:
        req.headers[H_ENCODING] = smart_bytes(config.liftbridge.compression_method)
    if headers:
        for h, v in headers.items():
            req.headers[h] = v
    if ack_inbox:
        req.ackInbox = ack_inbox
    if correlation_id:
        req.correlationId = correlation_id
    return req
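# Usage sketch (illustrative; the stream name, header and payload are
# assumptions, not taken from the original source). When auto_compress is set
# and the value exceeds the configured threshold, it is compressed and tagged
# with the H_ENCODING header:
req = get_publish_request(
    value=b'{"k": 1}',
    stream="events",
    partition=0,
    headers={"Message-Type": b"event"},
    auto_compress=True,
)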
def set_dashboard(self, config):
    """
    Save dashboard config.

    :param config:
    :return: dashboard id
    """
    if "id" in config:
        d = self._get_dashboard(config["id"], access_level=1)
        if not d:
            metrics["error", ("type", "dashboard_not_found")] += 1
            raise APIError("Dashboard not found")
    else:
        d = Dashboard.objects.filter(title=config.get("title")).first()
        if d:
            metrics["error", ("type", "bad_dashboard_name")] += 1
            raise APIError("Dashboard name exists")
        d = Dashboard(id=str(bson.ObjectId()), owner=self.handler.current_user)
        d.format = config.get("format", 1)
        config["id"] = str(d.id)
    d.config = zlib.compress(smart_bytes(ujson.dumps(config)))
    d.changed = datetime.datetime.now()
    d.title = config.get("title")  # @todo: Generate title
    d.description = config.get("description")
    d.tags = config.get("tags", [])
    d.save()
    return str(d.id)
def resolve_pattern_prompt(self, match):
    """
    Resolve adaptive pattern prompt
    """
    old_pattern_prompt = self.patterns["prompt"].pattern
    pattern_prompt = old_pattern_prompt
    sl = self.profile.can_strip_hostname_to
    for k, v in six.iteritems(match.groupdict()):
        if v:
            k = smart_bytes(k)
            if k == b"hostname" and sl and len(v) > sl:
                ss = list(reversed(v[sl:]))
                v = re.escape(v[:sl]) + reduce(
                    lambda x, y: b"(?:%s%s)?" % (re.escape(y), x),
                    ss[1:],
                    b"(?:%s)?" % re.escape(ss[0]),
                )
            else:
                v = re.escape(v)
            pattern_prompt = replace_re_group(pattern_prompt, b"(?P<%s>" % k, v)
            pattern_prompt = replace_re_group(pattern_prompt, b"(?P=%s" % k, v)
        else:
            self.logger.error("Invalid prompt pattern")
    if old_pattern_prompt != pattern_prompt:
        self.logger.debug("Refining pattern prompt to %r", pattern_prompt)
    self.patterns["prompt"] = re.compile(pattern_prompt, re.DOTALL | re.MULTILINE)
def on_unprivileged_prompt(self, data, match):
    self.set_state("unprivileged_prompt")
    if self.to_raise_privileges:
        # Start privilege raising sequence
        if not self.profile.command_super:
            self.on_failure(data, match, CLINoSuperCommand)
        self.send(
            smart_bytes(self.profile.command_super, encoding=self.native_encoding)
            + (self.profile.command_submit or b"\n")
        )
        # Do not remove `pager` section
        # It fixes this situation on Huawei MA5300:
        # xxx>enable
        # { <cr>|level-value<U><1,15> }:
        # xxx#
        self.expect(
            {
                "username": self.on_super_username,
                "password": self.on_super_password,
                "prompt": self.on_prompt,
                "pager": self.send_pager_reply,
            },
            self.profile.cli_timeout_super,
        )
    else:
        # Do not raise privileges
        # Use unprivileged prompt as primary prompt
        self.patterns["prompt"] = self.patterns["unprivileged_prompt"]
        return self.on_prompt(data, match)
async def whois_async(query, fields=None):
    """
    Perform whois request

    :param query:
    :param fields:
    :return:
    """
    logger.debug("whois %s", query)
    # Get appropriate whois server
    if is_fqdn(query):
        # Use TLD.whois-servers.net for domain lookup
        tld = query.split(".")[-1]
        server = "%s.whois-servers.net" % tld
    else:
        server = DEFAULT_WHOIS_SERVER
    # Perform query
    try:
        client = TCPClient()
        stream = await client.connect(server, DEFAULT_WHOIS_PORT)
    except IOError as e:
        logger.error("Cannot resolve host '%s': %s", server, e)
        return
    try:
        await stream.write(smart_bytes(query) + b"\r\n")
        data = await stream.read_until_close()
    finally:
        stream.close()
    data = smart_text(data)
    data = parse_response(data)
    if fields:
        data = [(k, v) for k, v in data if k in fields]
    return data
def send_message(cls, data: Dict[str, Any], change_id: bson.ObjectId) -> None:
    """
    Send MX message

    :param data:
    :param change_id:
    :return:
    """
    data["$changeid"] = str(change_id)
    # Build headers
    headers = {
        MX_CHANGE_ID: smart_bytes(change_id),
    }
    additional_headers = cls.get_msg_headers(data)
    if additional_headers:
        headers.update(additional_headers)
    # Schedule to send
    send_message(
        data,
        message_type=cls.name,
        headers=headers,
        sharding_key=hash_int(data["id"]) & 0xFFFFFFFF,
    )
    # Cleanup
    del data["$changeid"]
def load(self):
    """
    Load up all the rules and populate the chains

    :return:
    """
    for route in MessageRoute.objects.filter(is_active=True).order_by("order"):
        self.chains[smart_bytes(route.type)] += [Route.from_route(route)]
def mib_decode_base64(value):
    # type: (six.binary_type) -> six.binary_type
    """
    Decode base64

    :param value:
    :return:
    """
    return codecs.decode(smart_bytes(value), "base64")
def cli_decode_quopri(value):
    # type: (six.binary_type) -> six.binary_type
    """
    Decode quoted-printable

    :param value:
    :return:
    """
    return codecs.decode(smart_bytes(value), "quopri")
def hash_str(value):
    """
    Calculate siphash 2-4 hash of value

    :param value: String
    :return: Hash digest (bytes)
    """
    return siphash24(SIPHASH_SEED, smart_bytes(smart_text(value)))
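# Usage sketch (illustrative; the argument is an assumption, not taken from the
# original source). Any value is coerced to text, then to bytes, and hashed
# with siphash 2-4 under SIPHASH_SEED:
digest = hash_str("managed_object:42")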
def handle_show(self, options, events, show_json=False):
    limit = int(options["limit"])
    to_suppress = options["suppress"]
    seen = set()  # Message hashes
    if show_json:
        self.stdout.write("[\n")
        spool = None
    else:
        self.stdout.write("ID, Object, Class, Subject\n")
    for e in events:
        subject = unescape(e.subject)
        if to_suppress:
            # Replace volatile parts
            s = self.rx_volatile_date.sub("", subject)
            s = self.rx_ip.sub("$IP", s)
            s = self.rx_float.sub("$FLOAT", s)
            s = self.rx_int.sub("$INT", s)
            sh = hashlib.sha1(smart_bytes(s)).hexdigest()
            # Check subject is already seen
            if sh in seen:
                # Suppress seen
                continue
            seen.add(sh)
        if show_json:
            if spool:
                print(spool + ",")
            s = [" {"]
            s += [' "profile": "%s",' % json_escape(e.managed_object.profile.name)]
            s += [' "raw_vars": {']
            x = []
            vars = e.raw_vars
            keys = []
            lkeys = [k for k in vars if k not in ("1.3.6.1.2.1.1.3.0",)]
            for k in ("source", "profile", "1.3.6.1.6.3.1.1.4.1.0"):
                if k in vars:
                    keys += [k]
                    lkeys.remove(k)
            keys += sorted(lkeys)
            for k in keys:
                if k in ("collector",):
                    continue
                x += [' "%s": "%s"' % (json_escape(k), json_escape(vars[k]))]
            s += [",\n".join(x)]
            s += [" }"]
            s += [" }"]
            spool = "\n".join(s)
        else:
            self.stdout.write(
                "%s, %s, %s, %s\n" % (e.id, e.managed_object.name, e.event_class.name, subject)
            )
        if limit:
            limit -= 1
            if not limit:
                break
    if show_json:
        if spool:
            self.stdout.write(spool)
        print("]")
def generate_serial(self, model, number):
    """
    Generate virtual serial number
    """
    seed = [str(self.object.id), str(model.uuid), str(number)]
    for k in sorted(x for x in self.ctx if not x.startswith("N")):
        seed += [k, str(self.ctx[k])]
    h = hashlib.sha256(smart_bytes(":".join(seed)))
    return "NOC%s" % base64.b32encode(h.digest())[:7]