def vobj_to_typed(val, root, typelist):
    # Append a typed child element carrying *val* to *root*, choosing the
    # xCard value type from *typelist* keyed by root's local tag name.
    tag = root.tag
    # Strip the XML namespace prefix: "{ns}tag" -> "tag".
    tag = tag[1 + tag.rindex('}'):]
    if tag in typelist:
        types = typelist[tag]
        if isinstance(types, tuple):
            # Several candidate types: flatten a sequence value to one
            # space-joined string, then use the first type whose checker
            # accepts the value.
            if is_sequence(val):
                val = u' '.join(val)
            for t in types:
                if type_checkers[t](val):
                    append_typed_el(root, t, val)
                    break
            else:
                # No checker matched -- fall back to 'unknown'.
                append_typed_el(root, 'unknown', val)
        elif types == 'text-list':
            # One <text> element per list item; nested sequences are
            # comma-joined first.
            if not is_sequence(val):
                val = [val]
            for v in val:
                if is_sequence(v):
                    v = ",".join(v)
                append_typed_el(root, 'text', v)
        else:
            # A single fixed type name.
            append_typed_el(root, types, val)
    else:
        # Tag not in the type map: emit the value as 'unknown'.
        if is_sequence(val):
            val = u' '.join(val)
        append_typed_el(root, 'unknown', val)
def find(cls, fields):
    # Return the JSON representations of every unexpired stored query
    # whose metadata satisfies ALL constraints in *fields*.
    # `q` is set to None as a "rejected" sentinel to break out of the
    # constraint loop.
    r = Config.redis()
    if not r:
        # No redis backend configured -- nothing stored to search.
        return []
    results = []
    ids = Query.get_unexpired()
    for i in ids:
        q = Query(q_id=i)
        if not q.query:
            # sometimes query meta data is incomplete, usually when I'm
            # break^H^H^H^H^Htesting.
            continue
        for k, v in fields.items():
            if k in ('after-ago', 'after', 'before-ago', 'before'):
                # Time-window filter: v is either a relative duration
                # ("...-ago") or an ISO-8601 timestamp; normalize to a
                # naive UTC datetime before comparing against q.queried.
                dur = parse_duration(v)
                if dur:
                    v = (datetime.utcnow() - dur)
                else:
                    v = inputs.datetime_from_iso8601(v)
                pass
                if (q.queried < v) and k in ('after-ago', 'after'):
                    q = None
                    break
                elif (q.queried > v) and k in ('before-ago', 'before'):
                    q = None
                    break
                pass
            elif k in ('sensors', ):
                # Sensor sets must match exactly (order-insensitive).
                if frozenset(q.sensors) != frozenset(v):
                    q = None
                    break
            elif k in ('limit-packets', 'limit-bytes'):
                # Limits do not narrow the search.
                continue
            elif k not in q.query:
                # Constraint key does not even appear in the stored
                # query string.
                Config.logger.info("Skipping: {} - {}".format(q.query, k))
                q = None
                break
            else:
                # Key is present; every value must also appear as a
                # substring of the stored query string.
                if is_sequence(v) and v != [
                        vi for vi in v if q.query.find(vi) >= 0
                ]:
                    Config.logger.info("Skipping: {} - {}".format(
                        q.query, v))
                    q = None
                    break
                elif is_str(v) and v not in q.query:
                    Config.logger.info("Skipping: {} - {}".format(
                        q.query, v))
                    q = None
                    break
        if q:
            results.append(q.json())
    return results
def get_stats(self, selected_sensors=None):
    """Collect /debug/stats from every selected sensor instance.

    :param selected_sensors: None to query all sensors, a sequence of
        sensor names, or a single sensor name as a bare string
    :returns: a list of per-sensor stat dicts on success; the JSON
        string "{}" if any sensor responds with a non-200 status
        (NOTE(review): inconsistent return type, kept for compatibility)
    """
    fn_name = "get_stats"
    logger.debug("Begin {}".format(fn_name))
    logger.debug("celery id: {}".format(self.request.id))
    datas = []
    for instance in instances:
        # Match sensor selection the same way raw_query does: all
        # sensors, membership in a sequence, or equality with a bare
        # string.  (The string case was previously missing here, so a
        # single sensor name never matched.)
        if (selected_sensors is None
                or (is_sequence(selected_sensors)
                    and instance['sensor'] in selected_sensors)
                or (not is_sequence(selected_sensors)
                    and instance['sensor'] == selected_sensors)):
            logger.debug("{}: query instance: {}".format(
                fn_name, instance['sensor']))
            url = "https://{}:{}/debug/stats".format(instance['host'],
                                                     instance['port'])
            r = requests.get(url,
                             cert=(instance['cert'], instance['key']),
                             verify=instance['ca'])
            logger.debug("{}: response code: {}".format(
                fn_name, r.status_code))
            if r.status_code != 200:
                return json.dumps({})
            # Response body is plain "key value" lines.
            lines = r.text.split('\n')
            data = {}
            for line in lines:
                if len(line.strip()) > 0:
                    k, v = line.split()
                    data[k] = int(v)
            if 'oldest_timestamp' in data:
                # Convert epoch timestamp to an ISO-8601 UTC string.
                # The /1000 suggests nanosecond input converted to
                # microseconds -- TODO confirm against the sensor API.
                ot = data['oldest_timestamp']
                dt = datetime.utcfromtimestamp(0) + timedelta(
                    microseconds=ot / 1000)
                data['oldest_timestamp'] = dt.isoformat() + 'Z'
            data['sensor'] = instance['sensor']
            datas.append(data)
    logger.debug("End {}".format(fn_name))
    return datas
def vobj_to_str(vobj, root, attributes):
    """Fill a XCard element's properties and text from the vobject's properties

    :type vobj: :vobject-class:`VBase`
    :param vobj: the vcard content element
    :type root: :lxml-class:`_Element`
    :param root: the xCard element to be filled
    :type attributes: [str]
    :param attributes: the list of attributes to be filled
    """
    for attr in attributes:
        value = getattr(vobj, attr, None)
        if not value:
            # Absent or empty attributes produce no child element.
            continue
        # xCard calls the vobject "family" component "surname".
        name = 'surname' if attr == 'family' else attr
        child = root.makeelement(VCARD_NSB + name.lower(), nsmap=NSMAP)
        root.append(child)
        # Multi-valued components collapse to one space-joined string.
        text = u' '.join(value) if is_sequence(value) else value
        child.text = text
def fill_tree_from_vcard(node, vcard_):
    # Populate the xCard tree under *node* from the vobject vcard
    # *vcard_*.  Relies on enclosing-scope names (exclude_tags, groups,
    # root, VCARD_NSB, NSMAP) -- presumably a nested function; confirm
    # against the enclosing definition.
    for e in vcard_.getChildren():
        tag = e.name.lower()
        if tag in exclude_tags:
            continue
        v = e.transformToNative().value
        # Skip empty values (empty string/list, or a sequence of
        # all-falsy items).
        if not v or (is_sequence(v) and not any(v)):
            continue
        if e.group:
            # Grouped properties hang off a shared <group> element,
            # created on first use and cached in `groups`.
            try:
                master_el = groups[e.group]
            except KeyError:
                master_el = root.makeelement(VCARD_NSB + "group",
                                             nsmap=NSMAP)
                master_el.set("name", e.group)
                groups[e.group] = master_el
        else:
            master_el = node
        el = master_el.makeelement(VCARD_NSB + tag, nsmap=NSMAP)
        master_el.append(el)
        if e.params:
            params = el.makeelement(VCARD_NSB + 'parameters', nsmap=NSMAP)
            el.append(params)
            # BUG FIX: this loop previously reused `v` as its loop
            # variable, clobbering the property value so the
            # Address/Name dispatch below saw the last parameter string
            # instead.  Loop variables renamed to pname/pval.
            for pname, pval in e.params.iteritems():
                param = params.makeelement(VCARD_NSB + pname.lower(),
                                           nsmap=NSMAP)
                params.append(param)
                pval = pval.strip()
                vobj_to_typed_param(pval, param)
        if isinstance(v, vcard.Address):
            vobj_to_str(v, el, vcard.ADDRESS_ORDER)
        elif isinstance(v, vcard.Name):
            vobj_to_str(v, el, vcard.NAME_ORDER)
        else:
            vobj_to_typed_properties(v, el)
        if tag == "org":
            # Organizations get a stable id from their concatenated text.
            id_ = ";".join(el.itertext())
            el.set("uuidstr", id_)
def raw_query(self, query, headers=None, selected_sensors=None):
    """POST *query* to each selected sensor and merge the pcap results.

    :param query: raw query body sent to each sensor's /query endpoint
    :param headers: optional dict of extra HTTP headers
        (BUG FIX: was a mutable default argument ``{}``)
    :param selected_sensors: None for all sensors, a sequence of sensor
        names, or a single sensor name (string)
    :returns: (mergecap returncode, merged pcap path) when any sensor
        returned data, otherwise (-1, last error or None)
    """
    logger.debug("Begin raw_query")
    logger.info("Query: {}".format(query))
    import subprocess, os, os.path
    if headers is None:
        headers = {}
    error = None
    job_path = os.path.join(app.flask_app.config['SPOOL_DIR'],
                            self.request.id)
    # Owner-only spool directory for this job's temporary pcaps.
    os.mkdir(job_path, 0o700)
    outputs = []
    # This could feasibly be done in parallel
    for instance in instances:
        if (selected_sensors is None
                or (is_sequence(selected_sensors)
                    and instance['sensor'] in selected_sensors)
                or (not is_sequence(selected_sensors)
                    and instance['sensor'] == selected_sensors)):
            url = "https://%s:%i/query" % (instance['host'],
                                           instance['port'])
            logger.debug("Processing instance: {}".format(
                instance['sensor']))
            r = requests.post(url,
                              data=query,
                              headers=headers,
                              cert=(instance['cert'], instance['key']),
                              verify=instance['ca'])
            logger.debug("Error {}: {}".format(r.status_code, r.reason))
            # Some error happened
            if r.status_code == 400:
                error = r.reason
            # Read response and write to temporary file
            elif r.status_code == 200:
                out_file = os.path.join(
                    job_path, "{}.pcap".format(instance['sensor']))
                with open(out_file, 'wb') as f:
                    f.write(r.content)
                outputs.append(out_file)
            else:
                # This was unexpected (BUG FIX: typo "Unexpeded")
                error = "Unexpected error: {}".format(instance['sensor'])
    if len(outputs) > 0:
        # BUG FIX: count was passed as a lazy logging argument with no
        # %-placeholder, which raised inside the logging machinery.
        logger.debug("Processing {} outputs".format(len(outputs)))
        # We have several files, lets merge them
        job_file = os.path.join(job_path,
                                "{}.pcap".format(self.request.id))
        cmd = ["/usr/sbin/mergecap", "-F", "pcap", "-w", job_file]
        cmd.extend(outputs)
        logger.debug("Calling mergecap as: {}".format(" ".join(cmd)))
        proc = subprocess.Popen(cmd)
        proc.wait()
        # Cleanup temporary files only when the merge succeeded.
        if proc.returncode == 0:
            logger.debug("Removing temp files: {}".format(outputs))
            for item in outputs:
                os.remove(item)
        return (proc.returncode, job_file)
    else:
        return (-1, error)