def abort(code, body):
    """Abort the current request with *code*.

    JSON-accepting clients get an API-style response via ``apireturn``;
    everyone else gets a bottle ``HTTPError`` carrying the shared
    ``meta_headers``.
    """
    if "application/json" in request.headers.get("Accept", ""):
        apireturn(code, body)
    err = HTTPError(code, body)
    for name, value in meta_headers.items():
        err.add_header(name, value)
    raise err
def wrapper(*a, **ka):
    """Enforce HTTP basic auth against the GUI credentials (GUI mode only)."""
    # no authentification when composer
    user, password = request.auth or (None, None)
    bad_credentials = (user is None
                       or user != config.GUI_USER
                       or password != config.GUI_PASSWORD)
    if config.MODE == "gui" and bad_credentials:
        err = HTTPError(401, text)
        err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
        return err
    return func(*a, **ka)
def wrapper(*a, **ka):
    """Guard the wrapped view with basic auth when status protection is on.

    The stored password is a SHA-256 hex digest, so the submitted one is
    hashed before comparison.
    """
    if CONFIG['protect']['status']:
        user, password = request.auth or (None, "")
        password = hashlib.sha256(password).hexdigest()
        authorized = (user == CONFIG['protect']['username']
                      and password == CONFIG['protect']['password'])
        if user is None or not authorized:
            err = HTTPError(401, text)
            err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
            return err
    return func(*a, **ka)
def root():
    """
    Display a web-UI

    :return:
    """
    login, password = request.auth or (None, None)
    denied = addon.pass_protect and (login is None or
                                     (login, password) != addon.credentials)
    if denied:
        error = HTTPError(401, 'Access denied')
        error.add_header('WWW-Authenticate', 'Basic realm="private"')
        return error
    return template('torrents')
def require_user(self, user):
    """Abort with 401 unless the request authenticates as *user* or root.

    Checks the basic-auth header, verifies the signed credential has not
    expired and matches the claimed user name, then authorizes.
    """
    try:
        authuser, authpassword = request.auth
    except TypeError:
        # request.auth is None when no/invalid auth header was sent
        log.warn("could not read auth header")
        err = HTTPError(401, "authentication required")
        err.add_header('WWW-Authenticate', 'Basic realm="devpi"')
        raise err
    log.debug("detected auth for user %r", authuser)
    try:
        val = self.signer.unsign(authpassword, self.LOGIN_EXPIRATION)
    except itsdangerous.BadData:
        abort(401, "invalid authentication for user %r" % authuser)
    if not val.startswith(authuser + "-"):
        abort(401, "mismatch credential for user %r" % authuser)
    if authuser not in ("root", user):
        abort(401, "user %r not authorized, requiring %r" % (authuser, user))
def get_forbidden():
    """Return (not raise) a 403 error object."""
    return HTTPError(403, "Access denied.")
def show_link_stat(link_id):
    """Look up a link by id (trailing character stripped) or raise 404."""
    link = manager.get_link_by_id(link_id[:-1])
    if link:
        return {'link': link}
    raise HTTPError(404, 'Link not found')
def user_name(name, db):
    """Render the user called *name* as an HTML list item, or 404."""
    user = db.query(User).filter_by(name=name).first()
    if user:
        # NOTE(review): user.name is interpolated into HTML unescaped —
        # confirm names are sanitized upstream.
        return "<li>%s</li>" % user.name
    return HTTPError(404, 'Entity not found.')
def abort_authenticate(msg="authentication required"):
    """Raise a 401 challenging for basic auth and pointing at /+login."""
    err = HTTPError(401, msg)
    for name, value in (('WWW-Authenticate', 'Basic realm="pypi"'),
                        ('location', "/+login")):
        err.add_header(str(name), value)
    raise err
def validate(request, token):
    """Raise 403 unless the submitted csrf_token matches *token*."""
    submitted = request.forms.get('csrf_token')
    if submitted != token:
        raise HTTPError(403, "CSRF Attack Detected (bad or missing token)")
def login(email, password):
    """Authenticate *email*/*password* and return a JWT, or raise 401."""
    user_id = u.login(email, password)
    if not user_id:
        raise HTTPError(401)
    return JwtPlugin.encode({'id': user_id})
def wrapped(*args, **kwargs):
    """Hide the wrapped view behind a 404 when the caller is unauthenticated."""
    if not is_authenticated(request):
        return HTTPError(404, "File does not exist.")
    return func(*args, **kwargs)
def _raise_error(self, code, message='not_found'):
    """Set the response status to *code* and raise an HTTPError whose
    ``exception`` payload carries the error message."""
    response.status = code
    raise HTTPError(code, message, exception={'error': message})
def request_tasks():
    """Agent check-in endpoint: register/refresh the agent, record its
    capabilities and hand out matching unclaimed tasks.

    Expects a JSON body validating against ``task_request_schema``.
    Returns a dict with the assigned ``tasks`` and a ``return_time``
    telling the agent when to poll again.
    Raises HTTPError 400 on bad input, 500 on database failure.
    """
    data = request.json
    if data is None:
        raise HTTPError(400)
    try:
        jsonschema.validate(data, task_request_schema)
    except jsonschema.ValidationError:
        raise HTTPError(400)
    protocol = int(data['protocol'])
    agent_uuid = str(data['agent_id'])
    agent_name = str(data['agent_name'])
    agent_time = data['agent_time']
    agent_capabilities = data['agent_capabilities']
    max_tasks = int(data['max_tasks'])
    agent_location = data[
        'agent_location'] if 'agent_location' in data else None
    # Only protocol 1 supported for now
    if protocol != 1:
        raise HTTPError(400)
    try:
        session = create_session()
    except:
        logger.error("Failed to create database session for task request")
        raise HTTPError(500)
    try:
        # Known agent: drop its old capability rows; they are re-added below.
        query = session.query(Agent)
        agent = query.filter(Agent.uuid == agent_uuid).one()
        session.query(AgentCapability).filter(
            AgentCapability.agent_uuid == agent.uuid).delete()
    except NoResultFound:
        # First contact from this agent: create its record.
        agent = Agent(uuid=agent_uuid, name=agent_name)
        session.add(agent)
    for agent_capability, agent_capability_info in agent_capabilities.items():
        capability = AgentCapability(type=agent_capability, version=int(
            agent_capability_info['version']), agent=agent)
        session.add(capability)
    # Find all non-claimed tasks that the agent is able to handle and assign them to the agent
    query = session.query(AgentCapability, Task).filter(Task.assigned_agent_uuid.is_(None)).\
        filter(AgentCapability.agent_uuid == agent.uuid).\
        filter(and_(AgentCapability.type == Task.type,
                    AgentCapability.version == Task.version))
    tasks = []
    # Assign available tasks to the agent and mark them as being in process
    for _, task in query[0:max_tasks]:
        task.assigned_agent_uuid = agent.uuid
        task.claimed = datetime.utcnow()
        tasks.append({
            'task_id': task.uuid,
            'task_type': task.type,
            'task_version': task.version,
            'task_data': json.loads(task.data)
        })
    agent.last_seen = datetime.utcnow()
    try:
        session.commit()
    except Exception:
        session.rollback()
        logger.error("Failed to commit database changes for task request")
        raise HTTPError(500)
    finally:
        session.close()
    logger.info(
        "Agent requested tasks - Agent's name and uuid: {}, {} - Agent was given following tasks: {}"
        .format(agent_name, agent_uuid, tasks))
    return {
        "tasks": tasks,
        "return_time": (datetime.now(tz.tzlocal()) +
                        timedelta(0, Settings.agent_return_time)).isoformat()
    }
def wrapper(*a, **ka):
    """Reject non-admin callers with a 401 basic-auth challenge."""
    if current_is_admin():
        return func(*a, **ka)
    err = HTTPError(401, "Admin permission required")
    err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
    return err
def do_auth(*args, **kawrgs):
    """Authenticate the account (existing session or ``authkey`` request
    parameter) and authorize it against the configured authorized /
    unauthorized group lists before invoking *callback*.

    Returns HTTPError(403) when authentication or authorization fails.
    """
    logger.debug("Auth query")
    access = False
    try:
        path = bottle.request.path
    except:
        path = None
    url = bottle.request.url
    account = get_account()
    authkey = request.params.get('authkey', None)
    if authkey:
        # Key-based auth bypasses the normal session: a valid key opens a
        # fresh session and grants access outright.
        logger.debug("Check authkey: '%s'" % authkey)
        account = check_authkey(authkey)
        if account:
            create_session(account)
            access = True
        else:
            logger.debug(" + Failed")
    if not account or account.user == 'anonymous':
        return HTTPError(403, "Forbidden")
    logger.debug(" + _id: %s" % account._id)
    if check_root(account):
        access = True
    else:
        if not authorized_grp and not unauthorized_grp:
            # No group restrictions configured: any authenticated user passes.
            access = True
        else:
            logger.debug("Check authorized_grp: %s" % authorized_grp)
            if authorized_grp:
                for group in authorized_grp:
                    if check_group_rights(account, group):
                        access = True
                        break
            # Membership in an unauthorized group overrides any grant above.
            logger.debug(
                "Check unauthorized_grp and overwrite access: %s"
                % unauthorized_grp)
            if unauthorized_grp:
                for group in unauthorized_grp:
                    if check_group_rights(account, group):
                        access = False
                        break
    #logger.debug("Check path: '%s'" % path)
    #if path == "/canopsis/auth.html":
    #    access = True
    logger.debug(" + Access: %s" % access)
    if access:
        logger.debug(" + Auth ok")
        return callback(*args, **kawrgs)
    else:
        logger.error(" + Invalid auth")
        return HTTPError(403, 'Insufficient rights')
def install_manifest(name, static=False, base_url=None, ipa_file=None,
                     plist_file=None, icon_file=None, icon512_file=None,
                     icon_gloss=True):
    """Build an over-the-air install manifest (plist XML) for the app *name*.

    In *static* mode the asset URLs are derived from the supplied file paths
    and *base_url*; otherwise the directory *name* is walked (top level only,
    Python 2 ``os.path.walk``) to discover the .ipa, icons and Info.plist.
    Returns the manifest string, or an HTTPError(404) object when a required
    asset is missing.
    """
    class Ctx(object):
        # simple attribute bag shared with the walk callback
        pass
    ctx = Ctx()
    if static:
        ctx.base_url = base_url
        def _make_url(filepath):
            from os.path import basename
            url = ctx.base_url + '/' + urllib.quote(basename(filepath))
            return url
        ctx.ipa_url = _make_url(ipa_file)
        ctx.icon_512_url = _make_url(icon512_file)
        ctx.icon_url = _make_url(icon_file)
        ctx.info_plist = plist_file
        ctx.icon_gloss = icon_gloss
    else:
        ctx.base_url = _base_url() + name
        ctx.ipa_url = None
        ctx.icon_512_url = None
        ctx.icon_url = None
        ctx.info_plist = None
        ctx.icon_gloss = True
        def _skywalker(arg, dirname, fnames):
            # os.path.walk callback: classify each directory entry by extension
            for fname in fnames:
                ext = os.path.splitext(fname)[-1]
                url = ctx.base_url + '/' + urllib.quote(fname)
                # first .ipa we find, that's our application file
                if ext == '.ipa':
                    ctx.ipa_url = url
                elif ext == '.png':
                    if _easy_match(fname, '512'):
                        ctx.icon_512_url = url
                    else:
                        ctx.icon_url = url
                    if _easy_match(fname, 'no_gloss'):
                        ctx.icon_gloss = False
                elif ext == '.plist':
                    if _easy_match(fname, 'info'):
                        ctx.info_plist = os.path.join(dirname, fname)
            # ensure we only do a top level walk
            fnames[:] = []
        os.path.walk(name, _skywalker, None)
    # $todo move this into install_page otherwise the 404 is invisible to
    # the user
    if not ctx.info_plist:
        return HTTPError(code=404, output='info plist not found')
    if not ctx.ipa_url:
        return HTTPError(code=404, output='ipa not found')
    if not ctx.icon_url:
        return HTTPError(code=404, output='icon not found')
    if not ctx.icon_512_url:
        return HTTPError(code=404, output='512 icon not found')
    response.content_type = "application/xml"
    plist = plistlib.readPlist(ctx.info_plist)
    meta = make_meta(plist['CFBundleIdentifier'], plist['CFBundleVersion'], name)
    assets = make_assets(ctx.ipa_url, ctx.icon_url, ctx.icon_512_url,
                         ctx.icon_gloss)
    manifest = make_manifest(meta, assets)
    return manifest
def fail_dir():
    """Always respond with a 500 error object."""
    return HTTPError(500, 'Internal Server Error')
def assert_user_exists(self, user):
    """Raise a 404 unless *user* is known."""
    if self.has_user(user):
        return
    raise HTTPError(404, 'No Such User')
def assert_logged_in(self):
    """Require the 'archivist' role.

    Any failure is reported as a 404 — presumably so the endpoint's
    existence is not leaked to unauthenticated callers (TODO confirm).
    """
    try:
        self.cork.require(role='archivist')
    except Exception:
        raise HTTPError(404, 'Not Logged In')
def upload_file():
    """Accept an edge CSV upload (and optional node-attribute CSV), build a
    NiceCX network from them and push it to the configured NDEx server.

    Returns whatever ``upload_to`` reports.
    NOTE(review): when no 'file' part is present the function falls through
    and returns None — confirm that is intended.
    """
    try:
        data = request.files.get('file')
        node_data = request.files.get('nodeFile')
    except Exception as e:
        raise HTTPError(500, e)
    if data and data.file:
        if (request.query.alpha):
            alpha = request.query.alpha
        if (request.query.beta):
            beta = request.query.beta
        source_column = request.forms.get('source')
        target_column = request.forms.get('target')
        edge_column = request.forms.get('edge')
        node_file_id_col = request.forms.get('nodeFileIDColumn')
        # Spool the upload to disk so pandas can read it from the start.
        with tempfile.NamedTemporaryFile('wb', delete=False) as f:
            f.write(data.file.read())
            f_name = f.name
            f.close()
        with open(f_name, 'r') as tsvfile:
            # First line is the header; every other column becomes an
            # edge attribute.
            header = [h.strip() for h in tsvfile.readline().split(',')]
            edge_attrs = [
                e for e in header
                if e not in [source_column, target_column, edge_column]
            ]
            df = pd.read_csv(tsvfile, delimiter=',', engine='python',
                             names=header)
            nice_cx = ndex2.create_nice_cx_from_pandas(
                df, source_field=source_column, target_field=target_column,
                source_node_attr=[], target_node_attr=[],
                edge_attr=edge_attrs, edge_interaction=edge_column)
        #NiceCXNetwork()
        if node_data and node_data.file:
            with tempfile.NamedTemporaryFile('wb', delete=False) as f2:
                f2.write(node_data.file.read())
                f_name2 = f2.name
                f2.close()
            with open(f_name2, 'r') as tsvfile2:
                header2 = [h.strip() for h in tsvfile2.readline().split(',')]
                # read every node column as str so ids match the network
                dtype_set = {h_n: str for h_n in header2}
                df2 = pd.read_csv(tsvfile2, delimiter=',', engine='python',
                                  names=header2, dtype=dtype_set)
                for index, row in df2.iterrows():
                    # NOTE(review): this name->id lookup is rebuilt for every
                    # row; it could be hoisted out of the loop.
                    node_name_lookup = {
                        node.get('n'): node.get('@id')
                        for node_id, node in nice_cx.get_nodes()
                    }
                    node_id = node_name_lookup.get(row[node_file_id_col])
                    for header_column in header2:
                        if header_column != node_file_id_col:
                            # 'name' would clash with the node's own name
                            # attribute, so it is stored as 'name_attr'
                            if header_column == 'name':
                                nice_cx.add_node_attribute(
                                    property_of=node_id, name='name_attr',
                                    values=row[header_column])
                            else:
                                nice_cx.add_node_attribute(
                                    property_of=node_id, name=header_column,
                                    values=row[header_column])
        upload_message = nice_cx.upload_to(upload_server, upload_username,
                                           upload_password)
        nice_cx.print_summary()
        return upload_message
def assert_user_is_owner(self, user):
    """Ensure the logged-in user owns *user*'s resources; 404 otherwise."""
    self.assert_logged_in()
    if not self.is_owner(user):
        raise HTTPError(404, 'No Such User')
    return True
def abort_custom(code, msg):
    """Raise an HTTPError whose status line carries *msg* verbatim."""
    status_line = "%s %s" % (code, msg)
    error = HTTPError(code, msg)
    error.status = status_line
    raise error
def assert_can_read(self, user, coll):
    """Raise a 404 unless *user* may read collection *coll*."""
    if self.can_read_coll(user, coll):
        return
    raise HTTPError(404, 'No Read Access')
def delete(db, user, id):
    """Delete the news item *id*; return a 404 error object if absent."""
    try:
        db.delete(db.query(model.News).filter_by(id=id).one())
    except NoResultFound:
        return HTTPError(404, 'Not found')
def assert_can_write(self, user, coll):
    """Raise a 404 unless *user* may write to collection *coll*."""
    if self.can_write_coll(user, coll):
        return
    raise HTTPError(404, 'No Write Access')
def assert_in(val, err):
    """Raise a 400 carrying *err* when *val* is falsy."""
    if val:
        return
    raise HTTPError(status=400, body=err)
def assert_can_admin(self, user, coll):
    """Raise a 404 unless *user* can administer *coll*.

    Non-anonymous users must additionally be logged in.
    """
    if not self.is_anon(user):
        self.assert_logged_in()
    if self.can_admin_coll(user, coll):
        return
    raise HTTPError(404, 'No Admin Access')
def handle_routing(self, wb_url, user, coll, rec, type, is_embed=False,
                   is_display=False, sources='', inv_sources='',
                   redir_route=None):
    """Route a record/replay/patch request for *wb_url* within the given
    user/collection/recording.

    Creates recordings (and patch recordings) on the fly for modify modes,
    enforces space and rate limits, then renders the content — or a
    templated error page when the upstream fetch fails.
    """
    wb_url = self.add_query(wb_url)
    if user == '_new' and redir_route:
        return self.do_create_new_and_redir(coll, rec, wb_url, redir_route)
    not_found = False
    sesh = self.get_session()
    if sesh.is_new() and self.is_content_request():
        self.redir_set_session()
    remote_ip = None
    frontend_cache_header = None
    if type in self.MODIFY_MODES:
        if not self.manager.has_recording(user, coll, rec):
            not_found = True
        self.manager.assert_can_write(user, coll)
        if self.manager.is_out_of_space(user):
            raise HTTPError(402, 'Out of Space')
        remote_ip = self._get_remote_ip()
        if self.manager.is_rate_limited(user, remote_ip):
            raise HTTPError(402, 'Rate Limit')
    if type == 'replay-coll':
        res = self.manager.has_collection_is_public(user, coll)
        not_found = not res
        if not_found:
            self._redir_if_sanitized(self.sanitize_title(coll), coll, wb_url)
            raise HTTPError(404, 'No Such Collection')
        if res != 'public':
            # non-public collections must not be cached by shared caches
            frontend_cache_header = ('Cache-Control', 'private')
    elif type == 'replay':
        if not self.manager.has_recording(user, coll, rec):
            not_found = True
    patch_rec = ''
    if not_found:
        title = rec
        if type in self.MODIFY_MODES:
            # modify modes auto-create the missing recording
            rec = self._create_new_rec(user, coll, title, type, no_dupe=True)
            # create patch recording as well
            if inv_sources and inv_sources != '*':
                patch_rec = self._create_new_rec(user, coll,
                                                 self.patch_of_name(title),
                                                 mode='patch', no_dupe=True)
        self._redir_if_sanitized(rec, title, wb_url)
        if type == 'replay':
            raise HTTPError(404, 'No Such Recording')
    elif inv_sources and inv_sources != '*':
        patch_rec = self.patch_of_name(rec, True)
    request.environ['SCRIPT_NAME'] = quote(request.environ['SCRIPT_NAME'], safe='/:')
    wb_url = self._context_massage(wb_url)
    wb_url_obj = WbUrl(wb_url)
    is_top_frame = (wb_url_obj.mod == self.frame_mod or wb_url_obj.mod.startswith('$br:'))
    if type == 'record' and is_top_frame:
        # the url may live in a remote archive; if so, redirect to the
        # appropriate mode for it
        result = self.check_remote_archive(wb_url, type, wb_url_obj)
        if result:
            mode, wb_url = result
            new_url = '/{user}/{coll}/{rec}/{mode}/{url}'.format(user=user,
                                                                 coll=coll,
                                                                 rec=rec,
                                                                 mode=mode,
                                                                 url=wb_url)
            return self.redirect(new_url)
    kwargs = dict(user=user,
                  coll_orig=coll,
                  id=sesh.get_id(),
                  rec_orig=rec,
                  coll=quote(coll),
                  rec=quote(rec, safe='/*'),
                  type=type,
                  sources=sources,
                  inv_sources=inv_sources,
                  patch_rec=patch_rec,
                  ip=remote_ip,
                  is_embed=is_embed,
                  is_display=is_display)
    try:
        self.check_if_content(wb_url_obj, request.environ, is_top_frame)
        resp = self.render_content(wb_url, kwargs, request.environ)
        if not is_top_frame:
            self.add_csp_header(wb_url_obj, resp.status_headers)
        if frontend_cache_header:
            resp.status_headers.headers.append(frontend_cache_header)
        resp = HTTPResponse(body=resp.body,
                            status=resp.status_headers.statusline,
                            headers=resp.status_headers.headers)
        return resp
    except UpstreamException as ue:
        # render the error template with the upstream failure details
        @self.jinja2_view('content_error.html')
        def handle_error(status_code, type, url, err_info):
            response.status = status_code
            return {'url': url,
                    'status': status_code,
                    'error': err_info.get('error'),
                    'user': self.get_view_user(user),
                    'coll': coll,
                    'rec': rec,
                    'type': type,
                    'app_host': self.app_host,
                    }
        return handle_error(ue.status_code, type, ue.url, ue.msg)
def perfstore_nodes_get_values(start=None, stop=None):
    """Fetch perfstore values for each metric listed in the ``nodes`` request
    parameter, optionally aggregating each series and consolidating all
    series into one.

    :param start: default lower time bound; overridable per-meta via 'from'
    :param stop: default upper time bound; overridable per-meta via 'to'
    :return: ``{'total': ..., 'success': True, 'data': [...]}``, or an
        HTTPError(404) object when ``nodes`` is missing.
    """
    metas = request.params.get('nodes', default=None)
    aggregate_timemodulation = request.params.get('aggregate_timemodulation', default=True)
    aggregate_method = request.params.get('aggregate_method', default=pyperfstore_aggregate_method)
    aggregate_interval = request.params.get('aggregate_interval', default=None)
    aggregate_max_points = request.params.get('aggregate_max_points', default=pyperfstore_aggregate_maxpoints)
    consolidation_method = request.params.get('consolidation_method', default=None)

    if aggregate_method == "":
        aggregate_method = None

    # BUGFIX: the original chained the comparisons with `or`, which is a
    # tautology (no value equals "false", "False" and 0 at once), so time
    # modulation was forced on for every request.  A membership test lets
    # the client actually disable it.
    aggregate_timemodulation = aggregate_timemodulation not in ("false", "False", 0)

    if not metas:
        logger.warning("Invalid arguments")
        return HTTPError(404, "Invalid arguments")

    metas = json.loads(metas)

    logger.debug("POST:")
    logger.debug(" + metas: %s" % metas)
    logger.debug(" + aggregate_timemodulation: %s" % aggregate_timemodulation)
    logger.debug(" + aggregate_method: %s" % aggregate_method)
    logger.debug(" + aggregate_interval: %s" % aggregate_interval)
    logger.debug(" + aggregate_max_points: %s" % aggregate_max_points)

    output = []
    for meta in metas:
        _id = meta.get('id', None)
        # TODO: for futur version, use only this !
        mstart = meta.get('from', start)
        mstop = meta.get('to', stop)
        if _id:
            output += perfstore_get_values(
                _id=meta['id'],
                start=mstart,
                stop=mstop,
                aggregate_method=aggregate_method,
                aggregate_interval=aggregate_interval,
                aggregate_max_points=aggregate_max_points,
                aggregate_timemodulation=aggregate_timemodulation)

    if consolidation_method and len(output) != 0:
        # select the consolidation function (dispatch table instead of the
        # original if/elif chain)
        consolidation_fns = {
            'mean': pyperfstore2.utils.mean,
            'min': min,
            'max': max,
            'sum': sum,
            'delta': lambda x: x[0] - x[-1],
        }
        fn = consolidation_fns.get(consolidation_method)
        series = [serie["values"] for serie in output]
        # collapse all series into a single consolidated one, attributed to
        # the first node
        output = [{
            'node': output[0]['node'],
            'metric': consolidation_method,
            'bunit': None,
            'type': 'GAUGE',
            'values': pyperfstore2.utils.consolidation(series, fn, 60)
        }]

    return {'total': len(output), 'success': True, 'data': output}
try: account = caccount(storage.get(_id, account=caccount(user=login))) except Exception, err: logger.error(err) ## External # Try to provisionning account if not account and mode == 'plain': try: account = external_prov(login, password) except Exception, err: logger.error(err) ## Check if not account: return HTTPError(403, "Forbidden") logger.debug(" + Check password ...") if not account.is_enable(): return HTTPError(403, "This account is not enabled") if account.external and mode != 'plain': return HTTPError(403, "Send your password in plain text") access = None if account.external and mode == 'plain': access = external_auth(login, password) if access == None:
def tasks_create_file():
    """Submit an uploaded sample (or PCAP/SAZ capture) as new analysis tasks.

    Reads the file plus a large set of optional form parameters, stores the
    payload in a temp file and either registers it as a PCAP or demuxes it
    into one or more analysis tasks.  Returns a jsonized dict with
    ``task_ids`` (or an error payload).
    """
    response = {}
    data = request.files.file
    pcap = request.POST.get("pcap", "")
    package = request.forms.get("package", "")
    timeout = request.forms.get("timeout", "")
    priority = request.forms.get("priority", 1)
    options = request.forms.get("options", "")
    machine = request.forms.get("machine", "")
    platform = request.forms.get("platform", "")
    tags = request.forms.get("tags", None)
    custom = request.forms.get("custom", "")
    memory = request.forms.get("memory", "False")
    clock = request.forms.get("clock", datetime.now().strftime("%m-%d-%Y %H:%M:%S"))
    # Fall back to "now" for missing or epoch-ish clock values.
    if clock is False or clock is None:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    if "1970" in clock:
        clock = datetime.now().strftime("%m-%d-%Y %H:%M:%S")
    shrike_url = request.forms.get("shrike_url", None)
    shrike_msg = request.forms.get("shrike_msg", None)
    shrike_sid = request.forms.get("shrike_sid", None)
    shrike_refer = request.forms.get("shrike_refer", None)
    static = bool(request.POST.get("static", False))
    unique = bool(request.forms.get("unique", False))
    # Normalize string form values to booleans.
    if memory.upper() == "FALSE" or memory == "0":
        memory = False
    else:
        memory = True
    enforce_timeout = request.forms.get("enforce_timeout", "False")
    if enforce_timeout.upper() == "FALSE" or enforce_timeout == "0":
        enforce_timeout = False
    else:
        enforce_timeout = True
    temp_file_path = store_temp_file(data.file.read(), data.filename)
    if unique and db.check_file_uniq(File(temp_file_path).get_sha256()):
        resp = {"error": True,
                "error_value": "Duplicated file, disable unique option to force submission"}
        return jsonize(resp)
    if pcap:
        if data.filename.lower().endswith(".saz"):
            # Fiddler .saz archives are converted to PCAP first.
            saz = saz_to_pcap(temp_file_path)
            if saz:
                path = saz
                # best-effort cleanup of the original upload
                try:
                    os.remove(temp_file_path)
                except:
                    pass
            else:
                # NOTE(review): message direction looks inverted — this is a
                # SAZ-to-PCAP conversion failure.
                resp = {"error": True,
                        "error_value": "Failed to convert PCAP to SAZ"}
                return jsonize(resp)
        else:
            path = temp_file_path
        task_id = db.add_pcap(file_path=path)
        task_ids = [task_id]
    else:
        try:
            task_ids = db.demux_sample_and_add_to_db(
                file_path=temp_file_path,
                package=package,
                timeout=timeout,
                options=options,
                priority=priority,
                machine=machine,
                platform=platform,
                custom=custom,
                memory=memory,
                enforce_timeout=enforce_timeout,
                tags=tags,
                clock=clock,
                shrike_url=shrike_url,
                shrike_msg=shrike_msg,
                shrike_sid=shrike_sid,
                shrike_refer=shrike_refer,
                static=static,
            )
        except CuckooDemuxError as e:
            return HTTPError(500, e)
    response["task_ids"] = task_ids
    return jsonize(response)
def _view(*args, **kwargs):
    """Allow only requests that originate from localhost."""
    remote = request.environ.get('REMOTE_ADDR', "0")
    host = request.environ.get('HTTP_HOST', '0')
    if remote in ('127.0.0.1', 'localhost') or host == '127.0.0.1:9666':
        return function(*args, **kwargs)
    return HTTPError(403, "Forbidden")
def faulty_server():
    """Endpoint that always fails with 503."""
    raise HTTPError(status=503, body='Service Unavailable')
def extract_mongo_query_params():
    """Collect and normalize all Mongo flow-query parameters from request.GET.

    Returns a dict with limit/fields/sort/count, bucket bounds and size,
    biflow flag, port/ip/protocol include-exclude filters, aggregation list
    and batch size.  Raises HTTPError for non-integer numeric parameters.
    (Python 2 code: relies on list-returning ``map`` and ``sys.maxint``.)
    """
    # construct query
    limit = 0
    if "limit" in request.GET:
        try:
            limit = int(request.GET["limit"])
        except ValueError:
            raise HTTPError(output="Param limit has to be an integer.")
        if limit < 0:
            limit = 0
    fields = None
    if "fields" in request.GET:
        fields = request.GET["fields"].strip()
        fields = map(lambda v: v.strip(), fields.split(","))
    sort = None
    if "sort" in request.GET:
        sort = request.GET["sort"].strip()
        sort = map(lambda v: v.strip(), sort.split(","))
        # each entry may end in "asc"/"desc"; default is ascending (1)
        for i in range(0, len(sort)):
            field = sort[i].split(" ")
            order = 1
            if field[-1].lower() == "asc":
                field.pop()
            elif field[-1].lower() == "desc":
                order = -1
                field.pop()
            field = " ".join(field)
            sort[i] = (field, order)
    count = False
    if "count" in request.GET:
        count = True
    # get query params
    start_bucket = 0
    if "start_bucket" in request.GET:
        try:
            start_bucket = int(request.GET["start_bucket"])
        except ValueError:
            raise HTTPError(output="Param start_bucket has to be an integer.")
        if start_bucket < 0:
            start_bucket = 0
    end_bucket = sys.maxint
    if "end_bucket" in request.GET:
        try:
            end_bucket = int(request.GET["end_bucket"])
        except ValueError:
            raise HTTPError(output="Param end_bucket has to be an integer.")
        if end_bucket < 0:
            end_bucket = 0
    # the bucket resolution to query (number of buckets)
    resolution = 1
    if "resolution" in request.GET:
        try:
            resolution = int(request.GET["resolution"])
        except ValueError:
            raise HTTPError(output="Param resolution has to be an integer.")
        if resolution < 1:
            resolution = 1
    # or set the bucket size directly
    bucket_size = None
    if "bucket_size" in request.GET:
        try:
            bucket_size = int(request.GET["bucket_size"])
        except ValueError:
            raise HTTPError(output="Param bucket_size has to be an integer.")
        if bucket_size not in config.flow_bucket_sizes:
            raise HTTPError(output="This bucket size is not available.")
    # biflow aggregation
    # This simply removes the difference between srcIP and dstIP
    # (The smaller ip will always be the srcIP)
    biflow = False
    if "biflow" in request.GET:
        biflow = True
    # protocol filter
    include_protos = []
    if "include_protos" in request.GET:
        include_protos = request.GET["include_protos"].strip()
        include_protos = map(lambda v: common.getValueFromProto(v.strip()),
                             include_protos.split(","))
    exclude_protos = []
    if "exclude_protos" in request.GET:
        exclude_protos = request.GET["exclude_protos"].strip()
        exclude_protos = map(lambda v: common.getValueFromProto(v.strip()),
                             exclude_protos.split(","))
    # port filter
    include_ports = []
    if "include_ports" in request.GET:
        include_ports = request.GET["include_ports"].strip()
        try:
            include_ports = map(lambda v: int(v.strip()), include_ports.split(","))
        except ValueError:
            raise HTTPError(output="Ports have to be integers.")
    exclude_ports = []
    if "exclude_ports" in request.GET:
        exclude_ports = request.GET["exclude_ports"].strip()
        try:
            exclude_ports = map(lambda v: int(v.strip()), exclude_ports.split(","))
        except ValueError:
            raise HTTPError(output="Ports have to be integers.")
    # ip filter
    include_ips = []
    if "include_ips" in request.GET:
        include_ips = request.GET["include_ips"].strip()
        include_ips = map(lambda v: int(v.strip()), include_ips.split(","))
    exclude_ips = []
    if "exclude_ips" in request.GET:
        exclude_ips = request.GET["exclude_ips"].strip()
        exclude_ips = map(lambda v: int(v.strip()), exclude_ips.split(","))
    # get buckets and aggregate
    if bucket_size == None:
        bucket_size = db.getBucketSize(start_bucket, end_bucket, resolution)
    # only stated fields will be available, all others will be aggregated toghether
    # filter for known aggregation values
    #if fields != None:
    #    fields = [v for v in fields if v in config.flow_aggr_values]
    black_others = False
    if "black_others" in request.GET:
        black_others = True
    aggregate = []
    if "aggregate" in request.GET:
        aggregate = request.GET["aggregate"].strip()
        aggregate = map(lambda v: v.strip(), aggregate.split(","))
    result = {}
    result["fields"] = fields
    print "Fields: " + str(fields)
    result["sort"] = sort
    result["limit"] = limit
    result["count"] = count
    result["start_bucket"] = start_bucket
    result["end_bucket"] = end_bucket
    result["resolution"] = resolution
    result["bucket_size"] = bucket_size
    result["biflow"] = biflow
    result["include_ports"] = include_ports
    result["exclude_ports"] = exclude_ports
    result["include_ips"] = include_ips
    result["exclude_ips"] = exclude_ips
    result["include_protos"] = include_protos
    result["exclude_protos"] = exclude_protos
    result["batch_size"] = 1000
    result["aggregate"] = aggregate
    result["black_others"] = black_others
    return result
def docs(filename):
    """Serve one of the whitelisted static doc files; 404 anything else."""
    allowed = ("privacy.html", "support.html", "about.html",
               "consent.html", "approval_letter.pdf")
    if filename not in allowed:
        # BUGFIX: the original format string had no %s placeholder, so the
        # % operator raised TypeError instead of logging the filename.
        logging.error("Request for unknown filename %s" % filename)
        return HTTPError(404, "Don't try to hack me, you evil spammer")
    return static_file(filename, "%s/%s" % (static_path, "docs"))
def abort_link(id):
    """Best-effort stop of download *id*; any error yields a bare HTTPError."""
    try:
        PYLOAD.stopDownloads([id])
    except:
        return HTTPError()
    return {"response": "success"}
def docs(filename):
    """Serve one of the four known doc pages as HTML; 404 anything else."""
    if filename in ("privacy", "support", "about", "consent"):
        return static_file("%s.html" % filename, "%s/%s" % (static_path, "docs"))
    return HTTPError(404, "Don't try to hack me, you evil spammer")
def err():
    """Build a 401 response challenging for basic auth in the admin realm."""
    error = HTTPError(401, "Access denied")
    error.add_header("WWW-Authenticate", 'Basic realm="%s"' % "admin area")
    return error
def _call(self):
    """Minimal IF-MAP SOAP endpoint: dispatch on the SOAPAction header and
    implement newSession / subscribe / poll / search against the in-memory
    graph.  (Python 2 code; the subscription queues support peek/timeout —
    presumably gevent-style PriorityQueues, confirm.)
    """
    if request.auth not in self._credentials:
        error = HTTPError(401, "Access denied")
        error.add_header("WWW-Authenticate", 'Basic realm="private"')
        return error
    method = request.headers.get("SOAPAction")
    xml_body = etree.fromstring(request.body.read())[0]
    response.content_type = 'text/xml; charset="UTF-8"'
    valide_session_response = self._validate_session_id(method, xml_body)
    if valide_session_response is not None:
        return valide_session_response
    if method == "newSession":
        result = etree.Element("newSessionResult")
        # session id is simply the next slot index in the subscriber list
        session_id = len(self._subscribe_list)
        result.set("session-id", str(session_id))
        result.set("ifmap-publisher-id", "111")
        result.set("max-poll-result-size", str(self._ITEM_MAX_MSG_SIZE))
        self._subscribe_list.append(None)
        msg = "New session %d established." % session_id
        self._log(msg, SandeshLevel.SYS_DEBUG)
        return self._RSP_ENVELOPE % {"result": etree.tostring(result)}
    elif method == "subscribe":
        session_id = int(xml_body[0].get("session-id"))
        self._subscribe_list[session_id] = PriorityQueue()
        buffer = StringIO()
        try:
            # seed the new subscription with the full current graph
            VncIfmapServer._export_root_graph(buffer)
            self._subscribe_list[session_id].put((1, time(), "searchResult",
                                                  buffer.getvalue()))
        finally:
            buffer.close()
        result = etree.Element("subscribeReceived")
        msg = "Session %d has subscribed to the root graph" % session_id
        self._log(msg, SandeshLevel.SYS_DEBUG)
        return self._RSP_ENVELOPE % {"result": etree.tostring(result)}
    elif method == "poll":
        session_id = int(xml_body[0].get("session-id"))
        queue = self._subscribe_list[session_id]
        if queue is None:
            msg = (
                "Session ID %d did not subscribed to the graph's root. "
                "Please subscribe before polling."
                % session_id
            )
            self._log(msg, SandeshLevel.SYS_WARN)
            result = etree.Element("errorResult", errorCode="AccessDenied")
            err_str = etree.SubElement(result, "errorString")
            err_str.text = msg
            return self._RSP_ENVELOPE % {"result": etree.tostring(result)}
        _, _, action, items = queue.get()
        # batch consecutive updates of the same kind into one poll result,
        # up to the message size limit
        while True:
            try:
                _, _, new_action, new_item = queue.peek(timeout=1)
            except Empty:
                break
            if new_action != action:
                break
            if (len(items) + len(new_item)) > self._ITEM_MAX_MSG_SIZE:
                break
            try:
                items += queue.get_nowait()[3]
            except Empty:
                break
        poll_str = '<pollResult><%s name="root">%s</%s></pollResult>' % (action, items, action)
        msg = "Session %d polled and get %s" % (session_id, action)
        self._log(msg, SandeshLevel.SYS_DEBUG)
        return self._RSP_ENVELOPE % {"result": poll_str}
    elif method == "search":
        # grab ident string; lookup graph with match meta and return
        start_name = xml_body[0][0].get("name")
        match_links = xml_body[0].get("match-links", "all")
        if match_links != "all":
            match_links = set(match_links.split(" or "))
        result_filter = xml_body[0].get("result-filter", "all")
        if result_filter != "all":
            result_filter = set(result_filter.split(" or "))
        visited_nodes = set([])
        result_items = []
        def visit_node(ident_name):
            # depth-first walk of the graph, collecting filtered metadata
            if ident_name in visited_nodes:
                return
            visited_nodes.add(ident_name)
            # add all metas on current to result, visit further nodes
            to_visit_nodes = set([])
            ident_str = VncIfmapServer._graph[ident_name]["ident"]
            links = VncIfmapServer._graph[ident_name]["links"]
            property_items = ""
            for link_key, link_info in links.iteritems():
                meta_name = link_key.split()[0]
                if "other" in link_info:
                    to_visit_nodes.add(link_key.split()[1])
                    if result_filter != "all" and meta_name in result_filter:
                        result_items.append(
                            "<resultItem>%s%s%s</resultItem>" %
                            (ident_str, link_info["other"], link_info["meta"]))
                elif result_filter != "all" and meta_name in result_filter:
                    # strip the enclosing <metadata>...</metadata> wrapper
                    property_items += link_info["meta"][10:-11]
            if property_items:
                result_items.append(
                    "<resultItem>%s<metadata>%s"
                    "</metadata></resultItem>" %
                    (ident_str, property_items))
            # all metas on ident walked
            for new_node in to_visit_nodes:
                visit_node(new_node)
        # end visit_node
        visit_node(start_name)
        search_str = "<searchResult>%s</searchResult>" % "".join(result_items)
        return VncIfmapServer._RSP_ENVELOPE % {"result": search_str}
    else:
        msg = "IF-MAP method '%s' is not implemented." % method
        self._log(msg, level=SandeshLevel.SYS_DEBUG)
        result = etree.Element("errorResult", errorCode="InvalidMethod")
        err_str = etree.SubElement(result, "errorString")
        err_str.text = msg
        return self._RSP_ENVELOPE % {"result": etree.tostring(result)}