def get_default_scopecookies():
    """Load and parse `$HOME/.diamond/NEWSCOPE`.

    :return: list of ScopeCookie objects, one per cookie in the scope file
    :raises KeyError: if HOME is not set in the environment
    :raises IOError: if the scope file cannot be read
    """
    scope_file = os.path.join(os.environ['HOME'], '.diamond', 'NEWSCOPE')
    # Use a context manager so the file handle is closed deterministically;
    # the original relied on garbage collection to close it.
    with open(scope_file, 'rt') as f:
        data = f.read()
    cookies = [ScopeCookie.parse(c) for c in ScopeCookie.split(data)]
    return cookies
def _blaster_url(cookie):
    '''Cookie is a string, possibly representing a megacookie.  For now,
    raise an exception if there are multiple cookies pointing to
    different blasters.

    :param cookie: raw cookie string (may contain several concatenated cookies)
    :return: URL of the single JSON blaster referenced by the cookies
    :raises BadRequest: if zero, or more than one, blaster is referenced
    '''
    # Renamed from "map", which shadowed the builtin of the same name.
    blaster_map = get_blaster_map(
        [ScopeCookie.parse(c) for c in ScopeCookie.split(cookie)])
    if not blaster_map:
        raise BadRequest('No JSON blaster specified in scope cookies')
    if len(blaster_map) > 1:
        raise BadRequest('Multiple JSON blasters not supported')
    return blaster_map.keys()[0]
def _blaster_url(cookie):
    '''Cookie is a string, possibly representing a megacookie.  For now,
    raise an exception if there are multiple cookies pointing to
    different blasters.

    :param cookie: raw cookie string (may contain several concatenated cookies)
    :return: URL of the single JSON blaster referenced by the cookies
    :raises BadRequest: if zero, or more than one, blaster is referenced
    '''
    # Renamed from "map", which shadowed the builtin of the same name.
    blaster_map = get_blaster_map(
        [ScopeCookie.parse(c) for c in ScopeCookie.split(cookie)])
    if not blaster_map:
        raise BadRequest('No JSON blaster specified in scope cookies')
    if len(blaster_map) > 1:
        raise BadRequest('Multiple JSON blasters not supported')
    return blaster_map.keys()[0]
def search():
    """Flask view: accept a POSTed scope cookie and orientation value, then
    start a search on every JSON Blaster referenced by the cookies.

    Aborts with HTTP 400 on a malformed form, unusable cookies, or a
    Blaster error response.
    """
    if request.method == 'POST':
        # Parse form
        try:
            cookie = request.files['cookie'].read()
            orientation = request.form['orientation']
        except KeyError:
            abort(400, 'Bad form submission')
        # Split megacookie into a list of cookies per JSON Blaster
        cookies = get_blaster_map([ScopeCookie.parse(c)
                for c in ScopeCookie.split(cookie)])
        # Make sure at least one cookie specifies a Blaster
        if not cookies:
            abort(400, 'No cookies or no JSON Blaster specified')
        # Create searches
        def static_url(path):
            # Absolute URL for a static asset, so the remote Blaster can
            # fetch the filter binaries from this webapp.
            return urljoin(request.url_root, url_for('static', filename=path))
        searches = []
        for blaster, cookie_list in cookies.iteritems():
            # Per-Blaster search config: the cookies scoped to that Blaster
            # plus a two-filter stack (RGB decode, then orientation match).
            config = {
                'cookies': [c.encode() for c in cookie_list],
                'filters': [
                    {
                        'name': 'RGB',
                        'code': {
                            'uri': static_url('filters/fil_decode'),
                        },
                        'min_score': 1,
                    },
                    {
                        'name': 'Orientation',
                        'code': {
                            'uri': static_url('filters/fil_orientation'),
                        },
                        'arguments': [orientation],
                        'min_score': 1,
                    }
                ],
            }
            req = urllib2.Request(blaster, json.dumps(config), {
                'Content-Type': 'application/json',
                'User-Agent': 'webappfind/0.1',
            })
            try:
                response = urllib2.urlopen(req)
            except urllib2.HTTPError, e:
                # Prefer the response body (often a descriptive message)
                # over the bare reason string.
                abort(400, e.read() or e.reason)
            except urllib2.URLError, e:
                abort(400, e.reason)
            searches.append(json.loads(response.read()))
        # NOTE(review): no return statement is visible in this chunk;
        # presumably the view continues (e.g. renders `searches`) —
        # confirm against the full file.
class _SearchSpec(object): def __init__(self, data): # Load JSON try: config = json.loads(data) _search_schema.validate(config) except ValueError, e: raise HTTPError(400, str(e)) # Build cookies # Assume each "cookie" may actually be a megacookie try: self.cookies = [ ScopeCookie.parse(c) for mc in config['cookies'] for c in ScopeCookie.split(mc) ] except ScopeError, e: raise HTTPError(400, 'Invalid scope cookie: %s' % e)
def _create_search(self) -> DiamondSearch:
    """Build a DiamondSearch restricted to this dataset's configured hosts.

    Parses the dataset's scope cookies and filter specs into a new
    DiamondSearch, removes every host not listed in self._dataset.hosts
    from the search's cookie/connection maps, and rebuilds the blast set
    from the surviving connections.

    NOTE(review): this reaches into DiamondSearch private members
    (_cookie_map, _connections, _blast) — fragile against library changes.
    """
    search = DiamondSearch(
        [ScopeCookie.parse(x) for x in self._dataset.cookies],
        [FilterSpec(x.name, Blob(x.code), x.arguments, Blob(x.blob),
                    x.dependencies, x.minScore, x.maxScore)
         for x in self._dataset.filters],
        False,
        list(self._dataset.attributes) + [ATTR_DATA])
    # Iterate over a copy (dict(...)) so entries can be deleted mid-walk.
    for host in dict(search._cookie_map):
        if host not in self._dataset.hosts:
            del search._cookie_map[host]
            del search._connections[host]
    # Recreate the blast set so it covers only the remaining connections.
    search._blast = _DiamondBlastSet(list(search._connections.values()))
    return search
def setup(self, params):
    '''Configure the search and return a list of SHA256 signatures not
    present in the blob cache.'''
    # Indented logging helpers for the human-readable setup report below.
    def log_header(desc):
        _log.info(' %s:', desc)
    def log_item(key, fmt, *args):
        _log.info(' %-14s ' + fmt, key + ':', *args)
    # Create filter stack
    filters = []
    missing = set()  # signatures not in the cache; caller must supply them
    _log.info('Filters:')
    for f in params.filters:
        unsupported = False
        # Check whether the filter's code source is already cached.
        try:
            if not Filter.source_available(self._state, f.code):
                missing.add(f.code)
                code_state = 'not cached'
            else:
                code_state = 'cached'
        except FilterUnsupportedSource:
            unsupported = True
            code_state = 'unsupported'
        # Same availability check for the filter's blob argument.
        try:
            if not Filter.source_available(self._state, f.blob):
                missing.add(f.blob)
                blob_state = 'not cached'
            else:
                blob_state = 'cached'
        except FilterUnsupportedSource:
            unsupported = True
            blob_state = 'unsupported'
        log_header(f.name)
        log_item('Code', '%s, %s', f.code, code_state)
        log_item('Blob', '%s, %s', f.blob, blob_state)
        log_item('Arguments', '%s', ', '.join(f.arguments) or '<none>')
        log_item('Dependencies', '%s', ', '.join(f.dependencies) or '<none>')
        log_item('Minimum score', '%f', f.min_score)
        log_item('Maximum score', '%f', f.max_score)
        # Raise only after logging, so the report shows which filter
        # was rejected.
        if unsupported:
            raise DiamondRPCSchemeNotSupported()
        filters.append(
            Filter(f.name, f.code, f.blob, f.min_score, f.max_score,
                   f.arguments, f.dependencies))
    filterstack = FilterStack(filters)
    # Parse scope cookies
    try:
        cookies = [ScopeCookie.parse(c) for c in params.cookies]
        _log.info('Scope cookies:')
        for cookie in cookies:
            log_header(cookie.serial)
            log_item('Servers', '%s', ', '.join(cookie.servers))
            log_item('Scopes', '%s', ', '.join(cookie.scopeurls))
            log_item('Expires', '%s', cookie.expires)
            if self._state.config.security_cookie_no_verify:
                # Debug/testing escape hatch: skip signature verification.
                _log.warn('Bypassing cookie verification.')
            else:
                cookie.verify(self._state.config.serverids,
                              self._state.config.certdata)
        scope = ScopeListLoader(self._state.config, self._server_id,
                                cookies)
    except ScopeCookieExpired, e:
        _log.warning('%s', e)
        raise DiamondRPCCookieExpired()
    # NOTE(review): filterstack, scope, and missing are unused in the
    # visible span; presumably consumed further down this method (the
    # docstring promises `missing` is returned) — confirm against the
    # full file.
def parse_cookie(self, data):
    """Deserialize *data* into a ScopeCookie via ScopeCookie.parse."""
    parsed = ScopeCookie.parse(data)
    return parsed
def generate_cookie(self):
    """Generate a scope cookie from this object's servers, scope URLs,
    expiration, and signing key, and return its encoded form."""
    cookie = ScopeCookie.generate(
        self.servers, self.scopeurls, self.expires, self.key)
    return cookie.encode()
def setup(self, params):
    '''Configure the search and return a list of SHA256 signatures not
    present in the blob cache.'''
    # Indented logging helpers for the human-readable setup report below.
    def log_header(desc):
        _log.info(' %s:', desc)
    def log_item(key, fmt, *args):
        _log.info(' %-14s ' + fmt, key + ':', *args)
    # Create filter stack
    filters = []
    missing = set()  # signatures not in the cache; caller must supply them
    _log.info('Filters:')
    for f in params.filters:
        unsupported = False
        # Check whether the filter's code source is already cached.
        try:
            if not Filter.source_available(self._state, f.code):
                missing.add(f.code)
                code_state = 'not cached'
            else:
                code_state = 'cached'
        except FilterUnsupportedSource:
            unsupported = True
            code_state = 'unsupported'
        # Same availability check for the filter's blob argument.
        try:
            if not Filter.source_available(self._state, f.blob):
                missing.add(f.blob)
                blob_state = 'not cached'
            else:
                blob_state = 'cached'
        except FilterUnsupportedSource:
            unsupported = True
            blob_state = 'unsupported'
        log_header(f.name)
        log_item('Code', '%s, %s', f.code, code_state)
        log_item('Blob', '%s, %s', f.blob, blob_state)
        log_item('Arguments', '%s', ', '.join(f.arguments) or '<none>')
        log_item('Dependencies', '%s', ', '.join(f.dependencies) or '<none>')
        log_item('Minimum score', '%f', f.min_score)
        log_item('Maximum score', '%f', f.max_score)
        # Raise only after logging, so the report shows which filter
        # was rejected.
        if unsupported:
            raise DiamondRPCSchemeNotSupported()
        filters.append(Filter(f.name, f.code, f.blob, f.min_score,
                              f.max_score, f.arguments, f.dependencies))
    # NOTE(review): rebinding `filters` to the FilterStack shadows the
    # list of Filter objects; a sibling implementation in this codebase
    # uses a separate name (`filterstack`) — confirm later (unseen) code
    # expects the FilterStack under this name before renaming.
    filters = FilterStack(filters)
    # Parse scope cookies
    try:
        cookies = [ScopeCookie.parse(c) for c in params.cookies]
        _log.info('Scope cookies:')
        for cookie in cookies:
            log_header(cookie.serial)
            log_item('Servers', '%s', ', '.join(cookie.servers))
            log_item('Expires', '%s', cookie.expires)
            cookie.verify(self._state.config.serverids,
                          self._state.config.certdata)
        scope = ScopeListLoader(self._state.config, self._server_id,
                                cookies)
    except ScopeCookieExpired, e:
        _log.warning('%s', e)
        raise DiamondRPCCookieExpired()
    # NOTE(review): filters (the stack), scope, and missing are unused in
    # the visible span; presumably consumed further down this method —
    # confirm against the full file.