def get_message_and_code_from_exception(exc):
    """Map an exception to a (message, http_status_code) tuple.

    Unexpected errors (HTTP 500) are logged with their traceback and a
    generic apology is returned so internals are not leaked to clients;
    any other status code returns the exception's own message.
    """
    code = getHttpCodeFromException(exc)
    if code == 500:
        # Unexpected server error: log full details, hide them from the client
        logger.error("%s" % str(exc))
        logger.error("%s" % traceback.format_exc())
        msg = ("An unexpected error has occurred in Bii service and has "
               "been reported. We hope to fix it as soon as possible")
        return msg, 500
    else:
        logger.info("Return code %s: %s" % (str(code), str(exc)))
        # '.message' is Python-2 specific and absent on many exception
        # classes; fall back to str(exc) instead of raising AttributeError
        return getattr(exc, 'message', str(exc)), code
def get_message_and_code_from_exception(exc):
    """Translate *exc* into a (message, http_code) pair for the response.

    A 500 is treated as an unexpected failure: the traceback is logged and
    a canned message is returned instead of the raw error text.
    """
    code = getHttpCodeFromException(exc)
    if code != 500:
        # Expected/handled error: pass the exception's own message through
        logger.info("Return code %s: %s" % (str(code), str(exc)))
        return exc.message, code
    # Unexpected failure: log everything, return only a generic message
    logger.error("%s" % str(exc))
    logger.error("%s" % traceback.format_exc())
    generic = ("An unexpected error has occurred in Bii service and has "
               "been reported. We hope to fix it as soon as possible")
    return generic, 500
def find(self, request, biiout):
    '''Resolve the dependencies described by a FinderRequest.

    Params:
        request: FinderRequest
        biiout: biiout (user-facing output/progress stream)
    Returns:
        FinderResult
    Raises:
        ValueError: if the request is empty/falsy
    '''
    if not request:
        raise ValueError('The find request is empty, nothing to find')
    logger.debug('---------FinderRequest ------------\n%s' % str(request))
    result = FinderResult()
    # Copy unresolved and remove it if find the dependence
    result.unresolved = copy(request.unresolved)
    # Candidate block versions to try; nothing to analyze if empty
    hypothesis = self._get_hypothesis(request, biiout)
    if not hypothesis:
        biiout.info("No block candidates found")
        return result
    biiout.info("Analyzing compatibility for found dependencies... ")
    '''# primitive combinator variant
    analyzer = CompatibilityAnalyzer(self._store, self._auth_user)
    analysis_result = analyzer.solve(hypothesis)
    # standard constraint variant
    csp = CSPExact(hypothesis, None)
    csp.solveCSP()
    analysis_result = csp.getCompatibleSol()
    logger.info(csp.print_info())'''
    # iterative deepening variant
    it = IterDeep(hypothesis, None, None)
    sol_found, analysis_result = it.start()
    if sol_found:
        logger.info("sol found: {0} iter".format(it.num_iter))
    if analysis_result is None:
        biiout.error("Can't find a compatible solution")
        return result
    # Move resolved entries from result.unresolved into result
    self._update_result(analysis_result, request, result, biiout)
    if not result.unresolved:
        if result.resolved:
            biiout.info('All dependencies resolved')
        elif not result.updated:
            biiout.info('Everything was up to date')
    logger.debug('Result %s' % result)
    return result
def find(self, request, biiout):
    '''Resolve a FinderRequest against available block candidates.

    Params:
        request: FinderRequest
        biiout: biiout
    Returns:
        FinderResult
    Raises:
        ValueError: if the request is empty
    '''
    if not request:
        raise ValueError('The find request is empty, nothing to find')
    logger.debug('---------FinderRequest ------------\n%s' % str(request))

    result = FinderResult()
    # Start with everything unresolved; entries drop out as they resolve
    result.unresolved = copy(request.unresolved)

    hypothesis = self._get_hypothesis(request, biiout)
    if not hypothesis:
        biiout.info("No block candidates found")
        return result

    biiout.info("Analyzing compatibility for found dependencies... ")
    # Earlier solver variants, kept for reference:
    #   primitive combinator: CompatibilityAnalyzer(self._store,
    #       self._auth_user).solve(hypothesis)
    #   standard constraint:  csp = CSPExact(hypothesis, None);
    #       csp.solveCSP(); csp.getCompatibleSol()
    # Current approach: iterative deepening
    deepener = IterDeep(hypothesis, None, None)
    sol_found, analysis_result = deepener.start()
    if sol_found:
        logger.info("sol found: {0} iter".format(deepener.num_iter))
    if analysis_result is None:
        biiout.error("Can't find a compatible solution")
        return result

    self._update_result(analysis_result, request, result, biiout)
    if not result.unresolved:
        if result.resolved:
            biiout.info('All dependencies resolved')
        elif not result.updated:
            biiout.info('Everything was up to date')
    logger.debug('Result %s' % result)
    return result
def get_published_resources(self, references):
    '''Return published resources for the given references.

    Reads the local store first and only asks the remote API for what is
    missing locally; remote results are cached back into the local store.

    @param references: References mapping block_version -> set of cell names
                       (mutated: deleted blocks are removed from it)
    '''
    def _get_not_found_refs(requested_refs, found_refs):
        # Subset of requested_refs whose cell names are absent in found_refs
        not_found_refs = References()
        for block_version, cell_names in requested_refs.iteritems():
            version_resources = found_refs.get(block_version, {})
            missing = cell_names.difference(version_resources)
            if missing:
                not_found_refs[block_version] = missing
        return not_found_refs

    # Drop references to blocks the server no longer knows about.
    # Iterate a snapshot of the keys: the loop mutates 'references' via pop
    # (in Python 2 keys() already returned a copy; list() makes it explicit
    # and keeps the loop correct under Python 3 dict views as well)
    for block_version in list(references.keys()):
        try:
            self.get_version_delta_info(block_version)
        except NotFoundException:
            self._out.error("Block %s has been deleted from server"
                            % str(block_version))
            references.pop(block_version)

    # Read from localDB first, if not present, read from remote and catch!
    local_refs = self._store.get_published_resources(references)
    not_found_refs = _get_not_found_refs(references, local_refs)

    # Read from remote building references
    remote_refs = ReferencedResources()
    if len(not_found_refs) > 0:
        logger.info("NOT In localdb: %s" % str(not_found_refs))
        for ref in not_found_refs:
            # Announce each block download only once per session
            if ref.block not in self._retrieved_blocks:
                self._out.info("Downloading files from: %s" % ref.block.to_pretty())
                self._retrieved_blocks.add(ref.block)
        remote_refs = self._restapi_manager.get_published_resources(not_found_refs)

    # Cache return in local database (and prepare return)
    if len(remote_refs) > 0:
        logger.debug("Remote read: %r" % remote_refs.explode().keys())
        self._store.create_published_resources(remote_refs)

    all_refs = local_refs + remote_refs
    not_found_refs = _get_not_found_refs(references, all_refs)
    if not_found_refs:
        self._out.error("The following files "
                        "could not be retrieved %s" % not_found_refs)
    return all_refs
def get_published_resources(self, references):
    '''Returns published resources from given ids

    Reads the local store first and only queries the remote API for
    references missing locally; remote results are cached back locally.

    @param references: list of ids (References: block_version -> cell names;
        mutated here — deleted blocks are removed from it)
    '''
    def _get_not_found_refs(requested_refs, found_refs):
        # Subset of requested_refs whose cell names are missing in found_refs
        not_found_refs = References()
        for block_version, cell_names in requested_refs.iteritems():
            version_resources = found_refs.get(block_version, {})
            missing = cell_names.difference(version_resources)
            if missing:
                not_found_refs[block_version] = missing
        return not_found_refs

    # Read from localDB first, if not present, read from remote and catch!
    # Drop references to blocks the server no longer knows about
    for block_version in references.keys():
        try:
            self.get_version_delta_info(block_version)
        except NotFoundException:
            self._out.error("Block %s has been deleted from server"
                            % str(block_version))
            references.pop(block_version)
    local_refs = self._store.get_published_resources(references)
    not_found_refs = _get_not_found_refs(references, local_refs)

    # Read from remote building references
    remote_refs = ReferencedResources()
    if len(not_found_refs) > 0:
        logger.info("NOT In localdb: %s" % str(not_found_refs))
        for ref in not_found_refs:
            # Announce each block download only once per manager lifetime
            if ref.block not in self._retrieved_blocks:
                self._out.info("Downloading files from: %s" % ref.block.to_pretty())
                self._retrieved_blocks.add(ref.block)
        remote_refs = self._restapi_manager.get_published_resources(not_found_refs)

    # Cache return in local database (and prepare return)
    if len(remote_refs) > 0:
        logger.debug("Remote read: %r" % remote_refs.explode().keys())
        self._store.create_published_resources(remote_refs)
    all_refs = local_refs + remote_refs
    not_found_refs = _get_not_found_refs(references, all_refs)
    if not_found_refs:
        self._out.error("The following files "
                        "could not be retrieved %s" % not_found_refs)
    return all_refs
def wrapper(*args, **kwargs):
    '''Capture possible exceptions to manage the return'''
    logger.info("Called: %s" % (str(callback.__name__)))
    # Initialize before the try so the generic except clause can always
    # reference it — previously, if inspect.getargspec itself raised, the
    # handler crashed with NameError instead of returning an error response
    biiresponse = None
    try:
        # Add the parameter handle as a keyword argument.
        if "response" in inspect.getargspec(context.callback)[0]:
            biiresponse = BiiResponse()
            kwargs["response"] = biiresponse
        return_value = callback(*args, **kwargs)  # kwargs has :xxx variables from url
        return self.prepare_response(return_value, biiresponse)
    except HTTPResponse:
        raise  # Normal response!!!!
    except Exception as exc:
        # Translate any other failure into an HTTP error response
        message, code = get_message_and_code_from_exception(exc)
        return self.abort_response(code, message, biiresponse)
def wrapper(*args, **kwargs):
    '''Capture possible exceptions to manage the return'''
    logger.info("Called: %s" % (str(callback.__name__)))
    # Add the parameter handle as a keyword argument.
    try:
        # If the wrapped callback declares a "response" argument, inject a
        # fresh BiiResponse it can append user-facing messages to
        if "response" in inspect.getargspec(context.callback)[0]:
            biiresponse = BiiResponse()
            kwargs["response"] = biiresponse
        else:
            biiresponse = None
        return_value = callback(*args, **kwargs)  # kwargs has :xxx variables from url
        return self.prepare_response(return_value, biiresponse)
    except HTTPResponse:
        raise  # Normal response!!!!
    except Exception as exc:
        # NOTE(review): if inspect.getargspec raises, 'biiresponse' is
        # unbound here and this handler itself fails with NameError —
        # TODO confirm and initialize biiresponse before the try
        message, code = get_message_and_code_from_exception(exc)
        return self.abort_response(code, message, biiresponse)
def match(self, block_cell_names, origin_block_cell_name=None, paths=None):
    '''Resolve this name against the given set of block cell names.

    Tries, in order: the absolute block-cell name, names relative to the
    directory of origin_block_cell_name, and finally an approximate
    suffix match restricted to the origin's own block.  Returns a set
    holding the single match, or an empty set when nothing (or more than
    one candidate) is found.
    '''
    # Try absolute
    bcn = self._block_cell_name()
    if bcn in block_cell_names:
        return set([bcn])
    # Try relative to the directory of the including file
    try:
        self.extension_namelist()
        for name in self.extension_name:
            name_ext = os.path.normpath(
                os.path.join(os.path.dirname(origin_block_cell_name), name))
            brl = BlockCellName(name_ext)
            if brl in block_cell_names:
                return set([brl])
    # Narrowed from a bare except: a missing origin or an invalid name is
    # expected here, but SystemExit/KeyboardInterrupt must not be swallowed
    except Exception:
        pass
    # Try APPROXIMATE, only in same block
    if origin_block_cell_name:
        try:
            block_name = origin_block_cell_name.block_name
            normalized_include = self.normalizedName
            result = set()
            for name in block_cell_names:
                if name.block_name == block_name:
                    # Approximate only find in same Block
                    if name.endswith(normalized_include):
                        tail = os.path.split(name)[1]
                        if len(normalized_include) >= len(tail):
                            result.add(name)
            if len(result) == 1:
                return result
            # TODO: Inform user of multiple matchs
            logger.info("Matchs for name %s are %s" % (self.name, result))
        except Exception as e:
            logger.error("Approximate find failed %s" % str(e))
    return set()
def match(self, block_cell_names, origin_block_cell_name=None, paths=None):
    '''Resolve self.name against the given set of block cell names.

    Tries an absolute match, then a match relative to the directory of
    origin_block_cell_name, and finally an approximate suffix match
    restricted to the origin's own block.  Returns a set holding the
    single match, or an empty set.
    '''
    # Try absolute
    try:
        brl = BlockCellName(self.name)
        if brl in block_cell_names:
            return set([brl])
    # Narrowed from a bare except: invalid absolute names are expected,
    # but SystemExit/KeyboardInterrupt must not be swallowed
    except Exception:
        pass
    # Try relative
    try:
        name = os.path.normpath(
            os.path.join(os.path.dirname(origin_block_cell_name), self.name))
        brl = BlockCellName(name)
        if brl in block_cell_names:
            return set([brl])
    # Narrowed from a bare except: origin may be None / name may be invalid
    except Exception:
        pass
    # Try APPROXIMATE, only in same block
    if origin_block_cell_name:
        try:
            block_name = origin_block_cell_name.block_name
            normalized_include = self.normalizedName
            result = set()
            for name in block_cell_names:
                if name.block_name == block_name:
                    # Approximate only find in same Block
                    if name.endswith(normalized_include):
                        tail = os.path.split(name)[1]
                        if len(normalized_include) >= len(tail):
                            result.add(name)
            if len(result) == 1:
                return result
            # TODO: Inform user of multiple matchs
            logger.info("Matchs for name %s are %s" % (self.name, result))
        except Exception as e:
            logger.error("Approximate find failed %s" % str(e))
    return set()
def make_dependent(self, block_s, elem_t):
    '''makes source block depend on cell target elem

    Rewrites the adjacency rows of every element in block_s so they depend
    only on elem_t within block_t, then copies elem_t's own dependencies on
    earlier blocks into those rows (transitive propagation).
    Only valid when block_t comes before block_s in row order.
    '''
    block_t = self._get_row_block(elem_t)
    low_t, high_t = self._get_range_block(block_t)
    if block_t < block_s:
        low_s, high_s = self._get_range_block(block_s)
        for row_s in range(low_s, high_s + 1):
            # Clear every dependency of row_s on block_t, then set elem_t only
            self.__graph[row_s][low_t: high_t + 1] = [0] * self.get_block_size(block_t)
            self.__graph[row_s][elem_t] = 1
            if block_t > 0:
                # Walk blocks earlier than block_t, copying elem's row slices
                # into row_s so transitive dependencies are preserved
                elem = elem_t
                for block in reversed(range(block_t)):
                    if elem == -1:  # Independent
                        elem = self._get_dep_elem(elem, block)
                        continue
                    low, high = self._get_range_block(block)
                    self.__graph[row_s][low:high + 1] = self.__graph[elem][low:high + 1]
                    if block > 0:
                        elem = self._get_dep_elem(elem, block)
    else:
        # Cannot make an earlier/equal block depend on a later one
        logger.info('impossible to establish dependency')
def _parse_strings_comments(self, source):
    '''Split *source* into string/comment/directive items and plain code.

    Returns:
        (result, code): result is a list of ParserItem covering every
        matched span (type, text, start, end); code is the source text
        with those spans removed, concatenated back together.
    '''
    result = []
    code = []
    begin = 0
    end = len(source)
    while begin < end:
        m = self.initial_pattern.search(source, begin)
        if m:  # some was found
            start = m.start()
            opening = m.group()
            if begin != start:
                # Plain code between the previous item and this one
                code.append(source[begin:start])
            # limits maps each opening token to (closing token, item type)
            closing = self.limits[opening][0]
            type_ = self.limits[opening][1]
            size_closing = len(closing)
            if opening == '#':
                # Directive: runs to end of (possibly continued) line
                fin = self.find_directive_end(source, start + 1)
            elif closing == '"':
                # String literal: helper skips over escaped quotes
                fin = self.find_closing_quotes(source, start + 1)
            elif closing == "'":
                # Char literal: 'x' is 3 chars, an escape like '\n' is 4
                fin = start + 2 if source[start + 1] != '\\' else start + 3
            else:
                # e.g. block comments: plain search for the closing token
                fin = source.find(closing, start + 1, end)
            if fin == -1:
                logger.info('ERROR, closing character not found for %s at %s'
                            % (opening, m.start()))
                break
            fin += size_closing
            result.append(ParserItem(type_, source[start:fin], start, fin))
            begin = fin
        else:
            # No more openings: the rest is plain code
            code.append(source[begin:end])
            break
    return result, ''.join(code)
def _parse_strings_comments(self, source):
    '''Scan *source*, extracting string/comment/directive spans.

    Returns a tuple (items, code): items is the list of ParserItem spans
    found; code is everything outside those spans, concatenated.
    '''
    items = []
    plain_chunks = []
    cursor = 0
    length = len(source)
    while cursor < length:
        match = self.initial_pattern.search(source, cursor)
        if match is None:
            # No further openings: the remainder is plain code
            plain_chunks.append(source[cursor:length])
            break
        start = match.start()
        opening = match.group()
        if start != cursor:
            # Plain code between the previous item and this one
            plain_chunks.append(source[cursor:start])
        # limits maps each opening token to (closing token, item type)
        closing, type_ = self.limits[opening][0], self.limits[opening][1]
        if opening == '#':
            fin = self.find_directive_end(source, start + 1)
        elif closing == '"':
            fin = self.find_closing_quotes(source, start + 1)
        elif closing == "'":
            # Char literal: escaped chars like '\n' occupy one extra position
            fin = start + 3 if source[start + 1] == '\\' else start + 2
        else:
            fin = source.find(closing, start + 1, length)
        if fin == -1:
            logger.info('ERROR, closing character not found for %s at %s'
                        % (opening, match.start()))
            break
        fin += len(closing)
        items.append(ParserItem(type_, source[start:fin], start, fin))
        cursor = fin
    return items, ''.join(plain_chunks)
def make_dependent(self, block_s, elem_t):
    '''makes source block depend on cell target elem

    For every row in block_s: zero its dependencies on block_t, mark the
    single dependency on elem_t, then copy elem_t's dependency slices for
    all earlier blocks into the row (transitive propagation).
    Requires block_t to precede block_s; otherwise logs and does nothing.
    '''
    block_t = self._get_row_block(elem_t)
    low_t, high_t = self._get_range_block(block_t)
    if block_t < block_s:
        low_s, high_s = self._get_range_block(block_s)
        for row_s in range(low_s, high_s + 1):
            # Reset row_s's block_t slice, then depend on elem_t alone
            self.__graph[row_s][low_t:high_t + 1] = [0] * self.get_block_size(block_t)
            self.__graph[row_s][elem_t] = 1
            if block_t > 0:
                # Follow elem's chain through earlier blocks, copying slices
                elem = elem_t
                for block in reversed(range(block_t)):
                    if elem == -1:  # Independent
                        elem = self._get_dep_elem(elem, block)
                        continue
                    low, high = self._get_range_block(block)
                    self.__graph[row_s][low:high + 1] = self.__graph[elem][low:high + 1]
                    if block > 0:
                        elem = self._get_dep_elem(elem, block)
    else:
        # A block cannot depend on an element of a later (or same) block
        logger.info('impossible to establish dependency')
def install_plugins(self): self.bsonplugin = BSONBottlePlugin() # BiiResponse plugin. All rest methods has to return # (data serializable | None, biiresponse) or throw BiiServiceException subclass logger.info("Installing BiiReturnHandlerPlugin plugin...") self.biiresponseplugin = BiiReturnHandlerPlugin(self.bsonplugin) self.install(self.biiresponseplugin) # Very first of all, check SSL or die if BII_SSL_ENABLED: # In heroku true for all environments logger.info("Installing NonSSLBlockerBottlePlugin plugin...") nonsslblock = NonSSLBlockerBottlePlugin() self.install(nonsslblock) # First of all, check DOS attacks by IP to the API # Counts IP request, raise 401 if banned if getattr(self.store, 'ip_mc_collection', False): logger.info("Installing massive DOS blocker...") doslogin = DOSBlockerBottlePlugin(self.store.ip_mc_collection, delta=BII_DOS_ATTACK_DELTA_TIME, max_events=BII_DOS_ATTACK_MAX_REQUEST, bantime=BII_DOS_ATTACK_BAN_TIME, callback_ip_banned=self.callback_ip_banned_for_DOS, banned_http_response=self.banned_http_response_for_DOS) # TODO: Maybe configure a log alert (heroku) if we return 401 banned # to analyze the case and adjust limits? 
self.install(doslogin) # Second, check Http Basic auth logger.info("Installing http basic authentication plugin...") httpplugin = HttpBasicAuthenticationBottlePlugin() self.install(httpplugin) # And check auth JWT logger.info("Installing JWT authentication plugin...") jwt_manager = JWTCredentialsManagerFactory.new(self.store) jwt_plugin = JWTAuthenticationBottlePlugin(jwt_manager) self.install(jwt_plugin) # Third check excess of login error for an IP # Catch generic 401 (or 404 or other) error from authentication and stores IP, # raise 401 if already banned if getattr(self.store, 'ip_mc_collection', False): logger.info("Installing massive error blocker...") massiveerrorplugin = MassiveErrorBlockerBottlePlugin( self.store.ip_mc_collection, delta=BII_ERROR_ATTACK_DELTA_TIME, max_events=BII_ERROR_ATTACK_MAX_ATTEMPTS, bantime=BII_ERROR_ATTACK_BAN_TIME, callback_ip_banned=self.callback_ip_banned_for_many_errors, banned_http_response=self.banned_http_response_for_many_errors) self.install(massiveerrorplugin) # Last, parse BSON data logger.info("Installing bson plugin...") self.install(self.bsonplugin) # Logging actions if BII_ENABLED_BII_USER_TRACE: self.tracebottleplugin = BiiUserTraceBottlePlugin() logger.info("Installing BiiUserTraceBottlePlugin plugin...") self.install(self.tracebottleplugin)