def call(self, function_name, url_params=None, params=None, data=None, auth=None,
         headers=None, timeout=None):
    """Dispatch the HTTP request registered for *function_name*.

    Raises ConnectionErrorException when the network call fails or times out.
    """
    resolved_url_params = url_params or {}
    http_method = self._get_method(function_name)
    url = self._get_url(self._get_pattern(function_name), resolved_url_params)
    try:
        return http_method(url,
                           params=params,
                           data=data,
                           auth=auth,
                           headers=headers,
                           verify=self.verify,
                           timeout=timeout or self.timeout,
                           proxies=self.proxies)
    except (ConnectionError, Timeout) as exc:
        logger.debug(str(exc))
        raise ConnectionErrorException(
            "Can't connect to biicode, check internet connection!")
def _compute_modify(self, block_version, dependencies, request, biiout):
    ''' Compute candidate hypothesis for updating an already-resolved version.
    Params:
        block_version: Version to which dependencies are currently resolved to
        dependencies: {Declaration: set(BlockCellName)}
        request: FinderRequest
        biiout: user-facing output stream
    Returns: list of hypothesis (see _define_hypothesis)
    '''
    brl_block = block_version.block
    time = block_version.time
    # First, compute all block candidates that have to be considered
    block_candidates = {brl_block}
    # Remove those not wanted by our policy
    policy = request.policy
    block_candidates = policy.filter(block_candidates)
    current_block = self._store.read_block(brl_block)
    # Date of the delta we are currently resolved to; presumably used by
    # _filter_by_policy to consider only other versions relative to it — confirm
    original_date = current_block.deltas[time].date
    delta_versions = self._filter_by_policy(block_candidates, policy, biiout,
                                            original_date, request)
    logger.debug("The heap is %s" % delta_versions)
    hypothesis = self._define_hypothesis(delta_versions, dependencies,
                                         request.block_names, biiout, block_version)
    return hypothesis
def checkin_files(hive_holder, settings, files, biiout):
    ''' Register incoming files into the hive, grouped by block, deleting
    blocks that are no longer present in the incoming set.
    Params:
        hive_holder: HiveHolder
        settings: hive settings to store
        files: dict{BlockCellName: Item (str or bytes loaded from file)}
        biiout: biiout
    Returns: ProcessorChanges
    Raises: if the max number of cells is overtaken (changevalidator)
    '''
    logger.debug("----------- checkin ---------------")
    hive = hive_holder.hive
    hive.settings = settings
    processor_changes = ProcessorChanges()
    if files is None:
        return processor_changes
    block_files = {}
    for block_cell_name, filecontent in files.iteritems():
        block_files.setdefault(block_cell_name.block_name, {})[block_cell_name.cell_name] = filecontent
    # BUGFIX(style): loop variable used to shadow the 'files' parameter;
    # renamed to 'cell_files' for clarity and safety
    for block_name, cell_files in block_files.iteritems():
        checkin_block_files(hive_holder, block_name, cell_files, processor_changes, biiout)
    # Blocks absent from the incoming files are emptied (their cells deleted)
    for block_holder in hive_holder.block_holders:
        if block_holder.block_name not in block_files:
            processor_changes.deleted.update(block_holder.block_cell_names)
            hive_holder.add_holder(BlockHolder(block_holder.block_name, []))
    hive_holder.delete_empty_blocks()
    hive.update(processor_changes)
    # Raises if max is overtaken
    changevalidator.check_hive_num_cells(hive)
    return processor_changes
def checkin_files(hive_holder, settings, files, biiout):
    ''' Register incoming files into the hive, grouped by block, emptying
    blocks that are no longer present in the incoming set.
    Params:
        hive_holder: HiveHolder
        settings: hive settings to store
        files: dict{BlockCellName: Item (str or bytes loaded from file)}
        biiout: biiout
    Returns: None
    '''
    logger.debug("----------- checkin ---------------")
    hive_holder.settings = settings
    if files is None:
        return
    block_files = {}
    for block_cell_name, filecontent in files.iteritems():
        block_files.setdefault(block_cell_name.block_name, {})[block_cell_name.cell_name] = filecontent
    # BUGFIX(style): loop variable used to shadow the 'files' parameter;
    # renamed to 'cell_files' for clarity and safety
    for block_name, cell_files in block_files.iteritems():
        checkin_block_files(hive_holder, block_name, cell_files, biiout)
    # Blocks absent from the incoming files are emptied
    for block_holder in hive_holder.block_holders:
        if block_holder.block_name not in block_files:
            hive_holder.add_holder(BlockHolder(block_holder.block_name, []))
    hive_holder.delete_empty_blocks()
def wrapper(*args, **kwargs):
    '''Check request and raise if its not https'''
    forwarded_proto = request.headers.get('X-Forwarded-Proto', 'http')
    if forwarded_proto == 'https':
        return callback(*args, **kwargs)
    logger.debug("Non http request blocked: %s" % request)
    raise self.default_non_ssl_http_response
def _firmware_to_upload(bii, firmware_name=None):
    '''return the list of firmwares to upload in it'''
    bii_paths = bii.bii_paths
    firmwares_created = [entry.split('.hex')[0]
                         for entry in os.listdir(bii_paths.bin)
                         if entry.endswith("hex")]
    logger.debug('Firmwares created: %s' % str(firmwares_created))

    def _check_firmware_name(firmware_name, firmwares_created):
        # Exact match wins; otherwise a substring match is accepted
        for firmware in firmwares_created:
            if firmware_name == '':
                break
            if firmware_name == firmware:
                return firmware
            elif firmware_name in firmware:
                return firmware
        raise ClientException('Not a valid firmware name')

    if not firmwares_created:
        raise ClientException('No firmware exists')
    if len(firmwares_created) == 1:
        return firmwares_created[0]
    if firmware_name:
        return _check_firmware_name(firmware_name, firmwares_created)
    # Several firmwares and no name given: list them and ask the user
    bii.user_io.out.listitem('You have the following firmwares: ')
    for firmware in sorted(firmwares_created):
        bii.user_io.out.listitem(firmware, 1)
    firmware_name = bii.user_io.request_string('Firmware name')
    return _check_firmware_name(firmware_name, firmwares_created)
def _update_baud_rate(self, event=None):
    '''Apply the baud rate currently selected in the combobox to the serial port.'''
    selected_rate = self.button_combobox.get()
    if selected_rate == self.baud_rate:
        return
    self.baud_rate = selected_rate
    self.ser.setBaudrate(selected_rate)
    logger.debug('Updated serial speed to %s' % selected_rate)
    self.update_idletasks()
def wrapper(self, *args, **kwargs):
    '''Run the wrapped call with auth headers set, retrying on auth failures.

    - ForbiddenException: when there is no user or no token, ask for a login
      and retry with a fresh token; otherwise propagate to the caller.
    - AuthenticationException: token expired/invalid -> clear it and repeat
      the call (anonymous, but still identifying the caller via headers).
    '''
    try:
        # Set custom headers of mac_digest and username
        self.set_custom_headers(self.user)
        return func(self, *args, **kwargs)
    except ForbiddenException as e:
        # User valid but not enough permissions
        logger.debug("Forbidden: %s" % str(e))
        if self.user is None or self.rest_client.token is None:
            # token is None when you change user with user command
            # Anonymous is not enough, ask for a user
            self.user_io.out.info('Please log in to perform this action. If you don\'t have'
                                  ' an account sign up here: http://www.biicode.com')
            if self.user is None:
                logger.debug("User None, ask for it, anonymous not enough!")
            return retry_with_new_token(self, *args, **kwargs)
        else:
            # If our user receives a ForbiddenException propagate it, not log with other user
            raise e
    except AuthenticationException:
        # Token expired or not valid, so clean the token and repeat the call
        # (will be anonymous call but registering who is calling)
        self._store_login((self.user, None))
        self.rest_client.token = None
        # Set custom headers of mac_digest and username
        self.set_custom_headers(self.user)
        return wrapper(self, *args, **kwargs)
def request_option(self, option_name, args=None, options=None, kls=None,
                   default_option=None, one_line_options=False):
    """ Asks the user to decide among a list
    Parameters:
        param option_name : Name of option inside args
        param args: List of arguments where to search for the option, if it's
                    not present there, it will prompt the user for it
        param options: option list
        param kls: Returned class (eg: BlockName)
        param default_option: If user press 'enter', return default option
        param one_line_options: Shows available options in one line
    Returns: the selected option
    """
    options = options or []
    if args is None:
        args = []
    value = getattr(args, option_name, None)
    # If an integer parameter is 0, we cant check if getattr(args, option_name)
    if value is None or value == "":
        logger.debug("Not parameter %s in args %s" % (option_name, args))
        if len(options) == 1:
            # Only one candidate: echo it instead of prompting
            ret = options.pop()
            self.out.input_text("%s: " % option_name)
            self.out.listitem("%s" % ret, 1)
        else:
            ret = self._request_while(option_name, options, kls, default_option,
                                      one_line_options)
    else:
        # Option was supplied in args; validate/convert it
        ret = self._get_option_from_args(option_name, args, options, kls,
                                         default_option)
    return ret
def enqueue_job(self, *args):
    '''Run the configured worker, either synchronously (async_process False)
    or by enqueuing it into a redis-backed priority queue.
    Params:
        *args: positional arguments forwarded to the worker callable
    Returns: the worker result (sync path) or the enqueued job (async path),
             None when redis logging is disabled
    Raises: ConnectionError / any enqueue failure after logging it
    '''
    if not BII_LOG_TO_REDIS:
        logger.debug('Skipping logging due to config')
        return
    global POOL
    if not self.async_process:
        # Call the method now!
        import importlib
        module_name = ".".join(self.worker.split(".")[0:-1])
        themodule = importlib.import_module(module_name)
        call_method = getattr(themodule, self.worker.split(".")[-1])
        # BUGFIX: return here; previously execution fell through and the job
        # was ALSO enqueued in redis, running it twice
        return call_method(*args)
    try:
        priority = Priority(self.priority)
        conn = self.connection or get_redis_connection()
        q = Queue(priority, connection=conn)
        return q.enqueue_call(self.worker, args=args, timeout=self.timeout,
                              result_ttl=self.result_ttl)
        # NOTE: this rare way to call enqueue its needed, look at the code in queue module
    except ConnectionError as e:
        logger.warn("Error connecting redis, reconnecting...")
        raise e
    except Exception as e:
        logger.warn("Error enqueuing: %s" % str(e))
        tb = traceback.format_exc()
        logger.warn(tb)
        raise e
def wrapper(*args, **kwargs):
    '''Reject banned IPs, run the callback, then count the access attempt.'''
    # Checks if already banned and throws HttpResponse if banned
    info, ip = self._check_banned()
    logger.debug("IP: %s, Time: %s Attempts: %s" % (ip, info.time, info.counter))
    result = callback(*args, **kwargs)  # kwargs has :xxx variables from url
    self.increment_event_counter(info, ip)
    return result
def get_block_info(self, brl_block):
    '''Build a BlockInfo (write permission, last version, privacy) of
    brl_block for the authenticated user.
    Returns: BlockInfo
    Raises: whatever check_read_block raises when reading is not allowed;
            BiiServiceException on unexpected store/security failures
    '''
    try:
        self.security.check_read_block(brl_block)
    except NotInStoreException:
        # In this case, the block doesnt exist, but return information of -1 and permissions
        return self._get_new_block_info(brl_block)
    block_info = BlockInfo()
    try:
        self.security.check_write_block(brl_block)
        block_info.can_write = True
    except ForbiddenException:
        block_info.can_write = False
    try:
        block = self._store.read_block(brl_block)
        block_info.last_version = block.last_version()
        block_info.private = self.security.is_private(brl_block)
    except Exception as e:
        # Unexpected failure: log full traceback, return an opaque error
        tb = traceback.format_exc()
        logger.debug(tb)
        logger.error("Something went wrong with %s" % e)
        raise BiiServiceException('Something went wrong')
    return block_info
def _compute_new(self, block_name, decls, policy, existing_block_names, biiresponse):
    '''Compute hypothesis for a block that is not yet resolved.
    Params:
        block_name: BlockName to look for
        decls: {Declaration: set(BlockCellName)}
        policy: filter for candidate blocks/versions
        existing_block_names: set(BlockName)
        biiresponse: user-facing output
    Returns: list of hypothesis; [] when the block cannot be found or on error
    Raises: ForbiddenException (propagated to the client)
    '''
    try:
        biiresponse.info("Looking for %s..." % block_name)
        # branches = self._store.read_tracks(block_name)
        # branches.get_blocks()
        block_candidates = [block_name + BranchName("%s/master" % block_name.user)]
        block_candidates = policy.filter(block_candidates)
        delta_versions = self._filter_by_policy(block_candidates, policy, biiresponse)
        logger.debug("The heap is %s" % delta_versions)
        result = self._define_hypothesis(delta_versions, decls, existing_block_names,
                                         biiresponse)
        return result
    except ForbiddenException:
        # Propagate forbidden to client
        raise
    except NotInStoreException:
        biiresponse.warn("Can't find block candidate for: %s" % (str(block_name)))
        return []
    except Exception:
        biiresponse.error("Fatal error in server while reading %s" % block_name)
        logger.error(traceback.format_exc())
        return []
def wrapper(self, *args, **kwargs):
    '''Run the wrapped call with auth headers set, retrying on auth failures.

    - ForbiddenException: when there is no user or no token, ask for a login
      and retry with a fresh token; otherwise propagate to the caller.
    - AuthenticationException: token expired/invalid -> clear it and repeat
      the call (anonymous, but still identifying the caller via headers).
    '''
    try:
        # Set custom headers of mac_digest and username
        self.set_custom_headers(self.user)
        return func(self, *args, **kwargs)
    except ForbiddenException as e:
        # User valid but not enough permissions
        logger.debug("Forbidden: %s" % str(e))
        if self.user is None or self.rest_client.token is None:
            # token is None when you change user with user command
            # Anonymous is not enough, ask for a user
            self.user_io.out.info(
                'Please log in to perform this action. If you don\'t have'
                ' an account sign up here: http://www.biicode.com')
            if self.user is None:
                logger.debug(
                    "User None, ask for it, anonymous not enough!")
            return retry_with_new_token(self, *args, **kwargs)
        else:
            # If our user receives a ForbiddenException propagate it, not log with other user
            raise e
    except AuthenticationException:
        # Token expired or not valid, so clean the token and repeat the call
        # (will be anonymous call but registering who is calling)
        self._store_login((self.user, None))
        self.rest_client.token = None
        # Set custom headers of mac_digest and username
        self.set_custom_headers(self.user)
        return wrapper(self, *args, **kwargs)
def _firmware_to_upload(bii, firmware_name=None):
    '''return the list of firmwares to upload in it'''
    bii_paths = bii.bii_paths
    hex_names = (name for name in os.listdir(bii_paths.bin) if name.endswith("hex"))
    firmwares_created = [name.split('.hex')[0] for name in hex_names]
    logger.debug('Firmwares created: %s' % str(firmwares_created))

    def _check_firmware_name(firmware_name, firmwares_created):
        # Exact match or substring match; empty name is always invalid
        for firmware in firmwares_created:
            if firmware_name == '':
                break
            if firmware_name == firmware or firmware_name in firmware:
                return firmware
        raise ClientException('Not a valid firmware name')

    if not firmwares_created:
        raise ClientException('No firmware exists')
    if len(firmwares_created) == 1:
        return firmwares_created[0]
    if firmware_name:
        return _check_firmware_name(firmware_name, firmwares_created)
    # Ambiguous: show the choices and prompt for a name
    bii.user_io.out.listitem('You have the following firmwares: ')
    for firmware in sorted(firmwares_created):
        bii.user_io.out.listitem(firmware, 1)
    firmware_name = bii.user_io.request_string('Firmware name')
    return _check_firmware_name(firmware_name, firmwares_created)
def deps_process(biiapi, hive_holder, processor_changes, biiout, settings=None):
    """Try to find unresolved in the existing external references
    or in the external references of the previous execution, so moving a file
    from one place to another does not require a find

    Params:
        biiapi: remote api used to resolve dependency versions
        hive_holder: HiveHolder being processed (mutated in place)
        processor_changes: ProcessorChanges accumulator (mutated in place)
        biiout: user-facing output
        settings: optional settings forwarded to build_closure
    """
    logger.debug("---------- process deps --------")
    _change_local_versions(hive_holder)  # to edition ones if you opened blocks
    _discover_tag_versions(hive_holder, biiapi, processor_changes, biiout)
    common_table = compute_common_table(hive_holder)  # BlockVersionTable
    _update_resolved(biiapi, hive_holder, common_table, biiout)
    src_graph, references = compute_src_graph(hive_holder, common_table)
    dep_graph, closure, overwrites = build_closure(biiapi, references, common_table,
                                                   settings, biiout)
    # Publish the computed graphs on the hive for later consumers
    hive_dependencies = hive_holder.hive.hive_dependencies
    hive_dependencies.src_graph = src_graph  # BlockVersionGraph
    hive_dependencies.dep_graph = dep_graph  # BlockVersionGraph
    hive_dependencies.closure = closure
    hive_dependencies.references = references
    real_graph = src_graph + dep_graph
    real_graph.sanitize(biiout)
    _update_requirements(hive_holder, real_graph, overwrites, common_table, biiout)
    # Persist any config change each block made during the process
    for block_holder in hive_holder.block_holders:
        biiconfig = block_holder.commit_config()  # Resource
        if biiconfig:
            processor_changes.upsert(biiconfig.name, biiconfig.content, blob_changed=True)
    hive_holder.hive.update(processor_changes)
def _update_collection(self, collection_name, query, set_statement, upsert, trx_record):
    '''Execute a mongo update and translate driver result codes into typed
    exceptions. Returns the raw driver result dict.'''
    collection = self.db[collection_name]
    result = collection.update(query, set_statement, upsert=upsert)
    failure_msg = "Error updating object: %s, %s" % (str(result), query)
    if "error" in result and result['error'] is not None:
        raise MongoUpdateException(failure_msg)
    if "ok" in result and result['ok'] != 1:
        raise MongoUpdateException(failure_msg)
    if trx_record and not result['updatedExisting']:
        raise MongoNotCurrentObjectException(
            "Object with txn counter not found!: %s" % query)
    if not upsert and not result['updatedExisting']:
        raise MongoNotFoundUpdatingException("Object not found: %s" % query)
    # if upsert and not ret['updatedExisting']: #Nonsense, if upsert does insert instead of
    # update updatedExisting is False
    #    raise MongoUpsertException("Error upserting: %s\nRet: %s" % (query, ret))
    if "jnote" in result:
        if result['jnote'] == "journaling not enabled on this server":
            logger.warning("Mongo journaling not enabled in this server!!")
        else:
            logger.debug(result['jnote'])
    return result
def bson_jwt_call(self, function_name, deserializer=None, url_params=None, data=None,
                  headers=None, response=None, timeout=None):
    '''Perform a bson-encoded REST call with JWT auth (anonymous when no token).
    Params:
        function_name: registered api function to call
        deserializer: optional deserializer for the response body
        url_params: dict of url substitutions (defaults to {})
        data: payload, bson-encoded before sending
        headers / response / timeout: forwarded to self.call
    Returns: whatever self.call returns
    '''
    # BUGFIX: url_params previously had a mutable default argument ({});
    # use None + fallback, which is backward compatible
    if url_params is None:
        url_params = {}
    # If we dont have token, send without jwtauth (anonymous)
    logger.debug("JWT Call %s" % str(function_name))
    auth = JWTAuth(self.token) if self.token else None
    headers = headers or {}
    headers.update(self.custom_headers)
    headers['Content-Type'] = 'application/bson'
    if data is not None:
        data = str(encode_bson(data))
    return self.call(function_name, url_params=url_params, data=data, headers=headers,
                     auth=auth, deserializer=deserializer, response=response,
                     timeout=timeout)
def __init__(self, base_url):
    '''Build a rest client rooted at *base_url*; anonymous until a token is set.'''
    self.base_url = base_url
    self.token = None  # Anonymous until setted
    self.custom_headers = {}  # Can set custom headers to each request
    logger.debug("Init rest api client pointing to: %s" % self.base_url)
    versioned_url = self.base_url + "/" + BiiRestApiClient.version
    super(BiiRestApiClient, self).__init__(versioned_url, self.authorized_functions)
def __init__(self, base_url):
    '''Initialize the rest client; starts anonymous (no token, no extra headers).'''
    self.base_url = base_url
    self.token = None  # Anonymous until setted
    self.custom_headers = {}  # Can set custom headers to each request
    logger.debug("Init rest api client pointing to: %s" % self.base_url)
    api_root = self.base_url + "/" + BiiRestApiClient.version
    super(BiiRestApiClient, self).__init__(api_root, self.authorized_functions)
def testWorstCaseSimpleCSP(self):
    '''incompatible problem last hyp'''
    # Make the last two variables mutually incompatible in every column
    for col in range(CSPTest.NVAL):
        self.__csp[CSPTest.NVAR - 2][col].value = -1
        self.__csp[CSPTest.NVAR - 1][col].value = -2
    solver = CSPExact(self.__csp, None)
    found = solver.solveCSP()
    logger.debug(solver)
    self.assertEqual(False, found)
def __is_root_hyp_consistent(self):
    '''Determines block consistency for root hypothesis. At the moment not used'''
    for hypothesis in self.__root_hyp:
        if hypothesis.invalid:
            logger.debug('Root hyp invalid: {0}'.format(hypothesis))
            return False
    # No invalid hypothesis found (or empty root set): consistent
    return True
def __storeSol(self, depth):
    '''stores sol if it is no worse than current champion (limited by MAX_NUM_SOL)'''
    if depth < self.__depth_max:
        return
    if depth > self.__depth_max:
        # Strictly deeper: new champion; drop previously stored solutions
        self.__depth_max = depth
        self.__solSet[:] = []
        logger.debug('new best solution found')
    self.__pathToSol(depth)
def testInitialInvalidCSPElem(self):
    '''invalid root_hyp does not affect incompatibility'''
    last = CSPTest.NVAR - 1
    for col in range(CSPTest.NVAL):
        self.__csp[last - 1][col].value = -1
        self.__csp[last][col].value = 8
    # Root hypothesis element marked invalid should be ignored by the solver
    solver = CSPExact(self.__csp, [CSPElem(-2, True)])
    found = solver.solveCSP()
    logger.debug(solver)
    self.assertEqual(True, found)
def testInitialCSPElem(self):
    ''' initial root_hyp makes csp incompatible'''
    last = CSPTest.NVAR - 1
    for col in range(CSPTest.NVAL):
        self.__csp[last - 1][col].value = -1
        self.__csp[last][col].value = 8
    # A valid root hypothesis element conflicting with the csp: no solution
    solver = CSPExact(self.__csp, [CSPElem(-2, False)])
    found = solver.solveCSP()
    logger.debug(solver)
    self.assertEqual(False, found)
def _define_hypothesis(self, delta_versions, decls, existing_block_names, biiresponse,
                       cur_version=None):
    ''' Parameters:
            delta_versions: [(delta, block_version)], prioritized set of accepted hypothesis
            decls: {Declaration: set(BlockCellName)}
            existing_block_names = set(BlockName)
            biiresponse: user-facing output
            cur_version: Current version that decls are resolved to
        Returns: list of hypothesis that match the required decls
                 (at most FindService.MAX_HYP)
    '''
    result = []
    #repeated = set()
    #previous = None
    for _, version in delta_versions:
        logger.debug('Analyzing hypothesis %s' % str(version))
        block = self._store.read_block(version.block)
        snap = block.cells.get_all_ids(version.time)
        #logger.debug('Current snap %s' % snap)
        # A version is a candidate only when it has files for ALL declarations
        all_found, names_total, deps_dict = self._match_declarations(
            decls, block, snap, cur_version, version)
        if not all_found:
            biiresponse.debug(
                'Version %s discarded, only contains files for declarations %s'
                % (str(version), deps_dict.keys()))
            continue
        # Store the current IDs and dep table
        #snap_contents = block.contents.get_ids(version.time)
        #cell_ids = {snap[k.cell_name] for k in names_total}
        #content_ids = {snap_contents[k.cell_name] for k in names_total if
        #               k.cell_name in snap_contents}
        #dep_table = block.dep_tables.floor(version.time)
        #current = cell_ids, content_ids, dep_table
        # Only if the current option is different to the previous one
        # we dont want to check the same option twice
        #if previous != current and deps_dict:
        logger.debug('Building hypothesis for %s with %s' % (version, deps_dict))
        # logger.debug('ref_dict %s' % ref_dict)
        hyp = Hypothesis(version, deps_dict, self.translator, existing_block_names,
                         biiresponse)
        result.append(hyp)
        #previous = current
        # FIXME: now the limit of hypothesis is hardwired
        if len(result) >= FindService.MAX_HYP:
            break
    return result
def save_blob_if_modified(path, blob):
    '''save the file, but avoid touching it if the contents have not been
    modified. Useful e.g. for CMakeLists files. It uses blob to avoid CRLF issues

    Returns: True if the file was written, False when it was already up to date
    '''
    try:
        old_content = Blob(load(path), blob.is_binary)
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit. Missing/unreadable file -> treat as no previous
        # content so the blob is written below.
        old_content = None
    if blob != old_content:
        logger.debug('{0} has changed or was created'.format(path))
        save(path, blob.load)
        return True
    return False
def has_setup_and_loop(parser):
    '''True if the parser holds top-level (scope-less) 'setup' and 'loop' definitions.'''
    found = {'setup': False, 'loop': False}
    for definition in parser.definitions:
        if definition.scope:
            continue  # only top-level definitions count
        if definition.name == 'loop':
            logger.debug('loop found')
            found['loop'] = True
        elif definition.name == 'setup':
            logger.debug('setup found')
            found['setup'] = True
    return found['setup'] and found['loop']
def test_txn_limit_counter_overflow(self):
    '''A stale copy must not slip through once the txn counter wraps.'''
    user = self.user
    update_if_current.TXN_MAX_C = 10
    logger.debug("CONTADOR MAXIMO:" + str(update_if_current.TXN_MAX_C))
    clean_copy = self.store.read_user(user.ID)
    stale_copy = self.store.read_user(user.ID)
    # 10 updates in transaction (increments tx counter each)
    for _ in range(update_if_current.TXN_MAX_C):
        self.store.update_user(clean_copy)
    self.store.update_user(stale_copy)  # dirty but cheated!! counter overflowed
def _check_banned(self):
    '''Check if the ip is banned'''
    ip_address = get_user_ip()
    info = self._read_info(ip_address)
    if self._is_banned(info):
        if self._ban_expired(info):
            # Ban elapsed: start counting from scratch
            info = _reset_info()
        else:
            logger.error(" BANNED IP BLOCKED! " + str(ip_address) + " Count: " +
                         str(info.counter) + " Time left: " +
                         str(self._ban_time_left(info)) + " s.")
            raise self.banned_http_response
    logger.debug("IP: %s, Time: %s Count: %s" % (ip_address, info.time, info.counter))
    return info, ip_address
def possible_blocks(self):
    ''' Returns: { block_name: set(Declaration) } '''
    result = defaultdict(set)
    for declaration in self.unresolved:
        try:
            block = declaration.block()
            # FIXME: If block is in self.block_names the client could had filter that
            if block and block not in self.block_names:
                result[block].add(declaration)
        except Exception as e:
            logger.debug('Could not obtain block from decl %s: %s'
                         % (declaration, str(e)))
    return result
def test_txn_limit_counter_overflow(self): b1 = self.user update_if_current.TXN_MAX_C = 10 logger.debug("CONTADOR MAXIMO:" + str(update_if_current.TXN_MAX_C)) bclean = self.store.read_user(b1.ID) bdirty = self.store.read_user(b1.ID) for _ in range(update_if_current.TXN_MAX_C): # 10 updates in transaction (increments tx counter each) self.store.update_user(bclean) self.store.update_user( bdirty) # dirty but cheated!! counter overflowed
def call(self, function_name, url_params=None, params=None, data=None, auth=None,
         headers=None, timeout=None):
    """Execute the HTTP method registered for *function_name* and return its
    response; network or timeout failures become ConnectionErrorException."""
    substitutions = url_params or {}
    method = self._get_method(function_name)
    pattern = self._get_pattern(function_name)
    target_url = self._get_url(pattern, substitutions)
    try:
        return method(target_url, params=params, data=data, auth=auth,
                      headers=headers, verify=self.verify,
                      timeout=timeout or self.timeout, proxies=self.proxies)
    except (ConnectionError, Timeout) as exc:
        logger.debug(str(exc))
        raise ConnectionErrorException("Can't connect to biicode, check internet connection!")
def find(self, request, biiout):
    ''' Resolve a FinderRequest into a FinderResult using an iterative
    deepening compatibility search over candidate hypothesis.
    Params:
        request: FinderRequest
        biiout: biiout
    Returns: FinderResult
    Raises: ValueError when the request is empty
    '''
    if not request:
        raise ValueError('The find request is empty, nothing to find')
    logger.debug('---------FinderRequest ------------\n%s' % str(request))
    result = FinderResult()
    # Copy unresolved and remove it if find the dependence
    result.unresolved = copy(request.unresolved)
    hypothesis = self._get_hypothesis(request, biiout)
    if not hypothesis:
        biiout.info("No block candidates found")
        return result
    biiout.info("Analyzing compatibility for found dependencies... ")
    # Disabled alternative solvers, kept for reference:
    '''# primitive combinator variant
    analyzer = CompatibilityAnalyzer(self._store, self._auth_user)
    analysis_result = analyzer.solve(hypothesis)
    # standard constraint variant
    csp = CSPExact(hypothesis, None)
    csp.solveCSP()
    analysis_result = csp.getCompatibleSol()
    logger.info(csp.print_info())'''
    # iterative deepening variant
    it = IterDeep(hypothesis, None, None)
    sol_found, analysis_result = it.start()
    if sol_found:
        logger.info("sol found: {0} iter".format(it.num_iter))
    if analysis_result is None:
        biiout.error("Can't find a compatible solution")
        return result
    self._update_result(analysis_result, request, result, biiout)
    if not result.unresolved:
        if result.resolved:
            biiout.info('All dependencies resolved')
        elif not result.updated:
            biiout.info('Everything was up to date')
    logger.debug('Result %s' % result)
    return result
def get_user(self, token):
    """Gets the user from credentials object. None if no credentials.
    Can raise jwt.ExpiredSignature and jwt.DecodeError"""
    profile = self.get_profile(token)
    if not profile:
        return None
    username = profile.get("user", None)
    stored_user = self.server_store.read_user(username)
    # Timestamp must match with the stored in user, if not,
    # this token is not valid (password has been changed)
    if profile["password_timestamp"] != stored_user.password_timestamp:
        logger.debug("Timestamp doesn't match!")
        raise jwt.DecodeError("Timestamp doesn't match!")
    return username
def update_hive_with_find_result(hive_holder, find_result, processor_changes):
    '''Apply a find result (resolved/updated versions plus renames) to the
    hive: update each block's requirements and rewrite renamed declarations.
    Params:
        hive_holder: HiveHolder (mutated in place)
        find_result: FinderResult with resolved/updated/update_renames
        processor_changes: ProcessorChanges accumulator (mutated in place)
    '''
    logger.debug("Applying find result %s" % find_result)
    blocks = hive_holder.blocks
    renames = find_result.update_renames
    for block_holder in hive_holder.block_holders:
        # Collect this block's declarations pointing outside the hive
        unresolved = set()
        includes = block_holder.includes
        paths_size = len(block_holder.paths)
        for declaration in block_holder.unresolved():
            try:
                new_declaration, _ = declaration.prefix(includes, paths_size)
            except:
                new_declaration = declaration
            decl_block = new_declaration.block()
            if decl_block and decl_block not in blocks:
                unresolved.add(new_declaration)
        # Newly resolved versions become requirements
        for version, dep_dict in find_result.resolved.iteritems():
            for unr in unresolved:
                if unr in dep_dict:
                    block_holder.requirements.add_version(version)
        # Updated versions of already-external blocks also become requirements
        for version, dep_dict in find_result.updated.iteritems():
            external = block_holder.external_targets()
            # TODO: Factorize this pattern, it is becoming repetitive
            external_blocks = {e.block_name for e in external
                               if e.block_name not in blocks}
            if version.block.block_name in external_blocks:
                block_holder.requirements.add_version(version)
        if renames:
            # NOTE(review): 'dep_dict' here is the leftover loop variable from
            # the loops above: if both 'resolved' and 'updated' are empty this
            # raises NameError, and otherwise only the LAST dep_dict seen is
            # used — confirm this is intended.
            for r in block_holder.simple_resources:
                cell = r.cell
                if cell.dependencies.update_resolved(dep_dict, renames):
                    modified_content = None
                    for old, new in renames.iteritems():
                        ch = r.content.update_content_declaration(old, new)
                        if ch:
                            modified_content = r.content
                    processor_changes.upsert(r.name, modified_content)
    # Persist any config change each block made during the process
    for block_holder in hive_holder.block_holders:
        resource = block_holder.commit_config()
        if resource:
            processor_changes.upsert(resource.name, resource.content, blob_changed=True)
def _define_hypothesis(self, delta_versions, decls, existing_block_names, biiresponse,
                       cur_version=None):
    ''' Parameters:
            delta_versions: [(delta, block_version)], prioritized set of accepted hypothesis
            decls: {Declaration: set(BlockCellName)}
            existing_block_names = set(BlockName)
            biiresponse: user-facing output
            cur_version: Current version that decls are resolved to
        Returns: list of hypothesis that match the required decls
                 (at most FindService.MAX_HYP)
    '''
    result = []
    #repeated = set()
    #previous = None
    for _, version in delta_versions:
        logger.debug('Analyzing hypothesis %s' % str(version))
        block = self._store.read_block(version.block)
        snap = block.cells.get_all_ids(version.time)
        #logger.debug('Current snap %s' % snap)
        # A version is a candidate only when it has files for ALL declarations
        all_found, names_total, deps_dict = self._match_declarations(decls, block, snap,
                                                                     cur_version, version)
        if not all_found:
            biiresponse.debug('Version %s discarded, only contains files for declarations %s'
                              % (str(version), deps_dict.keys()))
            continue
        # Store the current IDs and dep table
        #snap_contents = block.contents.get_ids(version.time)
        #cell_ids = {snap[k.cell_name] for k in names_total}
        #content_ids = {snap_contents[k.cell_name] for k in names_total if
        #               k.cell_name in snap_contents}
        #dep_table = block.dep_tables.floor(version.time)
        #current = cell_ids, content_ids, dep_table
        # Only if the current option is different to the previous one
        # we dont want to check the same option twice
        #if previous != current and deps_dict:
        logger.debug('Building hypothesis for %s with %s' % (version, deps_dict))
        # logger.debug('ref_dict %s' % ref_dict)
        hyp = Hypothesis(version, deps_dict, self.translator, existing_block_names,
                         biiresponse)
        result.append(hyp)
        #previous = current
        # FIXME: now the limit of hypothesis is hardwired
        if len(result) >= FindService.MAX_HYP:
            break
    return result
def get_published_resources(self, references):
    '''Returns published resources from given ids, reading the local DB first
    and falling back to (and caching) the remote api.
    @param references: References {block_version: set(cell_name)} — NOTE:
                       mutated in place (server-deleted blocks are removed)
    Returns: ReferencedResources with everything that could be retrieved
    '''
    def _get_not_found_refs(requested_refs, found_refs):
        # Subset of requested_refs missing from found_refs
        not_found_refs = References()
        for block_version, cell_names in requested_refs.iteritems():
            version_resources = found_refs.get(block_version, {})
            missing = cell_names.difference(version_resources)
            if missing:
                not_found_refs[block_version] = missing
        return not_found_refs

    # Read from localDB first, if not present, read from remote and catch!
    # (py2 keys() returns a list, so popping while iterating is safe)
    for block_version in references.keys():
        try:
            self.get_version_delta_info(block_version)
        except NotFoundException:
            self._out.error("Block %s has been deleted from server"
                            % str(block_version))
            references.pop(block_version)
    local_refs = self._store.get_published_resources(references)
    not_found_refs = _get_not_found_refs(references, local_refs)
    # Read from remote building references
    remote_refs = ReferencedResources()
    if len(not_found_refs) > 0:
        logger.info("NOT In localdb: %s" % str(not_found_refs))
        for ref in not_found_refs:
            if ref.block not in self._retrieved_blocks:
                self._out.info("Downloading files from: %s" % ref.block.to_pretty())
                self._retrieved_blocks.add(ref.block)
        remote_refs = self._restapi_manager.get_published_resources(not_found_refs)
        # Cache return in local database (and prepare return)
        if len(remote_refs) > 0:
            logger.debug("Remote read: %r" % remote_refs.explode().keys())
            self._store.create_published_resources(remote_refs)
    all_refs = local_refs + remote_refs
    not_found_refs = _get_not_found_refs(references, all_refs)
    if not_found_refs:
        self._out.error("The following files "
                        "could not be retrieved %s" % not_found_refs)
    return all_refs
def solve(self, hypothesis):
    '''Try combinations until a jointly compatible one is found.
    Returns the winning combination, or None when options are exhausted
    or an invalid element appears.'''
    combinator = Combinator(hypothesis)
    while True:
        candidate = combinator.getNext()
        # An empty candidate means no more combinations; any invalid member aborts
        if any(member.invalid for member in candidate) or not candidate:
            break
        if self.jointCompatibility(candidate):
            logger.debug("I FOUND A SOLUTION %s", candidate)
            return candidate
def check_for_updates(self, biiout):
    """Calls get_server_info in remote api if TIME_BETWEEN_CHECKS have passed"""
    update_info = self.store.load()
    server_info = update_info.server_info
    last_check = update_info.time
    now = datetime.datetime.utcnow()
    # Refresh when we have no information yet or the cached one is stale
    stale = last_check is None or (last_check + self.time_between_checks) <= now
    if stale:
        try:
            server_info = self.biiapi.get_server_info()
        except Exception as e:
            # Don't care if we can't call. continue working
            logger.debug(e)
            server_info = ServerInfo()
        self.store.save(UpdateInfo(server_info, now))
    # If have not passed TIME_BETWEEN_CHECKS, process old server_info
    return self._process_server_info(server_info, biiout)