class ComputeTypicality(APIView):
    """Compute the typicality of selected architectures w.r.t. a feature expression."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            # Selected arch ids arrive as a bracketed comma-separated string,
            # e.g. "[1,2,3]" -- strip the brackets and parse the ints.
            input_string = request.POST['input'][1:-1]
            inputs = [int(i) for i in input_string.split(',')]

            # The feature expression whose typicality is evaluated
            expression = request.POST['expression']

            typicality = self.DataMiningClient.computeTypicality(inputs, expression)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(typicality)

        except Exception as detail:
            # BUG FIX: log message previously said 'ComputeComplexity'
            # (copy-paste slip from a sibling view).
            logger.exception('Exception in ComputeTypicality: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class ComputeComplexityOfFeatures(APIView):
    """Compute complexity scores for a JSON-encoded list of feature expressions."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()

            # Expressions are posted as a JSON-encoded list
            expressions = json.loads(request.POST['expressions'])

            complexity = self.DataMiningClient.computeComplexityOfFeatures(expressions)

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response(complexity)

        except Exception as detail:
            logger.exception('Exception in ComputeComplexityOfFeatures: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class GetDrivingFeatures(APIView):
    """Mine driving features that separate selected from non-selected designs."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            self.DataMiningClient.startConnection()
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Metric thresholds for the association-rule mining
            supp = float(request.POST['supp'])
            conf = float(request.POST['conf'])
            lift = float(request.POST['lift'])

            # Arch ids arrive as "[1,2,3]" -- strip brackets, split, parse ints
            behavioral = [int(s) for s in
                          request.POST['selected'][1:-1].split(',')]
            non_behavioral = [int(s) for s in
                              request.POST['non_selected'][1:-1].split(',')]

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            output = self.DataMiningClient.getDrivingFeatures(
                problem, inputType, behavioral, non_behavioral, dataset,
                supp, conf, lift)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(output)

        except Exception as detail:
            logger.exception('Exception in getDrivingFeatures: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class SimplifyFeatureExpression(APIView):
    """Simplify a feature expression via the data mining service."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            sessionKey = request.session.session_key
            # Start data mining client
            self.DataMiningClient.startConnection()
            # Get problem name and the expression to simplify
            problem = request.POST['problem']
            expression = request.POST['expression']
            simplified_feature = self.DataMiningClient.client.simplifyFeatureExpression(
                sessionKey, problem, expression)
            # End the connection before return statement
            self.DataMiningClient.endConnection()
            # BUG FIX: previously returned the undefined name
            # `simplified_expression`, raising NameError and sending every
            # request down the except path with an empty response.
            return Response(simplified_feature)
        except Exception as detail:
            logger.exception('Exception in simplifying feature: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class StopSearch(APIView):
    """Stop a running generalization search for the current session."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            session_key = request.session.session_key

            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            logger.debug("StopSearch (session key: {0})".format(
                request.session.session_key))

            # Ask the service to stop the generalization search
            self.DataMiningClient.client.stopSearch(session_key)

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response('Generalization stopped correctly!')

        except Exception as detail:
            logger.exception('Exception in StopSearch(): ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class GetMarginalDrivingFeatures(APIView):
    """Mine marginal driving features relative to a given base feature."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Get threshold values for the metrics
            supp = float(request.POST['supp'])
            conf = float(request.POST['conf'])
            lift = float(request.POST['lift'])

            # Selected / non-selected arch ids (JSON-encoded lists)
            behavioral = json.loads(request.POST['selected'])
            non_behavioral = json.loads(request.POST['non_selected'])

            featureExpression = request.POST['featureExpression']
            logicalConnective = request.POST['logical_connective']

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            output = self.DataMiningClient.getMarginalDrivingFeatures(
                problem, inputType, behavioral, non_behavioral, dataset,
                featureExpression, logicalConnective, supp, conf, lift)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(output)

        except Exception as detail:
            # BUG FIX: '...' + detail concatenated str + Exception, which
            # raises TypeError inside the handler; wrap in str(). Also name
            # the correct view in the message.
            logger.exception('Exception in getMarginalDrivingFeatures: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
def data_mining_run(designs, behavioral, non_behavioral, context):
    """Run the automated local search and turn the top features into advice.

    Args:
        designs: architectures to mine over.
        behavioral: ids of the selected (target-class) architectures.
        non_behavioral: ids of the remaining architectures.
        context: session context used to render feature expressions.

    Returns:
        A list of {"type": "Analyzer", "advice": str} dicts for at most the
        first 3 mined features, or None if anything fails.
    """
    client = DataMiningClient()
    try:
        # Start connection with data_mining
        client.startConnection()

        support_threshold = 0.002
        confidence_threshold = 0.2
        lift_threshold = 1

        # features = client.getDrivingFeatures(behavioral, non_behavioral, designs, support_threshold, confidence_threshold, lift_threshold)
        features = client.runAutomatedLocalSearch(behavioral, non_behavioral,
                                                  designs, support_threshold,
                                                  confidence_threshold,
                                                  lift_threshold)

        # End the connection before return statement
        client.endConnection()

        result = []
        # Generate answers for at most the first 3 features
        # (simplified from the original pass/else dance)
        max_features = min(3, len(features))
        for i in range(max_features):
            advice = feature_expression_to_string(features[i]['name'], context)
            result.append({"type": "Analyzer", "advice": advice})
        return result

    except Exception:
        logger.exception('Exception in running data mining')
        client.endConnection()
        return None
class SetProblemGeneralizedConcepts(APIView):
    """Register generalized (higher-level) concepts for a problem with the service.

    A concept counts as "generalized" when it appears in the extended
    orbit/instrument list but not in the base list.
    """

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()
            sessionKey = request.session.session_key
            logger.debug(
                "SetProblemGeneralizedConcepts (session key: {0})".format(
                    sessionKey))

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            if problem == "ClimateCentric":
                # Keep only concepts added by the extended lists
                # (replaces the original `if x in y: pass else: append` loops)
                orbit_generalized_concepts = [
                    concept for concept in params['orbit_extended_list']
                    if concept not in params['orbit_list']
                ]
                instrument_generalized_concepts = [
                    concept for concept in params['instrument_extended_list']
                    if concept not in params['instrument_list']
                ]
                entities = AssigningProblemEntities(
                    instrument_generalized_concepts,
                    orbit_generalized_concepts)
                self.DataMiningClient.client.setAssigningProblemGeneralizedConcepts(
                    sessionKey, problem, entities)
            else:
                raise NotImplementedError(
                    "Unsupported problem formulation: {0}".format(problem))

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response()
        except Exception as detail:
            # BUG FIX: log previously said 'SetProblemParameters'
            # (copy-paste slip from a sibling view).
            logger.exception('Exception in SetProblemGeneralizedConcepts: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class SetProblemParameters(APIView):
    """Push problem parameters (JSON-encoded) to the data mining service."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            self.DataMiningClient.setProblemParameters(problem, params)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response()
        except Exception as detail:
            # BUG FIX: log previously said 'ComputeComplexityOfFeatures'
            # (copy-paste slip from a sibling view).
            logger.exception('Exception in SetProblemParameters: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class GeneralizationLocalSearch(APIView):
    """Run a generalization local search around a given feature expression."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Selected / non-selected arch ids (JSON-encoded lists)
            behavioral = json.loads(request.POST['selected'])
            non_behavioral = json.loads(request.POST['non_selected'])

            featureExpression = request.POST['featureExpression']

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            output = self.DataMiningClient.runGeneralizationLocalSearch(
                problem, inputType, behavioral, non_behavioral, dataset,
                featureExpression)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(output)

        except Exception as detail:
            # BUG FIX: '...' + detail concatenated str + Exception, which
            # raises TypeError inside the handler; wrap in str(). Also name
            # the correct view in the message.
            logger.exception('Exception in GeneralizationLocalSearch: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class GetProblemParameters(APIView):
    """Fetch problem entities and their concept hierarchy for the session."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        user_info = get_or_create_user_information(request.session,
                                                   request.user, 'EOSS')
        try:
            session_key = request.session.session_key

            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = None

            if problem == "ClimateCentric":
                entities = self.DataMiningClient.client.getAssigningProblemEntities(
                    session_key, problem)
                params = {
                    'leftSet': entities.leftSet,
                    'rightSet': entities.rightSet,
                }
                # Fetch the concept hierarchy for the entities just retrieved
                hierarchy = self.DataMiningClient.client.getAssigningProblemConceptHierarchy(
                    session_key, problem,
                    AssigningProblemEntities(params['leftSet'],
                                             params['rightSet']))
                params['instanceMap'] = hierarchy.instanceMap
                params['superclassMap'] = hierarchy.superclassMap
            else:
                raise NotImplementedError(
                    "Unsupported problem formulation: {0}".format(problem))

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response(params)

        except Exception as detail:
            logger.exception('Exception in GetProblemParameters: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class ConvertToCNF(APIView):
    """Convert a feature expression to conjunctive normal form."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()

            # Expression to convert
            expression = request.POST['expression']
            cnf_expression = self.DataMiningClient.convertToCNF(expression)

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response(cnf_expression)

        except Exception as detail:
            logger.exception('Exception in convertingToCNF: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
# NOTE(review): this class name duplicates the GetProblemParameters defined
# earlier in this module; the later definition shadows the earlier one.
# Confirm which one the URL conf is meant to use.
class GetProblemParameters(APIView):
    """Fetch the stored parameters for a problem from the data mining service."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = self.DataMiningClient.getProblemParameters(problem)

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response(params)

        except Exception as detail:
            logger.exception('Exception in GetProblemParameters: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class GetTaxonomicScheme(APIView):
    """Fetch the taxonomic scheme for a problem given JSON-encoded params."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            taxonomic_scheme = self.DataMiningClient.getTaxonomicScheme(problem, params)

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response(taxonomic_scheme)

        except Exception as detail:
            logger.exception('Exception in calling getTaxonomicScheme: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class getProblemConceptHierarchy(APIView):
    """Fetch the concept hierarchy (instance/superclass maps) for a problem."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            session_key = request.session.session_key

            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            concept_hierarchy = None
            if problem == "ClimateCentric":
                params = AssigningProblemEntities(params["instrument_list"],
                                                  params["orbit_list"])
                hierarchy = self.DataMiningClient.client.getAssigningProblemConceptHierarchy(
                    session_key, problem, params)
                concept_hierarchy = {
                    'instanceMap': hierarchy.instanceMap,
                    'superclassMap': hierarchy.superclassMap,
                }
            else:
                raise NotImplementedError(
                    "Unsupported problem formulation: {0}".format(problem))

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response(concept_hierarchy)

        except Exception as detail:
            logger.exception(
                'Exception in calling getProblemConceptHierarchy: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
# NOTE(review): duplicates the SetProblemParameters class name defined earlier
# in this module; the later definition shadows the earlier one.
class SetProblemParameters(APIView):
    """Set the assigning-problem entities on the data mining service.

    Responses from the service arrive asynchronously over a rabbitmq queue
    named '<sessionKey>_problemSetting'; a consumer thread forwards them to
    the user's websocket channel.
    """

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        user_info = get_or_create_user_information(request.session,
                                                   request.user, 'EOSS')

        # Start listening for redis inputs to share through websockets
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host='localhost'))
        channel = connection.channel()

        sessionKey = request.session.session_key
        logger.debug(
            "SetProblemParameters (session key: {0})".format(sessionKey))

        # One queue per session; purge any stale messages from earlier requests
        channel.queue_declare(queue=sessionKey + '_problemSetting')
        channel.queue_purge(queue=sessionKey + '_problemSetting')

        def callback(ch, method, properties, body):
            # Runs on the consumer thread: retag entity messages and forward
            # them to the user's websocket channel.
            thread_user_info = get_or_create_user_information(
                request.session, request.user, 'EOSS')
            message = json.loads(body)
            logger.debug(
                "Problem parameters received: (session key: {0})".format(
                    sessionKey))
            if message['type'] == 'entities':
                message['type'] = 'data.mining.problem.entities'
                # Look for channel to send back to user
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.send)(
                    thread_user_info.channel_name, message)

        channel.basic_consume(callback,
                              queue=sessionKey + '_problemSetting',
                              no_ack=True)
        # Consume on a background thread so this request can return while
        # the service's reply is still in flight.
        thread = threading.Thread(target=channel.start_consuming)
        thread.start()

        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            if problem == "ClimateCentric":
                entities = AssigningProblemEntities(params['instrument_list'],
                                                    params['orbit_list'])
                self.DataMiningClient.client.setAssigningProblemEntities(
                    sessionKey, problem, entities)
            else:
                raise NotImplementedError(
                    "Unsupported problem formulation: {0}".format(problem))

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response()
        except Exception as detail:
            logger.exception('Exception in SetProblemParameters: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
class GeneralizeFeature(APIView):
    """Kick off an asynchronous feature-generalization search.

    Search progress/results arrive over a rabbitmq queue named
    '<sessionKey>_generalization'; a consumer thread forwards
    'search_started' / 'search_finished' messages to the user's websocket.
    """

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()
        pass

    def post(self, request, format=None):
        # Start listening for redis inputs to share through websockets
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host='localhost'))
        channel = connection.channel()

        sessionKey = request.session.session_key
        logger.debug("GeneralizeFeature (session key: {0})".format(sessionKey))

        # One queue per session; purge any stale messages from earlier requests
        channel.queue_declare(queue=sessionKey + '_generalization')
        channel.queue_purge(queue=sessionKey + '_generalization')

        # Optional flag distinguishing user-initiated searches from automatic ones
        try:
            userInitiated = request.POST['userInitiated']
        except KeyError:
            userInitiated = None

        def callback(ch, method, properties, body):
            # Runs on the consumer thread: forward search lifecycle messages
            # to the user's websocket channel; stop consuming when finished.
            thread_user_info = get_or_create_user_information(
                request.session, request.user, 'EOSS')
            message = json.loads(body)

            if message['type'] == 'search_started':
                # Look for channel to send back to user
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.send)(
                    thread_user_info.channel_name, {
                        'type': 'data.mining.search.started',
                    })

            if message['type'] == 'search_finished':
                logger.debug('Ending the thread!')
                channel.stop_consuming()
                channel.close()

                message['type'] = 'data.mining.search.finished'
                message['searchMethod'] = 'generalization'
                message['userInitiated'] = userInitiated

                if 'features' in message:
                    if message['features'] != None and len(
                            message['features']) != 0:
                        logger.debug('Generalized features returned')

                # Look for channel to send back to user
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.send)(
                    thread_user_info.channel_name, message)

        channel.basic_consume(callback,
                              queue=sessionKey + '_generalization',
                              no_ack=True)
        # Consume on a background thread so this request can return while
        # the search is still running.
        thread = threading.Thread(target=channel.start_consuming)
        thread.start()

        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            # Get user information
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Get selected arch id's (JSON-encoded lists)
            behavioral = json.loads(request.POST['selected'])
            non_behavioral = json.loads(request.POST['non_selected'])

            rootFeatureExpression = request.POST['rootFeatureExpression']
            nodeFeatureExpression = request.POST['nodeFeatureExpression']

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('generalizeFeature() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            _all_archs = []
            if inputType == "binary":
                for arch in dataset:
                    _all_archs.append(
                        BinaryInputArchitecture(arch.id,
                                                json.loads(arch.inputs),
                                                json.loads(arch.outputs)))
                # Asynchronous call: results come back via the rabbitmq
                # callback above, not as a return value.
                self.DataMiningClient.client.generalizeFeatureBinary(
                    sessionKey, problem, behavioral, non_behavioral,
                    _all_archs, rootFeatureExpression, nodeFeatureExpression)
            elif inputType == "discrete":
                raise NotImplementedError()

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(None)
        except Exception as detail:
            logger.exception('Exception in calling GeneralizeFeature()')
            self.DataMiningClient.endConnection()
            return Response('')
# NOTE(review): duplicates the GetDrivingFeatures class name defined earlier
# in this module; the later definition shadows the earlier one.
class GetDrivingFeatures(APIView):
    """Mine driving features synchronously for binary or discrete inputs."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Open the connection to the data mining service
            self.DataMiningClient.startConnection()
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Metric thresholds for the association-rule mining
            supp = float(request.POST['supp'])
            conf = float(request.POST['conf'])
            lift = float(request.POST['lift'])

            # Arch ids arrive as "[1,2,3]" -- strip brackets, split, parse ints
            behavioral = [int(s) for s in
                          request.POST['selected'][1:-1].split(',')]
            non_behavioral = [int(s) for s in
                              request.POST['non_selected'][1:-1].split(',')]

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('getDrivingFeatures() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            # Wrap each stored design in the thrift architecture type matching
            # the declared input encoding, then run the mining call.
            if inputType == "binary":
                archs = [BinaryInputArchitecture(arch.id,
                                                 json.loads(arch.inputs),
                                                 json.loads(arch.outputs))
                         for arch in dataset]
                mined = self.DataMiningClient.client.getDrivingFeaturesBinary(
                    problem, behavioral, non_behavioral, archs,
                    supp, conf, lift)
            elif inputType == "discrete":
                archs = [DiscreteInputArchitecture(arch.id,
                                                   json.loads(arch.inputs),
                                                   json.loads(arch.outputs))
                         for arch in dataset]
                mined = self.DataMiningClient.client.getDrivingFeaturesDiscrete(
                    problem, behavioral, non_behavioral, archs,
                    supp, conf, lift)
            else:
                raise NotImplementedError(
                    "Unsupported input type: {0}".format(inputType))

            # Serialize the thrift feature objects into plain dicts
            features = [{
                'id': df.id,
                'name': df.name,
                'expression': df.expression,
                'metrics': df.metrics,
                'complexity': df.complexity
            } for df in mined]

            # Close the connection before returning
            self.DataMiningClient.endConnection()
            return Response(features)

        except Exception as detail:
            logger.exception('Exception in getDrivingFeatures: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
# NOTE(review): duplicates the GetMarginalDrivingFeatures class name defined
# earlier in this module; the later definition shadows the earlier one.
class GetMarginalDrivingFeatures(APIView):
    """Mine marginal driving features relative to a base feature.

    For binary inputs the search runs asynchronously: results arrive over a
    rabbitmq queue named '<sessionKey>_localSearch' and are forwarded to the
    user's websocket by a consumer thread, so this view returns an empty
    feature list. For discrete inputs the call is synchronous.
    """

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')
            sessionKey = request.session.session_key

            # Get selected arch id's (JSON-encoded lists)
            behavioral = json.loads(request.POST['selected'])
            non_behavioral = json.loads(request.POST['non_selected'])

            featureExpression = request.POST['featureExpression']
            logicalConnective = request.POST['logical_connective']

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('getMarginalDrivingFeatures() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            _all_archs = []
            if inputType == "binary":
                # Start listening for redis inputs to share through websockets
                connection = pika.BlockingConnection(
                    pika.ConnectionParameters(host='localhost'))
                channel = connection.channel()
                channel.queue_declare(queue=sessionKey + '_localSearch')
                channel.queue_purge(queue=sessionKey + '_localSearch')

                def callback(ch, method, properties, body):
                    # Consumer-thread callback: forward search lifecycle
                    # messages to the user's websocket channel.
                    thread_user_info = get_or_create_user_information(
                        request.session, request.user, 'EOSS')
                    message = json.loads(body)

                    if message['type'] == 'search_started':
                        # Look for channel to send back to user
                        channel_layer = get_channel_layer()
                        async_to_sync(channel_layer.send)(
                            thread_user_info.channel_name, {
                                'type': 'data.mining.search.started',
                            })

                    if message['type'] == 'search_finished':
                        message['type'] = 'data.mining.search.finished'
                        message['searchMethod'] = 'localSearch'
                        logger.debug('Ending the thread!')
                        channel.stop_consuming()
                        channel.close()

                        if 'features' in message:
                            if message['features'] != None and len(
                                    message['features']) != 0:
                                logger.debug('Features from local search returned')

                        # Look for channel to send back to user
                        channel_layer = get_channel_layer()
                        async_to_sync(channel_layer.send)(
                            thread_user_info.channel_name, message)

                channel.basic_consume(callback,
                                      queue=sessionKey + '_localSearch',
                                      no_ack=True)
                thread = threading.Thread(target=channel.start_consuming)
                thread.start()

                for arch in dataset:
                    _all_archs.append(
                        BinaryInputArchitecture(arch.id,
                                                json.loads(arch.inputs),
                                                json.loads(arch.outputs)))
                # Asynchronous: results arrive via the rabbitmq callback above
                self.DataMiningClient.client.getMarginalDrivingFeaturesBinary(
                    sessionKey, problem, behavioral, non_behavioral,
                    _all_archs, featureExpression, logicalConnective)
                _features = []
            elif inputType == "discrete":
                for arch in dataset:
                    _all_archs.append(
                        DiscreteInputArchitecture(arch.id,
                                                  json.loads(arch.inputs),
                                                  json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getMarginalDrivingFeaturesDiscrete(
                    sessionKey, problem, behavioral, non_behavioral,
                    _all_archs, featureExpression, logicalConnective)

            features = []
            for df in _features:
                features.append({
                    'id': df.id,
                    'name': df.name,
                    'expression': df.expression,
                    'metrics': df.metrics,
                    'complexity': df.complexity
                })

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(features)
        except Exception as detail:
            # BUG FIX: '...' + detail concatenated str + Exception, which
            # raises TypeError inside the handler; wrap in str().
            logger.exception(
                'Exception in calling GetMarginalDrivingFeatures(): ' +
                str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
def __init__(self):
    # Thrift client wrapper for the data mining service; connections are
    # opened/closed per call by the methods that use it.
    self.DataMiningClient = DataMiningClient()
def analyst_critic(self, this_design):
    """Generate analyst advice for `this_design` by mining driving features.

    Ranks all stored designs by distance to a hard-coded utopia point,
    labels the closest 10% as the target class, mines a driving feature,
    and turns the (un)satisfied parts of that feature into advice strings.

    Returns:
        A list of {"type": "Analyst", "advice": str} dicts (empty on failure).
    """
    result = []
    client = DataMiningClient()

    problem = self.context.eosscontext.problem
    if problem in self.assignation_problems:
        problem_type = 'binary'
    elif problem in self.partition_problems:
        problem_type = 'discrete'
    else:
        problem_type = 'unknown'

    try:
        # Start connection with data_mining
        client.startConnection()

        support_threshold = 0.02
        confidence_threshold = 0.2
        lift_threshold = 1

        behavioral = []
        non_behavioral = []

        dataset = Design.objects.filter(
            eosscontext_id__exact=self.context.eosscontext.id).all()

        if len(dataset) < 10:
            raise ValueError(
                "Could not run data mining: the number of samples is less than 10"
            )
        else:
            # NOTE(review): utopia point is hard-coded for this problem's
            # two-objective output space -- confirm it matches the problem.
            utopiaPoint = [0.26, 0]
            temp = []
            # Select the top N% archs based on the distance to the utopia point
            for design in dataset:
                # BUG FIX: previously read this_design.outputs here, which
                # gave every design the identical distance and made the
                # ranking degenerate to id order.
                outputs = json.loads(design.outputs)
                dist = math.sqrt((outputs[0] - utopiaPoint[0])**2 +
                                 (outputs[1] - utopiaPoint[1])**2)
                temp.append((design.id, dist))

            # Sort the list based on the distance to the utopia point
            temp = sorted(temp, key=lambda x: x[1])
            for i in range(len(temp)):
                if i <= len(temp) // 10:
                    # Label the top 10% architectures as behavioral
                    behavioral.append(temp[i][0])
                else:
                    non_behavioral.append(temp[i][0])

        # Extract feature
        # features = client.getDrivingFeatures(behavioral, non_behavioral, designs, support_threshold, confidence_threshold, lift_threshold)
        features = client.runAutomatedLocalSearch(
            problem, problem_type, behavioral, non_behavioral, dataset,
            support_threshold, confidence_threshold, lift_threshold)

        advices = []
        if not len(features) == 0:
            # Compare the top feature to the current design
            unsatisfied = get_feature_unsatisfied(features[0]['name'],
                                                  this_design, self.context)
            satisfied = get_feature_satisfied(features[0]['name'],
                                              this_design, self.context)

            # Helpers may return a single expression or a list; normalize
            if type(unsatisfied) is not list:
                unsatisfied = [unsatisfied]
            if type(satisfied) is not list:
                satisfied = [satisfied]

            for exp in unsatisfied:
                if exp == "":
                    continue
                advices.append(
                    "Based on the data mining result, I advise you to make the following change: "
                    + feature_expression_to_string(
                        exp, is_critique=True, context=self.context))

            for exp in satisfied:
                if exp == "":
                    continue
                advices.append(
                    "Based on the data mining result, these are the good features. Consider keeping them: "
                    + feature_expression_to_string(
                        exp, is_critique=False, context=self.context))

        # End the connection before return statement
        client.endConnection()

        # Turn every advice string into a result entry
        # (fixed misleading comment: all advices are used, not "first 5")
        for advice in advices:
            result.append({"type": "Analyst", "advice": advice})

    except Exception as e:
        print("Exc in generating critic from data mining: " + str(e))
        traceback.print_exc(file=sys.stdout)
        client.endConnection()

    return result
class GetDrivingFeaturesEpsilonMOEA(APIView):
    """Mine driving features using the epsilon-MOEA search for all input types."""

    def __init__(self):
        # Thrift client wrapper for the data mining service
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')
            sessionKey = request.session.session_key

            # Arch ids arrive as "[1,2,3]" -- strip brackets, split, parse ints
            behavioral = [int(s) for s in
                          request.POST['selected'][1:-1].split(',')]
            non_behavioral = [int(s) for s in
                              request.POST['non_selected'][1:-1].split(',')]

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('getDrivingFeaturesEpsilonMOEA() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            _archs = []
            if inputType == "binary":
                for arch in dataset:
                    _archs.append(
                        BinaryInputArchitecture(arch.id,
                                                json.loads(arch.inputs),
                                                json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getDrivingFeaturesEpsilonMOEABinary(
                    sessionKey, problem, behavioral, non_behavioral, _archs)
            elif inputType == "discrete":
                for arch in dataset:
                    _archs.append(
                        DiscreteInputArchitecture(arch.id,
                                                  json.loads(arch.inputs),
                                                  json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getDrivingFeaturesEpsilonMOEADiscrete(
                    sessionKey, problem, behavioral, non_behavioral, _archs)
            elif inputType == "continuous":
                # NOTE(review): this branch indexes `arch` like a dict
                # (arch['inputs']) while the others use attributes -- confirm
                # the continuous dataset really is dict-shaped.
                for arch in dataset:
                    # Drop None inputs; coerce the rest to float
                    inputs = [float(i) for i in arch['inputs'] if i is not None]
                    _archs.append(
                        ContinuousInputArchitecture(arch['id'], inputs,
                                                    arch['outputs']))
                _features = self.DataMiningClient.client.getDrivingFeaturesEpsilonMOEAContinuous(
                    problem, behavioral, non_behavioral, _archs)
            else:
                # BUG FIX: an unknown input type previously fell through and
                # raised NameError on `_features`; fail explicitly instead,
                # consistent with the sibling views.
                raise NotImplementedError(
                    "Unsupported input type: {0}".format(inputType))

            # Serialize the thrift feature objects into plain dicts
            features = []
            for df in _features:
                features.append({
                    'id': df.id,
                    'name': df.name,
                    'expression': df.expression,
                    'metrics': df.metrics
                })

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(features)
        except Exception as detail:
            logger.exception('Exception in getDrivingFeatures: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')