Example 1
class GeneralizationLocalSearch(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):

        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Get selected arch id's
            behavioral = json.loads(request.POST['selected'])
            non_behavioral = json.loads(request.POST['non_selected'])

            featureExpression = request.POST['featureExpression']

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            drivingFeatures = self.DataMiningClient.runGeneralizationLocalSearch(
                problem, inputType, behavioral, non_behavioral, dataset,
                featureExpression)
            output = drivingFeatures

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(output)

        except Exception as detail:
            logger.exception('Exception in GeneralizationLocalSearch: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
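
A minimal usage sketch for the view above, assuming a hypothetical URL mount point and an already-authenticated session with an EOSS context. Only the POST field names ('selected', 'non_selected', 'featureExpression', 'problem', 'input_type') come from the view; the host, path, and field values below are illustrative.

import json
import requests

# All values are placeholders; only the field names mirror the view above.
payload = {
    'selected': json.dumps([0, 3, 17]),      # behavioral architecture ids
    'non_selected': json.dumps([1, 2, 4]),   # non-behavioral architecture ids
    'featureExpression': '<root feature expression>',  # placeholder
    'problem': 'ClimateCentric',
    'input_type': 'binary',
}

# Hypothetical endpoint path; adjust to wherever the view is routed.
response = requests.post(
    'http://localhost:8000/api/data-mining/generalization-local-search',
    data=payload)
print(response.status_code, response.text)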
Example 2
class GetProblemParameters(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = self.DataMiningClient.getProblemParameters(problem)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(params)

        except Exception as detail:
            logger.exception('Exception in GetProblemParameters: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
Example 3
class ComputeComplexity(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            # Get the expression
            expression = request.POST['expression']
            complexity = self.DataMiningClient.computeComplexity(expression)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(complexity)

        except Exception as detail:
            logger.exception('Exception in ComputeComplexity: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
Example 4
class getProblemConceptHierarchy(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            sessionKey = request.session.session_key

            # Start data mining client
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            conceptHierarchy = None
            if problem == "ClimateCentric":
                params = AssigningProblemEntities(params["instrument_list"],
                                                  params["orbit_list"])
                conceptHierarchy_ = self.DataMiningClient.client.getAssigningProblemConceptHierarchy(
                    sessionKey, problem, params)
                conceptHierarchy = {
                    'instanceMap': conceptHierarchy_.instanceMap,
                    'superclassMap': conceptHierarchy_.superclassMap
                }
            else:
                raise NotImplementedError(
                    "Unsupported problem formulation: {0}".format(problem))

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(conceptHierarchy)

        except Exception as detail:
            logger.exception(
                'Exception in calling getProblemConceptHierarchy: ' +
                str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
Example 5
class SetProblemParameters(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])
            self.DataMiningClient.setProblemParameters(problem, params)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response()

        except Exception as detail:
            logger.exception('Exception in SetProblemParameters: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
Example 6
def data_mining_run(designs, behavioral, non_behavioral, context):
    client = DataMiningClient()
    try:
        # Start connection with data_mining
        client.startConnection()

        support_threshold = 0.002
        confidence_threshold = 0.2
        lift_threshold = 1

        # features = client.getDrivingFeatures(behavioral, non_behavioral, designs, support_threshold, confidence_threshold, lift_threshold)
        features = client.runAutomatedLocalSearch(behavioral, non_behavioral,
                                                  designs, support_threshold,
                                                  confidence_threshold,
                                                  lift_threshold)

        # End the connection before return statement
        client.endConnection()

        result = []
        max_features = min(3, len(features))

        # Generate answers for at most the first 3 features
        for i in range(max_features):
            advice = feature_expression_to_string(features[i]['name'], context)
            result.append({"type": "Analyzer", "advice": advice})
        return result

    except Exception:
        logger.exception('Exception in running data mining')
        client.endConnection()
        return None
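
Every example here opens the connection with startConnection() and closes it with endConnection() on both the success and the exception path. A minimal sketch of a context manager that factors out that pattern follows; it assumes only the DataMiningClient constructor and the startConnection()/endConnection() methods seen above and is not part of the original module.

from contextlib import contextmanager


@contextmanager
def data_mining_connection():
    # Assumes the DataMiningClient class used throughout these examples.
    client = DataMiningClient()
    client.startConnection()
    try:
        yield client
    finally:
        # Always close the connection, mirroring the endConnection() calls in
        # both the try and except branches of the views above.
        client.endConnection()


# Illustrative usage (argument values are placeholders):
# with data_mining_connection() as client:
#     features = client.runAutomatedLocalSearch(behavioral, non_behavioral,
#                                               designs, 0.002, 0.2, 1)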
Example 7
class GetTaxonomicScheme(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            taxonomicScheme = self.DataMiningClient.getTaxonomicScheme(
                problem, params)

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(taxonomicScheme)

        except Exception as detail:
            logger.exception('Exception in calling getTaxonomicScheme: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
Example 8
    def analyst_critic(self, this_design):
        result = []
        client = DataMiningClient()

        problem = self.context.eosscontext.problem
        if problem in self.assignation_problems:
            problem_type = 'binary'
        elif problem in self.partition_problems:
            problem_type = 'discrete'
        else:
            problem_type = 'unknown'

        try:
            # Start connection with data_mining
            client.startConnection()

            support_threshold = 0.02
            confidence_threshold = 0.2
            lift_threshold = 1

            behavioral = []
            non_behavioral = []

            dataset = Design.objects.filter(
                eosscontext_id__exact=self.context.eosscontext.id).all()

            if len(dataset) < 10:
                raise ValueError(
                    "Could not run data mining: the number of samples is less than 10"
                )
            else:

                utopiaPoint = [0.26, 0]
                temp = []
                # Select the top N% archs based on the distance to the utopia point
                for design in dataset:
                    outputs = json.loads(design.outputs)
                    id = design.id
                    dist = math.sqrt((outputs[0] - utopiaPoint[0])**2 +
                                     (outputs[1] - utopiaPoint[1])**2)
                    temp.append((id, dist))

                # Sort the list based on the distance to the utopia point
                temp = sorted(temp, key=lambda x: x[1])
                # Label the top 10% architectures as behavioral
                cutoff = len(temp) // 10
                for i in range(len(temp)):
                    if i <= cutoff:
                        behavioral.append(temp[i][0])
                    else:
                        non_behavioral.append(temp[i][0])

            # Extract feature
            # features = client.getDrivingFeatures(behavioral, non_behavioral, designs, support_threshold, confidence_threshold, lift_threshold)
            features = client.runAutomatedLocalSearch(
                problem, problem_type, behavioral, non_behavioral, dataset,
                support_threshold, confidence_threshold, lift_threshold)

            advices = []
            if len(features) != 0:

                # Compare features to the current design
                unsatisfied = get_feature_unsatisfied(features[0]['name'],
                                                      this_design,
                                                      self.context)
                satisfied = get_feature_satisfied(features[0]['name'],
                                                  this_design, self.context)

                if type(unsatisfied) is not list:
                    unsatisfied = [unsatisfied]

                if type(satisfied) is not list:
                    satisfied = [satisfied]

                for exp in unsatisfied:
                    if exp == "":
                        continue
                    advices.append(
                        "Based on the data mining result, I advise you to make the following change: "
                        + feature_expression_to_string(
                            exp, is_critique=True, context=self.context))

                for exp in satisfied:
                    if exp == "":
                        continue
                    advices.append(
                        "Based on the data mining result, these are the good features. Consider keeping them: "
                        + feature_expression_to_string(
                            exp, is_critique=False, context=self.context))

            # End the connection before return statement
            client.endConnection()

            # Turn each advice string into an Analyst response entry
            for advice in advices:
                result.append({"type": "Analyst", "advice": advice})
        except Exception as e:
            print("Exc in generating critic from data mining: " + str(e))
            traceback.print_exc(file=sys.stdout)
            client.endConnection()

        return result
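
The behavioral/non-behavioral split in analyst_critic can also be read as a small standalone helper. The sketch below reproduces the same top-10% labeling by distance to the utopia point; it assumes each design row exposes an id attribute and a JSON-encoded outputs field, as in the loop above, and the helper name is hypothetical.

import json
import math


def split_by_utopia_distance(dataset, utopia=(0.26, 0.0)):
    # Hypothetical helper, not part of the original module.
    scored = []
    for design in dataset:
        outputs = json.loads(design.outputs)
        dist = math.sqrt((outputs[0] - utopia[0])**2 +
                         (outputs[1] - utopia[1])**2)
        scored.append((design.id, dist))

    # Sort by distance and label the closest ~10% as behavioral.
    scored.sort(key=lambda pair: pair[1])
    cutoff = len(scored) // 10
    behavioral = [arch_id for arch_id, _ in scored[:cutoff + 1]]
    non_behavioral = [arch_id for arch_id, _ in scored[cutoff + 1:]]
    return behavioral, non_behavioral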
Example 9
class SetProblemParameters(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        user_info = get_or_create_user_information(request.session,
                                                   request.user, 'EOSS')

        # Start listening for redis inputs to share through websockets
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host='localhost'))
        channel = connection.channel()

        sessionKey = request.session.session_key
        logger.debug(
            "SetProblemParameters (session key: {0})".format(sessionKey))

        channel.queue_declare(queue=sessionKey + '_problemSetting')
        channel.queue_purge(queue=sessionKey + '_problemSetting')

        def callback(ch, method, properties, body):
            thread_user_info = get_or_create_user_information(
                request.session, request.user, 'EOSS')
            message = json.loads(body)

            logger.debug(
                "Problem parameters received: (session key: {0})".format(
                    sessionKey))

            if message['type'] == 'entities':
                message['type'] = 'data.mining.problem.entities'

                # Look for channel to send back to user
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.send)(
                    thread_user_info.channel_name, message)

        channel.basic_consume(callback,
                              queue=sessionKey + '_problemSetting',
                              no_ack=True)
        thread = threading.Thread(target=channel.start_consuming)
        thread.start()

        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            problem = request.POST['problem']
            params = json.loads(request.POST['params'])

            if problem == "ClimateCentric":
                entities = AssigningProblemEntities(params['instrument_list'],
                                                    params['orbit_list'])
                self.DataMiningClient.client.setAssigningProblemEntities(
                    sessionKey, problem, entities)

            else:
                raise NotImplementedError(
                    "Unsupported problem formulation: {0}".format(problem))

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response()

        except Exception as detail:
            logger.exception('Exception in SetProblemParameters: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
Example 10
class GeneralizeFeature(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        # Start listening for redis inputs to share through websockets
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host='localhost'))
        channel = connection.channel()

        sessionKey = request.session.session_key
        logger.debug("GeneralizeFeature (session key: {0})".format(sessionKey))

        channel.queue_declare(queue=sessionKey + '_generalization')
        channel.queue_purge(queue=sessionKey + '_generalization')

        try:
            userInitiated = request.POST['userInitiated']
        except KeyError:
            userInitiated = None

        def callback(ch, method, properties, body):
            thread_user_info = get_or_create_user_information(
                request.session, request.user, 'EOSS')
            message = json.loads(body)

            if message['type'] == 'search_started':
                # Look for channel to send back to user
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.send)(
                    thread_user_info.channel_name, {
                        'type': 'data.mining.search.started',
                    })

            if message['type'] == 'search_finished':
                logger.debug('Ending the thread!')
                channel.stop_consuming()
                channel.close()

                message['type'] = 'data.mining.search.finished'
                message['searchMethod'] = 'generalization'
                message['userInitiated'] = userInitiated

                if 'features' in message:
                    if message['features'] is not None and len(
                            message['features']) != 0:
                        logger.debug('Generalized features returned')

                # Look for channel to send back to user
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.send)(
                    thread_user_info.channel_name, message)

        channel.basic_consume(callback,
                              queue=sessionKey + '_generalization',
                              no_ack=True)
        thread = threading.Thread(target=channel.start_consuming)
        thread.start()

        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            # Get user information
            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Get selected arch id's
            behavioral = json.loads(request.POST['selected'])
            non_behavioral = json.loads(request.POST['non_selected'])

            rootFeatureExpression = request.POST['rootFeatureExpression']
            nodeFeatureExpression = request.POST['nodeFeatureExpression']

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('generalizeFeature() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            _all_archs = []
            if inputType == "binary":
                for arch in dataset:
                    _all_archs.append(
                        BinaryInputArchitecture(arch.id,
                                                json.loads(arch.inputs),
                                                json.loads(arch.outputs)))

                self.DataMiningClient.client.generalizeFeatureBinary(
                    sessionKey, problem, behavioral, non_behavioral,
                    _all_archs, rootFeatureExpression, nodeFeatureExpression)

            elif inputType == "discrete":
                raise NotImplementedError()

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(None)

        except Exception as detail:
            logger.exception('Exception in calling GeneralizeFeature()')
            self.DataMiningClient.endConnection()
            return Response('')
Example 11
class GetDrivingFeatures(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')

            # Get threshold values for the metrics
            supp = float(request.POST['supp'])
            conf = float(request.POST['conf'])
            lift = float(request.POST['lift'])

            # Get selected arch id's
            selected = request.POST['selected']
            selected = selected[1:-1]
            selected_arch_ids = selected.split(',')
            # Convert strings to ints
            behavioral = []
            for s in selected_arch_ids:
                behavioral.append(int(s))

            # Get non-selected arch id's
            non_selected = request.POST['non_selected']
            non_selected = non_selected[1:-1]
            non_selected_arch_ids = non_selected.split(',')
            # Convert strings to ints
            non_behavioral = []
            for s in non_selected_arch_ids:
                non_behavioral.append(int(s))

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('getDrivingFeatures() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            _archs = []
            if inputType == "binary":
                for arch in dataset:
                    _archs.append(
                        BinaryInputArchitecture(arch.id,
                                                json.loads(arch.inputs),
                                                json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getDrivingFeaturesBinary(
                    problem, behavioral, non_behavioral, _archs, supp, conf,
                    lift)

            elif inputType == "discrete":
                for arch in dataset:
                    _archs.append(
                        DiscreteInputArchitecture(arch.id,
                                                  json.loads(arch.inputs),
                                                  json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getDrivingFeaturesDiscrete(
                    problem, behavioral, non_behavioral, _archs, supp, conf,
                    lift)

            else:
                raise NotImplementedError(
                    "Unsupported input type: {0}".format(inputType))

            features = []
            for df in _features:
                features.append({
                    'id': df.id,
                    'name': df.name,
                    'expression': df.expression,
                    'metrics': df.metrics,
                    'complexity': df.complexity
                })

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(features)

        except Exception as detail:
            logger.exception('Exception in getDrivingFeatures: ' + str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
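
The 'selected' and 'non_selected' fields arrive as bracketed strings such as "[1,2,3]" and are parsed above by slicing off the brackets and splitting on commas. Other views in this set (Examples 1 and 12) parse the same fields with json.loads; a small sketch of that alternative is below, assuming the client always sends well-formed JSON arrays of integers.

import json


def parse_id_list(raw):
    # Parse a bracketed id string such as "[1,2,3]" into a list of ints.
    # The name parse_id_list is hypothetical; assumes well-formed JSON.
    return [int(arch_id) for arch_id in json.loads(raw)]


# Illustrative usage inside the view:
# behavioral = parse_id_list(request.POST['selected'])
# non_behavioral = parse_id_list(request.POST['non_selected'])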
Example 12
class GetMarginalDrivingFeatures(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')
            sessionKey = request.session.session_key

            # Get selected arch id's
            behavioral = json.loads(request.POST['selected'])
            non_behavioral = json.loads(request.POST['non_selected'])

            featureExpression = request.POST['featureExpression']
            logicalConnective = request.POST['logical_connective']

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('getMarginalDrivingFeatures() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            _all_archs = []
            if inputType == "binary":

                # Start listening for redis inputs to share through websockets
                connection = pika.BlockingConnection(
                    pika.ConnectionParameters(host='localhost'))
                channel = connection.channel()
                channel.queue_declare(queue=sessionKey + '_localSearch')
                channel.queue_purge(queue=sessionKey + '_localSearch')

                def callback(ch, method, properties, body):
                    thread_user_info = get_or_create_user_information(
                        request.session, request.user, 'EOSS')
                    message = json.loads(body)

                    if message['type'] == 'search_started':
                        # Look for channel to send back to user
                        channel_layer = get_channel_layer()
                        async_to_sync(channel_layer.send)(
                            thread_user_info.channel_name, {
                                'type': 'data.mining.search.started',
                            })

                    if message['type'] == 'search_finished':
                        message['type'] = 'data.mining.search.finished'
                        message['searchMethod'] = 'localSearch'

                        logger.debug('Ending the thread!')
                        channel.stop_consuming()
                        channel.close()

                        if 'features' in message:
                            if message['features'] is not None and len(
                                    message['features']) != 0:
                                logger.debug(
                                    'Features from local search returned')

                        # Look for channel to send back to user
                        channel_layer = get_channel_layer()
                        async_to_sync(channel_layer.send)(
                            thread_user_info.channel_name, message)

                channel.basic_consume(callback,
                                      queue=sessionKey + '_localSearch',
                                      no_ack=True)
                thread = threading.Thread(target=channel.start_consuming)
                thread.start()

                for arch in dataset:
                    _all_archs.append(
                        BinaryInputArchitecture(arch.id,
                                                json.loads(arch.inputs),
                                                json.loads(arch.outputs)))
                self.DataMiningClient.client.getMarginalDrivingFeaturesBinary(
                    sessionKey, problem, behavioral, non_behavioral,
                    _all_archs, featureExpression, logicalConnective)
                _features = []

            elif inputType == "discrete":
                for arch in dataset:
                    _all_archs.append(
                        DiscreteInputArchitecture(arch.id,
                                                  json.loads(arch.inputs),
                                                  json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getMarginalDrivingFeaturesDiscrete(
                    sessionKey, problem, behavioral, non_behavioral,
                    _all_archs, featureExpression, logicalConnective)

            else:
                raise NotImplementedError(
                    "Unsupported input type: {0}".format(inputType))

            features = []
            for df in _features:
                features.append({
                    'id': df.id,
                    'name': df.name,
                    'expression': df.expression,
                    'metrics': df.metrics,
                    'complexity': df.complexity
                })

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(features)

        except Exception as detail:
            logger.exception(
                'Exception in calling GetMarginalDrivingFeatures(): ' +
                str(detail))
            self.DataMiningClient.endConnection()
            return Response('')
Example 13
class GetDrivingFeaturesEpsilonMOEA(APIView):
    def __init__(self):
        self.DataMiningClient = DataMiningClient()

    def post(self, request, format=None):
        try:
            # Start data mining client
            self.DataMiningClient.startConnection()

            user_info = get_or_create_user_information(request.session,
                                                       request.user, 'EOSS')
            sessionKey = request.session.session_key

            # Get selected arch id's
            selected = request.POST['selected']
            selected = selected[1:-1]
            selected_arch_ids = selected.split(',')

            # Convert strings to ints
            behavioral = []
            for s in selected_arch_ids:
                behavioral.append(int(s))

            # Get non-selected arch id's
            non_selected = request.POST['non_selected']
            non_selected = non_selected[1:-1]
            non_selected_arch_ids = non_selected.split(',')
            # Convert strings to ints
            non_behavioral = []
            for s in non_selected_arch_ids:
                non_behavioral.append(int(s))

            # Load architecture data from the session info
            dataset = Design.objects.filter(
                eosscontext_id__exact=user_info.eosscontext.id).all()

            problem = request.POST['problem']
            inputType = request.POST['input_type']

            logger.debug('getDrivingFeaturesEpsilonMOEA() called ... ')
            logger.debug('b_length:{0}, nb_length:{1}, narchs:{2}'.format(
                len(behavioral), len(non_behavioral), len(dataset)))

            _archs = []
            if inputType == "binary":
                for arch in dataset:
                    _archs.append(
                        BinaryInputArchitecture(arch.id,
                                                json.loads(arch.inputs),
                                                json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getDrivingFeaturesEpsilonMOEABinary(
                    sessionKey, problem, behavioral, non_behavioral, _archs)

            elif inputType == "discrete":
                for arch in dataset:
                    _archs.append(
                        DiscreteInputArchitecture(arch.id,
                                                  json.loads(arch.inputs),
                                                  json.loads(arch.outputs)))
                _features = self.DataMiningClient.client.getDrivingFeaturesEpsilonMOEADiscrete(
                    sessionKey, problem, behavioral, non_behavioral, _archs)

            elif inputType == "continuous":
                for arch in dataset:
                    inputs = []
                    for i in arch['inputs']:
                        if i is not None:
                            inputs.append(float(i))

                    _archs.append(
                        ContinuousInputArchitecture(arch['id'], inputs,
                                                    arch['outputs']))
                _features = self.DataMiningClient.client.getDrivingFeaturesEpsilonMOEAContinuous(
                    problem, behavioral, non_behavioral, _archs)

            features = []
            for df in _features:
                features.append({
                    'id': df.id,
                    'name': df.name,
                    'expression': df.expression,
                    'metrics': df.metrics
                })

            # End the connection before return statement
            self.DataMiningClient.endConnection()
            return Response(features)

        except Exception as detail:
            logger.exception('Exception in getDrivingFeaturesEpsilonMOEA: ' +
                             str(detail))
            self.DataMiningClient.endConnection()
            return Response('')