def receive_json(self, content, **kwargs):
    """
    Called when we get a text frame. Channels will JSON-decode the payload
    for us and pass it as the first argument.

    Handles two message types:
      * 'add_action'   — persist an ExperimentAction on the indexed stage and
                         echo it back to the client.
      * 'update_state' — persist the serialized experiment state and echo it.
    """
    # Get an updated session store
    user_info = get_or_create_user_information(self.scope['session'], self.scope['user'], 'EOSS')
    experiment_context = user_info.eosscontext.experimentcontext
    if content.get('msg_type') == 'add_action':
        experiment_stage = experiment_context.experimentstage_set.all(
        ).order_by("id")[content['stage']]
        # Use a single timestamp so the stored action and the echoed
        # response carry exactly the same date (the original called
        # utcnow() twice, producing slightly different values).
        now = datetime.datetime.utcnow()
        ExperimentAction.objects.create(experimentstage=experiment_stage,
                                        action=json.dumps(content['action']),
                                        date=now)
        self.send_json({
            'action': content['action'],
            'date': now.isoformat()
        })
    elif content.get('msg_type') == 'update_state':
        experiment_context.current_state = json.dumps(content['state'])
        experiment_context.save()
        self.send_json({"state": content["state"]})
def get(self, request, format=None):
    """Finish the experiment: dump every stage and its actions to a JSON
    results file named after the experiment id, then delete the context."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    experiment_context = user_info.eosscontext.experimentcontext
    # Save experiment results to file
    results_path = './experiment_API/results/' + str(experiment_context.experiment_id) + '.json'
    with open(results_path, 'w') as f:
        json_experiment = {
            "experiment_id": experiment_context.experiment_id,
            "current_state": json.loads(experiment_context.current_state),
            "stages": []
        }
        # Removed a leftover debug print of (stage.type, stage.end_state).
        for stage in experiment_context.experimentstage_set.all():
            json_stage = {
                "type": stage.type,
                "start_date": stage.start_date.isoformat(),
                "end_date": stage.end_date.isoformat(),
                "end_state": json.loads(stage.end_state),
                "actions": []
            }
            for action in stage.experimentaction_set.all():
                json_stage["actions"].append({
                    "action": json.loads(action.action),
                    "date": action.date.isoformat()
                })
            json_experiment["stages"].append(json_stage)
        json.dump(json_experiment, f)
    experiment_context.delete()
    return Response('Experiment finished correctly!')
def post(self, request, format=None):
    """Run the generalization local search over the selected / non-selected
    designs and return the driving features."""
    try:
        # Start data mining client
        self.DataMiningClient.startConnection()
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        # Get selected arch id's
        behavioral = json.loads(request.POST['selected'])
        non_behavioral = json.loads(request.POST['non_selected'])
        featureExpression = request.POST['featureExpression']
        # Load architecture data from the session info
        dataset = Design.objects.filter(
            eosscontext_id__exact=user_info.eosscontext.id).all()
        problem = request.POST['problem']
        inputType = request.POST['input_type']
        drivingFeatures = self.DataMiningClient.runGeneralizationLocalSearch(
            problem, inputType, behavioral, non_behavioral, dataset,
            featureExpression)
        output = drivingFeatures
        # End the connection before return statement
        self.DataMiningClient.endConnection()
        return Response(output)
    except Exception as detail:
        # str(detail): concatenating the exception object directly raises a
        # TypeError inside the handler and masks the original error.
        logger.exception('Exception in getDrivingFeatures: ' + str(detail))
        self.DataMiningClient.endConnection()
        return Response('')
def get(self, request, format=None):
    """Download a previously saved dataset file as a CSV attachment.

    Only available to authenticated users; the file is looked up under the
    per-user data directory for the current problem.
    """
    if request.user.is_authenticated:
        try:
            user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
            # Get the problem type (kept for parity with the save endpoint)
            assignation_problems = ['SMAP', 'SMAP_JPL1', 'SMAP_JPL2', 'ClimateCentric']
            partition_problems = ['Decadal2017Aerosols']
            problem = user_info.eosscontext.problem
            if problem in assignation_problems:
                problem_type = 'binary'
            elif problem in partition_problems:
                problem_type = 'discrete'
            else:
                problem_type = 'unknown'
            # Set the path of the file where the data will be saved
            user_path = request.user.username
            filename = request.query_params['filename']
            file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                     'data', user_path, problem, filename)
            # Read with a context manager so the handle is closed
            # (the original leaked the file object).
            with open(file_path, "r") as csv_file:
                csv_data = csv_file.read()
            # Create the HttpResponse object with the appropriate CSV header.
            response = HttpResponse(csv_data, content_type='text/csv')
            response['Content-Disposition'] = 'attachment; filename="' + filename + '"'
            return response
        except Exception as exc:
            # Chain the cause so the original failure is not lost.
            raise ValueError("There has been an error when downloading the file") from exc
    else:
        return Response('This is only available to registered users!')
def post(self, request, format=None):
    """Store the problem name selected by the frontend on the user's EOSS context."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    user_info.eosscontext.problem = request.data['problem']
    user_info.eosscontext.save()
    user_info.save()
    return Response({})
def callback(ch, method, properties, body):
    # RabbitMQ consumer callback: relays data-mining search progress
    # messages to the user's websocket channel.
    # NOTE(review): relies on `request`, `channel` and `userInitiated`
    # captured from an enclosing scope not visible in this chunk — confirm
    # against the surrounding function. Indentation of the final send was
    # reconstructed from the original comment placement; verify it runs on
    # every 'search_finished' message.
    thread_user_info = get_or_create_user_information(
        request.session, request.user, 'EOSS')
    message = json.loads(body)
    if message['type'] == 'search_started':
        # Look for channel to send back to user
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.send)(
            thread_user_info.channel_name, {
                'type': 'data.mining.search.started',
            })
    if message['type'] == 'search_finished':
        logger.debug('Ending the thread!')
        # Stop consuming so the listening thread can exit.
        channel.stop_consuming()
        channel.close()
        # Rewrite the message type to the Channels consumer-handler form.
        message['type'] = 'data.mining.search.finished'
        message['searchMethod'] = 'generalization'
        message['userInitiated'] = userInitiated
        if 'features' in message:
            if message['features'] != None and len(
                    message['features']) != 0:
                logger.debug('Generalized features returned')
        # Look for channel to send back to user
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.send)(
            thread_user_info.channel_name, message)
def post(self, request, format=None):
    """Run a VASSAR local search seeded from the given inputs and append the
    resulting architectures to the user's dataset."""
    try:
        # Start connection with VASSAR
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        self.VASSARClient = VASSARClient(user_info.eosscontext.vassar_port)
        self.VASSARClient.startConnection()
        seed_inputs = json.loads(request.data['inputs'])
        architectures = self.VASSARClient.runLocalSearch(seed_inputs)
        for arch in architectures:
            arch['id'] = user_info.eosscontext.last_arch_id
            add_design(arch, user_info.eosscontext, False)
        user_info.save()
        # End the connection before return statement
        self.VASSARClient.endConnection()
        return Response(architectures)
    except Exception:
        logger.exception('Exception in evaluating an architecture')
        self.VASSARClient.endConnection()
        return Response('')
def post(self, request, format=None):
    """Log a user in, adopting the anonymous session's UserInformation when
    the user does not yet have one of their own."""
    # Try to authorize the user
    username = request.data['username']
    password = request.data['password']
    user = authenticate(request, username=username, password=password)
    if user is not None:
        # Try to look for user session object. If it exists, then the session will
        # be changed to that. If not, the current session information will be
        # transferred to the user.
        userinfo_qs = UserInformation.objects.filter(user__exact=user)
        # exists() issues a cheap EXISTS query instead of fetching every row.
        if not userinfo_qs.exists():
            # Try to get or create a session user_info from the session and transfer it to the user
            userinfo = get_or_create_user_information(request.session, user)
            userinfo.user = user
            userinfo.session = None
            userinfo.save()
        # Log the user in
        login(request, user)
        # Return the login response
        return Response({
            'status': 'logged_in',
            'username': username,
            'permissions': []
        })
    else:
        # TODO: Get different messages based on auth error
        return Response({
            'status': 'auth_error',
            'login_error': 'This combination of username and password is not valid!'
        })
def get(self, request, format=None):
    """Return the current session state: user, problem and dataset info, plus
    the full design data when the user is logged in."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    eoss_context = user_info.eosscontext
    response = {
        'username': request.user.username,
        'permissions': [],
        'problem': eoss_context.problem,
        'dataset_filename': eoss_context.dataset_name,
        'dataset_user': eoss_context.dataset_user
    }
    if request.user.is_authenticated:
        response['is_logged_in'] = True
        # Transform the database design data into a json for the frontend.
        # Evaluate the queryset once instead of issuing one query for the
        # length check and a second one for the iteration.
        designs = list(eoss_context.design_set.all())
        response['data'] = [
            {'id': design.id,
             'inputs': json.loads(design.inputs),
             'outputs': json.loads(design.outputs)}
            for design in designs
        ]
        response['modified_dataset'] = bool(designs)
    else:
        response['is_logged_in'] = False
    return Response(response)
def post(self, request, format=None):
    """Report whether a GA run is currently active for the given input type."""
    if not request.user.is_authenticated:
        return Response('This is only available to registered users!')
    try:
        # Start connection with VASSAR
        user_info = get_or_create_user_information(
            request.session, request.user, 'EOSS')
        client = VASSARClient(user_info.eosscontext.vassar_port)
        client.startConnection()
        input_type = request.data['inputType']
        if input_type == 'binary':
            ga_running = client.client.isGABinaryInputRunning()
        elif input_type == 'discrete':
            ga_running = client.client.isGADiscreteInputRunning()
        else:
            raise ValueError(
                'Unrecognized input type: {0}'.format(input_type))
        # End the connection before return statement
        client.endConnection()
        return Response({'ga_status': ga_running})
    except Exception:
        logger.exception('Exception in starting the GA!')
        client.endConnection()
        return Response('')
def post(self, request, format=None):
    """Save the current dataset to a CSV file under the user's data folder.

    Binary problems pack all inputs into a single bit-string column; discrete
    problems write one column per input.
    """
    if request.user.is_authenticated:
        try:
            user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
            # Get the problem type
            assignation_problems = ['SMAP', 'SMAP_JPL1', 'SMAP_JPL2', 'ClimateCentric']
            partition_problems = ['Decadal2017Aerosols']
            problem = user_info.eosscontext.problem
            if problem in assignation_problems:
                problem_type = 'binary'
            elif problem in partition_problems:
                problem_type = 'discrete'
            else:
                problem_type = 'unknown'
            # Set the path of the file where the data will be saved
            user_path = request.user.username
            filename = request.data['filename']
            file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                     'data', user_path, problem, filename)
            # Open the file (removed a block of commented-out dead code here)
            with open(file_path, 'w', newline='') as csvfile:
                writer = csv.writer(csvfile)
                # Write header — column counts come from the first stored design
                if problem_type == 'binary':
                    design = user_info.eosscontext.design_set.first()
                    num_outputs = len(json.loads(design.outputs))
                    writer.writerow(['Inputs'] +
                                    ['Output' + str(i) for i in range(num_outputs)])
                elif problem_type == 'discrete':
                    design = user_info.eosscontext.design_set.first()
                    num_inputs = len(json.loads(design.inputs))
                    num_outputs = len(json.loads(design.outputs))
                    writer.writerow(['Input' + str(i) for i in range(num_inputs)] +
                                    ['Output' + str(i) for i in range(num_outputs)])
                else:
                    raise ValueError("Not implemented!")
                # Write designs
                for design in user_info.eosscontext.design_set.all():
                    inputs = json.loads(design.inputs)
                    if problem_type == 'binary':
                        # Pack booleans into a single '0'/'1' string column
                        input_list = [''.join(['1' if x else '0' for x in inputs])]
                    elif problem_type == 'discrete':
                        input_list = inputs
                    else:
                        raise ValueError("Not implemented!")
                    output_list = json.loads(design.outputs)
                    writer.writerow(input_list + output_list)
            return Response(filename + " has been saved correctly!")
        except Exception as exc:
            # Chain the cause so the original failure is not lost.
            raise ValueError("There has been an error when writing the file") from exc
    else:
        return Response('This is only available to registered users!')
def post(self, request, format=None):
    """Update the VASSAR port stored on the user's EOSS context."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    user_info.eosscontext.vassar_port = request.data['port']
    user_info.eosscontext.save()
    user_info.save()
    return Response('')
def get(self, request, stage, format=None):
    """Mark the experiment stage at positional index `stage` as started and
    record its start date."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    experiment_context = user_info.eosscontext.experimentcontext
    # order_by("id") makes the positional index deterministic — indexing an
    # unordered queryset has no guaranteed order. This also matches the
    # stage-end endpoint, which already orders by id.
    experiment_stage = experiment_context.experimentstage_set.all(
    ).order_by("id")[stage]
    experiment_stage.start_date = datetime.datetime.utcnow()
    experiment_stage.save()
    return Response({'start_date': experiment_stage.start_date})
def get(self, request, stage, format=None):
    """Mark the experiment stage at positional index `stage` as finished,
    snapshotting the current experiment state into it."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    experiment_context = user_info.eosscontext.experimentcontext
    ordered_stages = experiment_context.experimentstage_set.all().order_by("id")
    finished_stage = ordered_stages[stage]
    finished_stage.end_date = datetime.datetime.utcnow()
    finished_stage.end_state = experiment_context.current_state
    finished_stage.save()
    return Response({'end_date': finished_stage.end_date.isoformat()})
def get(self, request, format=None):
    """Start a new experiment: allocate an id, reset the experiment context
    and create its two stages, returning the stage types."""
    # Check for experiments folder
    results_dir = './experiment_API/results'
    if not os.path.exists(results_dir):
        os.makedirs(results_dir)
    # Obtain ID number
    new_id = len(os.listdir(results_dir))
    # Create the file so the ID does not get repeated; the context manager
    # closes the handle (the original leaked it).
    with open(os.path.join(results_dir, str(new_id) + '.json'), 'w'):
        pass
    # Save experiment start info
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    # Ensure experiment is started again
    ExperimentContext.objects.filter(
        eosscontext_id__exact=user_info.eosscontext.id).delete()
    experiment_context = ExperimentContext(
        eosscontext=user_info.eosscontext,
        is_running=False,
        experiment_id=-1,
        current_state="")
    experiment_context.save()
    experiment_context.experiment_id = new_id
    # Specific to current experiment
    experiment_context.experimentstage_set.all().delete()
    experiment_context.experimentstage_set.create(
        type=stage_type(new_id, 0),
        start_date=datetime.datetime.now(),
        end_date=datetime.datetime.now(),
        end_state="")
    experiment_context.experimentstage_set.create(
        type=stage_type(new_id, 1),
        start_date=datetime.datetime.now(),
        end_date=datetime.datetime.now(),
        end_state="")
    # Save experiment started on database
    experiment_context.is_running = True
    experiment_context.save()
    # Prepare return for client
    experiment_stages = [stage.type
                         for stage in experiment_context.experimentstage_set.all()]
    return Response(experiment_stages)
def post(self, request, format=None):
    """Run association-rule mining and return the driving features for the
    selected vs non-selected designs."""
    try:
        # Start data mining client
        self.DataMiningClient.startConnection()
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        # Get threshold values for the metrics
        supp = float(request.POST['supp'])
        conf = float(request.POST['conf'])
        lift = float(request.POST['lift'])
        # Selected / non-selected arch ids arrive as "[1,2,3]"-style strings:
        # strip the brackets and parse the comma-separated ints.
        behavioral = [int(arch_id)
                      for arch_id in request.POST['selected'][1:-1].split(',')]
        non_behavioral = [int(arch_id)
                          for arch_id in request.POST['non_selected'][1:-1].split(',')]
        # Load architecture data from the session info
        dataset = Design.objects.filter(
            eosscontext_id__exact=user_info.eosscontext.id).all()
        problem = request.POST['problem']
        inputType = request.POST['input_type']
        drivingFeatures = self.DataMiningClient.getDrivingFeatures(
            problem, inputType, behavioral, non_behavioral, dataset,
            supp, conf, lift)
        output = drivingFeatures
        # End the connection before return statement
        self.DataMiningClient.endConnection()
        return Response(output)
    except Exception as detail:
        logger.exception('Exception in getDrivingFeatures: ' + str(detail))
        self.DataMiningClient.endConnection()
        return Response('')
def get(self, request, format=None):
    """Report whether an experiment is running, including its saved state."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    experiment_context = getattr(user_info.eosscontext, 'experimentcontext', None)
    if experiment_context is not None and experiment_context.is_running:
        return Response({
            'is_running': True,
            'experiment_data': json.loads(experiment_context.current_state)
        })
    return Response({'is_running': False})
def get(self, request, format=None):
    """Return the user's active-context toggle settings (logged-in users only)."""
    if not request.user.is_authenticated:
        return Response({
            'error': 'User not logged in!'
        })
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    active_context = user_info.eosscontext.activecontext
    return Response({
        'show_background_search_feedback': active_context.show_background_search_feedback,
        'check_for_diversity': active_context.check_for_diversity,
        'show_arch_suggestions': active_context.show_arch_suggestions,
    })
def post(self, request, format=None):
    """Return the requested command list for the assistant frontend.

    The list kind is selected by `command_list` in the request body; unknown
    kinds yield an empty list.
    """
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    vassar_client = VASSARClient(user_info.eosscontext.vassar_port)
    problem = user_info.eosscontext.problem
    restricted_list = request.data.get('restricted_list', None)
    # Dispatch table: each request keyword maps to a zero-argument factory.
    list_factories = {
        'general': lambda: command_lists.general_commands_list(restricted_list),
        'datamining': lambda: command_lists.datamining_commands_list(restricted_list),
        'analyst': lambda: command_lists.analyst_commands_list(restricted_list),
        'critic': lambda: command_lists.critic_commands_list(restricted_list),
        'historian': lambda: command_lists.historian_commands_list(restricted_list),
        'measurements': command_lists.measurements_list,
        'missions': command_lists.missions_list,
        'technologies': command_lists.technologies_list,
        'space_agencies': command_lists.agencies_list,
        'objectives': lambda: command_lists.objectives_list(vassar_client, problem),
        'orb_info': lambda: command_lists.orbits_info(problem),
        'instr_info': lambda: command_lists.instruments_info(problem),
        'analyst_instrument_parameters':
            lambda: command_lists.analyst_instrument_parameter_list(problem),
        'analyst_instruments': lambda: command_lists.analyst_instrument_list(problem),
        'analyst_measurements': lambda: command_lists.analyst_measurement_list(problem),
        'analyst_stakeholders': lambda: command_lists.analyst_stakeholder_list(problem),
    }
    factory = list_factories.get(request.data['command_list'])
    command_list = factory() if factory is not None else []
    return Response({'list': command_list})
def callback(ch, method, properties, body):
    # RabbitMQ consumer callback: forwards problem-entity messages from the
    # data-mining backend to the user's websocket channel.
    # NOTE(review): `request` and `sessionKey` are captured from an enclosing
    # scope not visible in this chunk — confirm against the surrounding
    # function before changing.
    thread_user_info = get_or_create_user_information(
        request.session, request.user, 'EOSS')
    message = json.loads(body)
    logger.debug(
        "Problem parameters received: (session key: {0})".format(
            sessionKey))
    if message['type'] == 'entities':
        # Rewrite the message type to the Channels consumer-handler form.
        message['type'] = 'data.mining.problem.entities'
        # Look for channel to send back to user
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.send)(
            thread_user_info.channel_name, message)
def post(self, request, format=None):
    """Classify a natural-language command, run the matching handlers and
    return the assistant's response."""
    # Preprocess the command
    processed_command = nlp(request.data['command'].strip().lower())
    # Classify the command, obtaining a command type
    command_options = ['iFEED', 'VASSAR', 'Critic', 'Historian', 'EDL']
    condition_names = ['ifeed', 'analyst', 'critic', 'historian', 'edl']
    command_types = command_processing.classify_command(processed_command)
    # Define context and see if it was already defined for this session
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    # Remove all past answers related to this user
    Answer.objects.filter(eosscontext__exact=user_info.eosscontext).delete()
    AllowedCommand.objects.filter(eosscontext__exact=user_info.eosscontext).delete()
    if 'allowed_commands' in request.data:
        allowed = request.data['allowed_commands']
        for command_type, command_list in allowed.items():
            for command_number in command_list:
                AllowedCommand.objects.create(
                    eosscontext=user_info.eosscontext,
                    command_type=command_type,
                    command_descriptor=command_number)
    # Act based on the types
    for command_type in command_types:
        answer = command_processing.command(
            processed_command,
            command_options[command_type],
            condition_names[command_type],
            user_info)
        Answer.objects.create(
            eosscontext=user_info.eosscontext,
            voice_answer=answer["voice_answer"],
            visual_answer_type=json.dumps(answer["visual_answer_type"]),
            visual_answer=json.dumps(answer["visual_answer"]))
    frontend_response = command_processing.think_response(user_info)
    return Response({'response': frontend_response})
def post(self, request, format=None):
    """Fetch the orbit list for the named problem from VASSAR."""
    try:
        # Start connection with VASSAR
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        port = user_info.eosscontext.vassar_port
        self.VASSARClient = VASSARClient(port)
        self.VASSARClient.startConnection()
        # Renamed from `list`, which shadowed the builtin.
        orbit_list = self.VASSARClient.getOrbitList(request.data['problem_name'])
        # End the connection before return statement
        self.VASSARClient.endConnection()
        return Response(orbit_list)
    except Exception:
        logger.exception('Exception in getting the orbit list')
        self.VASSARClient.endConnection()
        return Response('')
def post(self, request, format=None):
    """Update the active-context toggles sent by the frontend.

    Each toggle is optional in the request body; values arrive as the
    strings 'true'/'false'.
    """
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    active_context = user_info.eosscontext.activecontext
    data = request.data
    if 'show_background_search_feedback' in data:
        show_feedback = data['show_background_search_feedback'] == 'true'
        active_context.show_background_search_feedback = show_feedback
        if show_feedback:
            # Flush queued background-search archs into the main dataset and
            # push them to the client over the websocket.
            back_list = send_archs_from_queue_to_main_dataset(user_info)
            channel_layer = get_channel_layer()
            send_archs_back(channel_layer, user_info.channel_name, back_list)
    if 'check_for_diversity' in data:
        active_context.check_for_diversity = data['check_for_diversity'] == 'true'
    if 'show_arch_suggestions' in data:
        active_context.show_arch_suggestions = data['show_arch_suggestions'] == 'true'
    active_context.save()
    user_info.save()
    return Response({})
def post(self, request, format=None):
    """Run marginal driving-feature mining given a base feature expression
    and a logical connective."""
    try:
        # Start data mining client
        self.DataMiningClient.startConnection()
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        # Get threshold values for the metrics
        supp = float(request.POST['supp'])
        conf = float(request.POST['conf'])
        lift = float(request.POST['lift'])
        # Get selected arch id's
        behavioral = json.loads(request.POST['selected'])
        non_behavioral = json.loads(request.POST['non_selected'])
        featureExpression = request.POST['featureExpression']
        logicalConnective = request.POST['logical_connective']
        # Load architecture data from the session info
        dataset = Design.objects.filter(
            eosscontext_id__exact=user_info.eosscontext.id).all()
        problem = request.POST['problem']
        inputType = request.POST['input_type']
        drivingFeatures = self.DataMiningClient.getMarginalDrivingFeatures(
            problem, inputType, behavioral, non_behavioral, dataset,
            featureExpression, logicalConnective, supp, conf, lift)
        output = drivingFeatures
        # End the connection before return statement
        self.DataMiningClient.endConnection()
        return Response(output)
    except Exception as detail:
        # str(detail): concatenating the exception object directly raises a
        # TypeError inside the handler and masks the original error.
        logger.exception('Exception in getDrivingFeatures: ' + str(detail))
        self.DataMiningClient.endConnection()
        return Response('')
def post(self, request, format=None):
    """Generate a critique (historical + expert) of the given architecture."""
    try:
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        port = user_info.eosscontext.vassar_port
        inputs = json.loads(request.POST['inputs'])
        critique = self.get_history_critique(inputs) + self.get_expert_critique(inputs, port)
        return Response(json.dumps(critique))
    except Exception:
        logger.exception('Exception in generating a critique of an architecture')
        return Response('')
def post(self, request, format=None):
    """Evaluate an architecture with VASSAR and add it to the dataset unless
    an existing design already has identical outputs."""
    try:
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        port = user_info.eosscontext.vassar_port
        self.VASSARClient = VASSARClient(port)
        # Start connection with VASSAR
        self.VASSARClient.startConnection()
        inputs = json.loads(request.data['inputs'])
        architecture = self.VASSARClient.evaluateArchitecture(
            user_info.eosscontext.problem, inputs)
        # Check whether an existing design already has identical outputs.
        # Starting from False fixes the empty-dataset case: the original
        # initialized the flag to True, so with no stored designs the first
        # evaluated architecture was never added. (Also removed a leftover
        # debug print of last_arch_id.)
        is_same = False
        for old_arch in user_info.eosscontext.design_set.all():
            is_same = True
            old_arch_outputs = json.loads(old_arch.outputs)
            for i in range(len(old_arch_outputs)):
                if old_arch_outputs[i] != architecture['outputs'][i]:
                    is_same = False
            if is_same:
                break
        if not is_same:
            architecture['id'] = user_info.eosscontext.last_arch_id
            add_design(architecture, user_info.eosscontext, False)
            user_info.save()
        # End the connection before return statement
        self.VASSARClient.endConnection()
        return Response(architecture)
    except Exception:
        logger.exception('Exception in evaluating an architecture')
        self.VASSARClient.endConnection()
        return Response('')
def post(self, request, format=None):
    """Fetch problem-formulation parameters (entities + concept hierarchy)
    from the data-mining backend."""
    user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
    try:
        session_key = request.session.session_key
        # Start data mining client
        self.DataMiningClient.startConnection()
        problem = request.POST['problem']
        if problem != "ClimateCentric":
            raise NotImplementedError(
                "Unsupported problem formulation: {0}".format(problem))
        entities = self.DataMiningClient.client.getAssigningProblemEntities(
            session_key, problem)
        params = {'leftSet': entities.leftSet, 'rightSet': entities.rightSet}
        hierarchy = self.DataMiningClient.client.getAssigningProblemConceptHierarchy(
            session_key, problem,
            AssigningProblemEntities(params['leftSet'], params['rightSet']))
        params['instanceMap'] = hierarchy.instanceMap
        params['superclassMap'] = hierarchy.superclassMap
        # End the connection before return statement
        self.DataMiningClient.endConnection()
        return Response(params)
    except Exception as detail:
        logger.exception('Exception in GetProblemParameters: ' + str(detail))
        self.DataMiningClient.endConnection()
        return Response('')
def post(self, request, format=None):
    """Stop a running GA for the given input type, polling until it halts."""
    if not request.user.is_authenticated:
        return Response('This is only available to registered users!')
    try:
        user_info = get_or_create_user_information(
            request.session, request.user, 'EOSS')
        # Start connection with VASSAR
        client = VASSARClient(user_info.eosscontext.vassar_port)
        client.startConnection()
        problem = request.data['problem']
        input_type = request.data['inputType']
        # Call the GA stop function on Engineer and wait for it to go down
        if input_type == 'binary':
            client.client.stopGABinaryInput(request.user.username)
            while client.client.isGABinaryInputRunning():
                time.sleep(0.1)
        elif input_type == 'discrete':
            client.client.stopGADiscreteInput(request.user.username)
            while client.client.isGADiscreteInputRunning():
                time.sleep(0.1)
        else:
            raise ValueError(
                'Unrecognized input type: {0}'.format(input_type))
        # End the connection before return statement
        client.endConnection()
        return Response('GA stopped correctly!')
    except Exception:
        logger.exception('Exception in stopping the GA!')
        client.endConnection()
        return Response('')
def post(self, request, format=None):
    """Ask the data-mining backend to stop the generalization search."""
    try:
        session_key = request.session.session_key
        # Start connection with DataMiningClient
        self.DataMiningClient.startConnection()
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        logger.debug("StopSearch (session key: {0})".format(
            request.session.session_key))
        # Stop the generalization search
        self.DataMiningClient.client.stopSearch(session_key)
        # End the connection before return statement
        self.DataMiningClient.endConnection()
        return Response('Generalization stopped correctly!')
    except Exception as detail:
        logger.exception('Exception in StopSearch(): ' + str(detail))
        self.DataMiningClient.endConnection()
        return Response('')
def post(self, request, format=None):
    """Load a dataset CSV into the session context, de-duplicating designs by
    their input vector, and return the parsed architectures."""
    try:
        user_info = get_or_create_user_information(request.session, request.user, 'EOSS')
        # Set the path of the file containing data. The original condition
        # was the chained comparison `... == 'true' != ''`, which reduces to
        # a plain equality check since `'true' != ''` is always True.
        load_user_files = request.data['load_user_files'] == 'true'
        user_path = request.user.username if load_user_files else 'default'
        problem = request.data['problem']
        filename = request.data['filename']
        file_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'data', user_path,
            problem, filename)
        input_num = int(request.data['input_num'])
        input_type = request.data['input_type']
        output_num = int(request.data['output_num'])
        user_info.eosscontext.last_arch_id = 0
        # Open the file
        with open(file_path) as csvfile:
            Design.objects.filter(
                eosscontext__exact=user_info.eosscontext).delete()
            architectures = []
            architectures_json = []
            inputs_unique_set = set()
            # For each row, store the information
            has_header = csv.Sniffer().has_header(csvfile.read(1024))
            csvfile.seek(0)
            # Read the file as a csv file
            reader = csv.reader(csvfile, delimiter=',')
            for row in reader:
                if has_header:
                    has_header = False
                    continue
                inputs = []
                outputs = []
                # Import inputs
                for i in range(input_num):
                    if input_type == 'binary':
                        # Assumes that there is only one column for the inputs
                        inputs = self.booleanString2booleanArray(row[i])
                    elif input_type == 'discrete':
                        inputs.append(int(row[i]))
                    else:
                        raise ValueError(
                            'Unknown input type: {0}'.format(input_type))
                for i in range(output_num):
                    out = row[i + input_num]
                    # Empty output cells default to 0
                    out = 0 if out == "" else float(out)
                    outputs.append(out)
                # De-duplicate designs by their input vector
                hashed_input = hash(tuple(inputs))
                if hashed_input not in inputs_unique_set:
                    architectures.append(
                        Design(id=user_info.eosscontext.last_arch_id,
                               eosscontext=user_info.eosscontext,
                               inputs=json.dumps(inputs),
                               outputs=json.dumps(outputs)))
                    architectures_json.append({
                        'id': user_info.eosscontext.last_arch_id,
                        'inputs': inputs,
                        'outputs': outputs
                    })
                    user_info.eosscontext.last_arch_id += 1
                    inputs_unique_set.add(hashed_input)
        # Define context and see if it was already defined for this session
        Design.objects.bulk_create(architectures)
        user_info.eosscontext.problem = problem
        user_info.eosscontext.dataset_name = filename
        user_info.eosscontext.dataset_user = load_user_files
        user_info.eosscontext.save()
        user_info.save()
        return Response(architectures_json)
    except Exception as exc:
        # Chain the cause so the parsing failure is not lost.
        raise ValueError(
            "There has been an error when parsing the architectures") from exc