def run_data_manipulation(self, config, expected):
    """Apply a filter config to a dataset and diff the merged output CSV
    against an expected CSV file.

    Args:
        config: parsed INI-style config; ``config['global_settings']`` must
            contain ``dataset_id`` (int-parseable) and ``config`` (a JSON
            string of filter settings).
        expected: path to a CSV file holding the expected merged rows.

    Raises:
        AssertionError: if the new dataset is missing, its id is not
            ``dataset_id + 1``, or the CSV diff fails.
    """
    dataset_id = int(config['global_settings']['dataset_id'])
    config_string = config['global_settings']['config']
    config = json.loads(config_string)
    result = apply_filter_config(dataset_id, config)
    if isinstance(result, list):
        # NOTE(review): a list result signals errors, yet execution still
        # falls through to objects.get(pk=result) below and will fail there.
        # Preserved as-is; consider failing fast — confirm intended behavior.
        print('Error = ', result)
    else:
        print('New dataset id =', result)
    dataset = SensorIngest.objects.get(pk=result)
    # Fix: identity comparison with None, not equality.
    assert dataset is not None
    assert dataset.id == dataset_id + 1
    rows = dataset.merge(as_local_time=True)
    tmp_dir = tempfile.mkdtemp()
    try:
        temp_file = os.path.join(tmp_dir, "output.csv")
        with open(temp_file, 'w', newline='\n') as fout:
            csvwriter = csv.writer(fout)
            for r in rows:
                csvwriter.writerow(r)
        actual_rows = self._getCSV_asList(temp_file)
        expected_rows = self._getCSV_asList(expected)
        self._diff_checker(expected_rows, actual_rows)
    finally:
        # Fix: clean up the temp dir even when an assertion above fails.
        shutil.rmtree(tmp_dir, ignore_errors=True)
def run_data_manipulation(self, config, expected):
    """Run a dataset filter config and compare the resulting merged CSV
    with an expected CSV.

    Args:
        config: parsed config object whose ``global_settings`` section
            provides ``dataset_id`` and a JSON ``config`` string.
        expected: path of the expected-results CSV file.

    Raises:
        AssertionError: on missing dataset, unexpected dataset id, or a
            CSV mismatch reported by ``_diff_checker``.
    """
    dataset_id = int(config['global_settings']['dataset_id'])
    config_string = config['global_settings']['config']
    config = json.loads(config_string)
    result = apply_filter_config(dataset_id, config)
    if isinstance(result, list):
        # NOTE(review): list == error case, but control flow continues to
        # objects.get(pk=result) regardless; kept as in the original —
        # verify whether an early failure is wanted here.
        print('Error = ', result)
    else:
        print('New dataset id =', result)
    dataset = SensorIngest.objects.get(pk=result)
    # Fix: use `is not None` for the None check.
    assert dataset is not None
    assert dataset.id == dataset_id + 1
    rows = dataset.merge(as_local_time=True)
    tmp_dir = tempfile.mkdtemp()
    try:
        temp_file = os.path.join(tmp_dir, "output.csv")
        with open(temp_file, 'w', newline='\n') as fout:
            csvwriter = csv.writer(fout)
            for r in rows:
                csvwriter.writerow(r)
        actual_rows = self._getCSV_asList(temp_file)
        expected_rows = self._getCSV_asList(expected)
        self._diff_checker(expected_rows, actual_rows)
    finally:
        # Fix: guarantee temp-dir removal even if the diff check raises.
        shutil.rmtree(tmp_dir, ignore_errors=True)
def handle(self, *args, verbosity=1, dry_run=False, **options):
    """Management-command entry point.

    Reads an INI file named by ``args[0]``, pulls ``dataset_id`` and a JSON
    filter ``config`` string from its ``[global_settings]`` section, and
    applies the filter config to that dataset.

    Note: ``verbosity`` and ``dry_run`` are accepted but currently unused.
    """
    try:
        # Renamed from `config` to avoid rebinding the same name from a
        # ConfigParser to a dict further down.
        parser = ConfigParser()
        # NOTE(review): ConfigParser.read silently ignores unreadable
        # files; a missing file surfaces later as a KeyError here.
        parser.read(args[0])
        dataset_id = int(parser['global_settings']['dataset_id'])
        config_string = parser['global_settings']['config']
        print("Config String: ", config_string)
        config = json.loads(config_string)
        result = apply_filter_config(dataset_id, config)
        if isinstance(result, list):
            print('Error = ', result)
        else:
            print('New dataset id =', result)
    except Exception:
        # Fix: dropped the unused `as e` binding.
        # TODO: log errors
        print(traceback.format_exc())
def handle(self, *args, verbosity=1, dry_run=False, **options):
    """Command entry point: load an INI config file given as ``args[0]``
    and apply its JSON filter config to the referenced dataset.

    ``verbosity`` and ``dry_run`` are part of the command interface but are
    not consulted by this implementation.
    """
    try:
        # Use a distinct name for the INI parser so the later JSON dict can
        # keep the `config` name without shadowing/rebinding confusion.
        ini = ConfigParser()
        # NOTE(review): a bad path is not reported by read(); it shows up
        # as a KeyError on the section access below.
        ini.read(args[0])
        dataset_id = int(ini['global_settings']['dataset_id'])
        config_string = ini['global_settings']['config']
        print("Config String: ", config_string)
        config = json.loads(config_string)
        result = apply_filter_config(dataset_id, config)
        if isinstance(result, list):
            print('Error = ', result)
        else:
            print('New dataset id =', result)
    except Exception:
        # Fix: removed the unused exception variable.
        # TODO: log errors
        print(traceback.format_exc())
def manipulate(self, request, *args, **kargs):
    """Apply a dataset-manipulation (filter) config posted by the client.

    Validates the request body with ``DataSetManipulateSerializer``; on
    success applies the config to the current dataset and returns the
    result, otherwise returns HTTP 400.

    Returns:
        Response: the ``apply_filter_config`` result on success; a 400
        response carrying the error list or a validation message otherwise.
    """
    # Removed: an unused inner `_iter_data` generator and a block of
    # commented-out sample request code that were dead weight here.
    serializer = serializers.DataSetManipulateSerializer(data=request.DATA)
    if not serializer.is_valid():
        return Response("Not a valid config", status.HTTP_400_BAD_REQUEST)
    dataset_id = self.get_object().id
    config = serializer.object['config']
    result = apply_filter_config(dataset_id, config)
    if isinstance(result, list):
        print('Errors:')
        # Fix: the original returned the undefined name `errors`, which
        # raised NameError; the error list is `result`.
        return Response(result, status.HTTP_400_BAD_REQUEST)
    return Response(result)