def pm_meters_preview(self, request, pk):
    """Return validated type units, proposed imports and unlinkable PM ids
    for the 'Meter Entries' sheet of an uploaded Portfolio Manager file.
    """
    org_id = request.query_params.get('organization_id')

    # Scope the lookup to the caller's organization so users cannot preview
    # files belonging to other orgs.
    try:
        import_file = ImportFile.objects.get(
            pk=pk,
            import_record__super_organization_id=org_id,
        )
    except ImportFile.DoesNotExist:
        return JsonResponse(
            {
                'status': 'error',
                'message': 'Could not find import file with pk=' + str(pk),
            },
            status=status.HTTP_400_BAD_REQUEST,
        )

    sheet_parser = reader.MCMParser(import_file.local_file, sheet_name='Meter Entries')
    meters_parser = MetersParser(org_id, list(sheet_parser.data))

    # NOTE(review): `proposed_imports` is read as an attribute here, but
    # parsed_meters_confirmation *calls* it — confirm which matches
    # MetersParser's definition.
    return {
        "validated_type_units": meters_parser.validated_type_units(),
        "proposed_imports": meters_parser.proposed_imports,
        "unlinkable_pm_ids": meters_parser.unlinkable_pm_ids,
    }
def test_blank_row(self):
    """Headers are still parsed correctly from a sheet containing blank rows."""
    # Swap the fixture opened in setUp for the blank-rows variant.
    self.xls_f.close()
    here = os.path.dirname(os.path.realpath(__file__))
    self.xls_f = open(here + '/test_data/test_espm_blank_rows.xls', 'rb')
    self.parser = reader.MCMParser(self.xls_f)
    self.total_callbacks = 0

    headers = self.parser.headers()
    self.assertEqual(headers[0], 'Property Id')
    self.assertEqual(headers[-1], 'Release Date')
def _save_raw_data(file_pk, *args, **kwargs):
    """Chunk up the CSV and save data into the DB raw.

    Progress/result state is written to the cache under the file's progress
    key so the UI can poll it. Returns the result dict (status, progress,
    and message/stacktrace on warning or error).

    :param file_pk: primary key of the ImportFile to ingest
    """
    result = {'status': 'success', 'progress': 100}
    prog_key = get_prog_key('save_raw_data', file_pk)
    try:
        import_file = ImportFile.objects.get(pk=file_pk)

        # Idempotency guard: don't re-ingest a file that was already saved.
        if import_file.raw_save_done:
            result['status'] = 'warning'
            result['message'] = 'Raw data already saved'
            cache.set(prog_key, result)
            return result

        # Green Button files have their own ingestion path.
        if import_file.source_type == "Green Button Raw":
            return _save_raw_green_button_data(file_pk, *args, **kwargs)

        parser = reader.MCMParser(import_file.local_file)
        cache_first_rows(import_file, parser)
        rows = parser.next()
        import_file.num_rows = 0

        # Fan the rows out into 100-row celery subtasks.
        tasks = []
        for chunk in batch(rows, 100):
            import_file.num_rows += len(chunk)
            tasks.append(
                _save_raw_data_chunk.subtask((chunk, file_pk, prog_key)))

        tasks = add_cache_increment_parameter(tasks)
        import_file.num_columns = parser.num_columns()
        import_file.save()

        if tasks:
            chord(tasks, interval=15)(finish_raw_save.subtask([file_pk]))
        else:
            # NOTE(review): `.task(...)` looks unusual for celery — confirm
            # this invokes finish_raw_save rather than needing `.delay(...)`.
            finish_raw_save.task(file_pk)
    except StopIteration:
        result['status'] = 'error'
        result['message'] = 'StopIteration Exception'
        result['stacktrace'] = traceback.format_exc()
    except Error as e:
        # str(e) instead of the Python-2-only `e.message`, which would raise
        # AttributeError on Python 3 and mask the real parsing error.
        result['status'] = 'error'
        result['message'] = 'File Content Error: ' + str(e)
        result['stacktrace'] = traceback.format_exc()
    except KeyError as e:
        result['status'] = 'error'
        # e.args[0] is the bare column name; str(e) would add quotes around it.
        result['message'] = 'Invalid Column Name: "' + str(e.args[0] if e.args else e) + '"'
        result['stacktrace'] = traceback.format_exc()
    except Exception as e:
        result['status'] = 'error'
        result['message'] = 'Unhandled Error: ' + str(e)
        result['stacktrace'] = traceback.format_exc()
    cache.set(prog_key, result)
    return result
def test_odd_date_format(self):
    """Regression test to handle excel date format issues.

    More info at:
    https://secure.simplistix.co.uk/svn/xlrd/trunk/xlrd/doc/xlrd.html?p=4966
    under 'Dates in Excel spreadsheets'
    """
    # Replace the fixture opened in setUp with the odd-date-format workbook.
    self.xlsx_f.close()
    base_dir = os.path.dirname(os.path.realpath(__file__))
    self.xlsx_f = open(base_dir + '/test_data/test_espm_date_format.xlsx', 'rb')
    self.parser = reader.MCMParser(self.xlsx_f)

    # Fully consume the sheet; a bad date cell would raise while iterating.
    for _ in self.parser.reader.excelreader:
        pass
def parsed_meters_confirmation(self, request):
    """Re-parse a previously uploaded PM meters file and return the parse
    summary (validated type units, proposed imports, unlinkable PM ids).
    """
    # NOTE(review): dict(request.data) assumes a JSON body; on a form-encoded
    # QueryDict the values would come back as lists — confirm callers.
    body = dict(request.data)
    file_id = body['file_id']
    org_id = body['organization_id']

    import_file = ImportFile.objects.get(pk=file_id)
    sheet_parser = reader.MCMParser(import_file.local_file, sheet_name='Meter Entries')
    meters_parser = MetersParser(org_id, list(sheet_parser.data))

    # NOTE(review): `proposed_imports` is *called* here, but pm_meters_preview
    # reads it as an attribute — confirm which matches MetersParser.
    return {
        "validated_type_units": meters_parser.validated_type_units(),
        "proposed_imports": meters_parser.proposed_imports(),
        "unlinkable_pm_ids": meters_parser.unlinkable_pm_ids,
    }
def setUp(self):
    """Open the sample ESPM .xls fixture and build a parser around it."""
    fixture = os.path.dirname(os.path.realpath(__file__)) + '/test_data/test_espm.xls'
    self.xls_f = open(fixture, 'rb')
    self.parser = reader.MCMParser(self.xls_f)
    self.total_callbacks = 0