def _get_config(self, excel_fields):
    """Build an ImporterConfig that imports every excel column verbatim.

    Each excel column maps onto a custom field of the same name, the case
    fields are left blank, and the first excel column is used to search by
    case_id; new cases are created for unmatched rows.
    """
    blank_case_fields = [''] * len(excel_fields)
    return ImporterConfig(
        create_new_cases=True,
        search_field='case_id',
        search_column=excel_fields[0],
        custom_fields=excel_fields,
        case_fields=blank_case_fields,
        excel_fields=excel_fields,
        case_type=self.case_type,
        couch_user_id=self.couch_user_id,
    )
def _get_importer_config(case_type, headers, user_id):
    """Return an ImporterConfig that imports all ``headers`` columns as-is.

    Every header becomes a custom field, case fields are blank, the first
    header is the search column matched against external_id, and new cases
    are created when no match is found.
    """
    empty_mappings = [''] * len(headers)
    return ImporterConfig(
        create_new_cases=True,
        search_field='external_id',
        search_column=headers[0],
        custom_fields=headers,
        case_fields=empty_mappings,
        excel_fields=headers,
        case_type=case_type,
        couch_user_id=user_id,
    )
def _config(self, col_names, search_column=None, case_type=None,
            search_field='case_id', create_new_cases=True):
    """Build a test ImporterConfig from ``col_names``.

    Falsy ``search_column``/``case_type`` fall back to the first column and
    ``self.default_case_type`` respectively (note: ``or``-based defaulting,
    so empty strings also trigger the fallback).
    """
    effective_case_type = case_type or self.default_case_type
    effective_search_column = search_column or col_names[0]
    return ImporterConfig(
        create_new_cases=create_new_cases,
        search_field=search_field,
        search_column=effective_search_column,
        custom_fields=col_names,
        case_fields=[''] * len(col_names),
        excel_fields=col_names,
        case_type=effective_case_type,
        couch_user_id=self.couch_user._id,
    )
def handle(self, export_file, config_file, domain, **options):
    """Run a case import from ``export_file`` using the JSON config in
    ``config_file``, printing the import result as JSON followed by the
    elapsed wall-clock time in seconds.
    """
    started_at = datetime.utcnow()
    with open(config_file, 'r', encoding='utf-8') as config_fh:
        config = ImporterConfig.from_json(config_fh.read())
    with get_spreadsheet(export_file) as spreadsheet:
        result = do_import(spreadsheet, config, domain)
        print(json.dumps(result, default=json_handler))
    elapsed = (datetime.utcnow() - started_at).seconds
    print('finished in %s seconds' % elapsed)
def test_41e39e16__2016_12_16(self):
    """Pickle-compatibility regression test.

    Unpickles an ImporterConfig serialized at revision 41e39e16 (2016-12-16)
    and checks it equals one constructed field-by-field, i.e. the current
    class can still load historical pickles with this field layout.
    """
    # NOTE(review): pickle.loads requires bytes on Python 3; this str literal
    # only loads under Python 2 — confirm which interpreter runs this test.
    # The blob must stay byte-identical: it encodes the exact field order
    # (couch_user_id, excel_fields, case_fields, custom_fields, search_column,
    # case_type, search_field, create_new_cases) of the historical class.
    ic = pickle.loads(
        '\x80\x02ccorehq.apps.case_importer.util\nImporterConfig\nq\x00(U\rcouch_user_idq\x01U\x0cexcel_fieldsq\x02U\x0bcase_fieldsq\x03U\rcustom_fieldsq\x04U\rsearch_columnq\x05U\tcase_typeq\x06U\x0csearch_fieldq\x07U\x10create_new_casesq\x08tq\t\x81q\n.'
    )
    # Each field holds its own name as a sentinel value, so equality proves
    # the positional-to-named mapping survived unchanged.
    self.assertEqual(
        ic,
        ImporterConfig(
            couch_user_id='couch_user_id',
            excel_fields='excel_fields',
            case_fields='case_fields',
            custom_fields='custom_fields',
            search_column='search_column',
            case_type='case_type',
            search_field='search_field',
            create_new_cases='create_new_cases',
        ))
def handle(self, export_file, config_file, domain, user_id, **options):
    """Run a case import as a specific web user.

    ``user_id`` may be a username (contains '@') or a user doc id. The user
    must be a member of ``domain``; otherwise a CommandError is raised. The
    import result is printed as JSON, then the elapsed seconds.
    """
    started_at = datetime.utcnow()
    user = (WebUser.get_by_username(user_id) if '@' in user_id
            else WebUser.get(user_id))
    if not user.is_member_of(domain):
        raise CommandError("%s can't access %s" % (user, domain))
    with open(config_file, 'r', encoding='utf-8') as config_fh:
        config = ImporterConfig.from_json(config_fh.read())
    # Run the import under the resolved user's identity.
    config.couch_user_id = user._id
    with get_spreadsheet(export_file) as spreadsheet:
        result = do_import(spreadsheet, config, domain)
        print(json.dumps(result, default=json_handler))
    print('finished in %s seconds' % (datetime.utcnow() - started_at).seconds)
def test_e296e568__2016_12_16(self):
    """Pickle-compatibility regression test for an older field layout.

    Unpickles an ImporterConfig serialized at revision e296e568 (2016-12-16),
    which still contained the since-removed key_column, value_column and
    named_columns fields, and checks it equals a config built from the
    surviving fields — i.e. removed fields are dropped on load.
    """
    # NOTE(review): pickle.loads requires bytes on Python 3; this str literal
    # only loads under Python 2 — confirm which interpreter runs this test.
    # The blob must stay byte-identical: it encodes the historical 11-field
    # order including the three removed fields.
    ic = pickle.loads(
        '\x80\x02ccorehq.apps.case_importer.util\nImporterConfig\nq\x00(U\rcouch_user_idq\x01U\x0cexcel_fieldsq\x02U\x0bcase_fieldsq\x03U\rcustom_fieldsq\x04U\rsearch_columnq\x05U\nkey_columnq\x06U\x0cvalue_columnq\x07U\rnamed_columnsq\x08U\tcase_typeq\tU\x0csearch_fieldq\nU\x10create_new_casesq\x0btq\x0c\x81q\r.'
    )
    self.assertEqual(
        ic,
        ImporterConfig(
            couch_user_id='couch_user_id',
            excel_fields='excel_fields',
            case_fields='case_fields',
            custom_fields='custom_fields',
            search_column='search_column',
            # these three were removed
            # key_column='key_column',
            # value_column='value_column',
            # named_columns='named_columns',
            case_type='case_type',
            search_field='search_field',
            create_new_cases='create_new_cases',
        ))
def handle(self, *args, **options):
    """Run a case import as a specific web user (legacy *args signature).

    Expects exactly four positional args: export_file, config_file, domain,
    user_id. ``user_id`` may be a username (contains '@') or a user doc id,
    and must belong to ``domain`` or a CommandError is raised. Prints the
    import result as JSON, then the elapsed seconds.

    Fixes: converted Python 2 ``print`` statements to ``print()`` calls
    (they are syntax errors on Python 3) and opens the config file with an
    explicit UTF-8 encoding, matching the sibling import command.
    """
    if len(args) != 4:
        raise CommandError('Usage is import_cases %s' % self.args)
    start = datetime.utcnow()
    export_file, config_file, domain, user_id = args
    # A username always contains '@'; anything else is a user doc id.
    if '@' in user_id:
        user = WebUser.get_by_username(user_id)
    else:
        user = WebUser.get(user_id)
    if not user.is_member_of(domain):
        raise CommandError("%s can't access %s" % (user, domain))
    with open(config_file, 'r', encoding='utf-8') as f:
        config = ImporterConfig.from_json(f.read())
    # Run the import under the resolved user's identity.
    config.couch_user_id = user._id
    with get_spreadsheet(export_file) as spreadsheet:
        print(json.dumps(do_import(spreadsheet, config, domain),
                         default=json_handler))
    print('finished in %s seconds' % (datetime.utcnow() - start).seconds)