def handle(self, *args, **options):
    """Load declarations from a JSON source file into the search index.

    Rows already present in the index (matched on last name, first name
    and declaration year, plus patronymic when available) are skipped.

    args[0] -- path to the source JSON file.
    Raises CommandError when the source file argument is missing.
    """
    try:
        file_path = args[0]
    except IndexError:
        raise CommandError('First argument must be a source file')

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        decls = json.load(source)
        counter = 0

        Declaration.init()  # Apparently this is required to init mappings

        for row in decls:
            mapped = self.map_fields(row)

            # BUG FIX: hyphenated names are split into a LIST of tokens,
            # which a `term` query cannot match against — a `terms` query
            # is required for list-valued input (the sibling importer
            # already uses Terms for these fields). Chained .filter()
            # calls are AND-combined, preserving the original intent.
            res = Declaration.search().filter(
                'terms',
                general__last_name=mapped['general']['last_name'].lower().split('-')
            ).filter(
                'terms',
                general__name=mapped['general']['name'].lower().split('-')
            ).filter(
                'term',
                intro__declaration_year=mapped['intro']['declaration_year']
            )

            if mapped['general']['patronymic']:
                res = res.filter(
                    'term',
                    general__patronymic=mapped['general']['patronymic'].lower())

            res = res.execute()

            # Only create the document when no match was found.
            if not res.hits:
                item = Declaration(**mapped)
                item.save()
                counter += 1

    self.stdout.write(
        'Loaded {} items to persistence storage'.format(counter))
def handle(self, *args, **options):
    """Import declarations from a comma-delimited CSV file, merging rows
    that belong to the same group and upserting them into the index.

    args[0] -- path to the source CSV file
    args[1] -- id prefix used by map_fields when building document ids
    Raises CommandError when either positional argument is missing.
    """
    try:
        file_path = args[0]
        id_prefix = args[1]
    except IndexError:
        raise CommandError(
            'First argument must be a source file and second is a id prefix'
        )

    groups = defaultdict(list)

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        reader = csv.DictReader(source, delimiter=',')
        counter = 0

        for row in reader:
            # The status column header may be in English or Ukrainian.
            status_col = 'Status' if 'Status' in row else 'Статус'
            # Only a blank status or an explicit "Ок" marks a valid row.
            if row[status_col] == '' or row[status_col] == 'Ок':
                groups[row[self._group_column(row)]].append(row)
                counter += 1

        self.stdout.write(
            'Read {} valid rows from the input file'.format(counter))

    Declaration.init()  # Apparently this is required to init mappings

    declarations = map(self.merge_group, groups.values())

    counter = 0
    for declaration in declarations:
        mapped = self.map_fields(declaration, id_prefix)

        # BUG FIX: .split('-') yields a LIST, which a `term` query cannot
        # match; the list-valued name fields need a `terms` query (as the
        # later Terms-based importer does). Chained .filter() calls are
        # AND-combined, preserving the original intent.
        res = Declaration.search().filter(
            'terms',
            general__last_name=mapped['general']['last_name'].lower().split('-')
        ).filter(
            'terms',
            general__name=mapped['general']['name'].lower().split('-')
        ).filter(
            'term',
            intro__declaration_year=mapped['intro']['declaration_year'])

        if mapped['general']['patronymic']:
            res = res.filter(
                'term',
                general__patronymic=mapped['general']['patronymic'].lower())

        res = res.execute()

        if res.hits:
            self.stdout.write("%s (%s) already exists" %
                              (mapped['general']['full_name'],
                               mapped['intro']['declaration_year']))
            # Reuse the existing document id so the record is replaced
            # in place instead of duplicated.
            mapped['_id'] = res.hits[0]._id

        item = Declaration(**mapped)
        item.save()
        counter += 1

    self.stdout.write(
        'Loaded {} items to persistence storage'.format(counter))
def handle(self, *args, **options):
    """Import grouped declaration rows from a comma-delimited CSV file,
    upserting each merged group into the search index.

    args[0] -- path to the source CSV file
    args[1] -- id prefix used by map_fields when building document ids
    Raises CommandError when either positional argument is missing.
    """
    try:
        file_path = args[0]
        id_prefix = args[1]
    except IndexError:
        raise CommandError(
            'First argument must be a source file and second is a id prefix')

    groups = defaultdict(list)

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        reader = csv.DictReader(source, delimiter=',')
        counter = 0

        for row in reader:
            # The status column header may be in English or Ukrainian.
            status_col = 'Status' if 'Status' in row else 'Статус'
            # Only a blank status or an explicit "Ок" marks a valid row.
            if row[status_col] == '' or row[status_col] == 'Ок':
                groups[row[self._group_column(row)]].append(row)
                counter += 1

        self.stdout.write('Read {} valid rows from the input file'.format(counter))

    Declaration.init()  # Apparently this is required to init mappings

    declarations = map(self.merge_group, groups.values())

    counter = 0
    for declaration in declarations:
        mapped = self.map_fields(declaration, id_prefix)

        # BUG FIX: .split('-') yields a LIST, which a `term` query cannot
        # match; the list-valued name fields need a `terms` query (as the
        # later Terms-based importer does). Chained .filter() calls are
        # AND-combined, preserving the original intent.
        res = Declaration.search().filter(
            'terms',
            general__last_name=mapped['general']['last_name'].lower().split('-')
        ).filter(
            'terms',
            general__name=mapped['general']['name'].lower().split('-')
        ).filter(
            'term',
            intro__declaration_year=mapped['intro']['declaration_year'])

        if mapped['general']['patronymic']:
            res = res.filter(
                'term',
                general__patronymic=mapped['general']['patronymic'].lower())

        res = res.execute()

        if res.hits:
            self.stdout.write(
                "%s (%s) already exists" % (
                    mapped['general']['full_name'],
                    mapped['intro']['declaration_year']))
            # Reuse the existing document id so the record is replaced
            # in place instead of duplicated.
            mapped['_id'] = res.hits[0]._id

        item = Declaration(**mapped)
        item.save()
        counter += 1

    self.stdout.write('Loaded {} items to persistence storage'.format(counter))
def handle(self, *args, **options):
    """Read grouped declaration rows from a comma-delimited CSV file and
    upsert each merged group into the search index, reporting whether a
    record was created or replaced an existing one.
    """
    try:
        file_path = args[0]
        id_prefix = args[1]
    except IndexError:
        raise CommandError("First argument must be a source file and second is a id prefix")

    groups = defaultdict(list)
    with open(file_path, "r", newline="", encoding="utf-8") as source:
        reader = csv.DictReader(source, delimiter=",")
        valid_rows = 0
        for record in reader:
            # The status column header may be in English or Ukrainian.
            status_col = "Status" if "Status" in record else "Статус"
            if record[status_col] in ("", "Ок"):
                groups[record[self._group_column(record)]].append(record)
                valid_rows += 1
        self.stdout.write("Read {} valid rows from the input file".format(valid_rows))

    Declaration.init()  # Apparently this is required to init mappings

    saved = 0
    for merged in map(self.merge_group, groups.values()):
        mapped = self.map_fields(merged, id_prefix)

        last_name_tokens = mapped["general"]["last_name"].lower().split("-")
        name_tokens = mapped["general"]["name"].lower().split("-")
        year = mapped["intro"]["declaration_year"]

        search = Declaration.search().filter(
            Terms(general__last_name=last_name_tokens)
            & Terms(general__name=name_tokens)
            & Term(intro__declaration_year=year)
        )
        patronymic = mapped["general"]["patronymic"]
        if patronymic:
            search = search.filter(Term(general__patronymic=patronymic.lower()))

        found = search.execute()
        if found.hits:
            self.stdout.write(
                "%s (%s) already exists"
                % (mapped["general"]["full_name"], mapped["intro"]["declaration_year"])
            )
            # Keep the matched document id so the save replaces it.
            mapped["_id"] = found.hits[0]._id
        else:
            self.stdout.write(
                "%s (%s) created"
                % (mapped["general"]["full_name"], mapped["intro"]["declaration_year"])
            )

        Declaration(**mapped).save()
        saved += 1

    self.stdout.write("Loaded {} items to persistence storage".format(saved))
def handle(self, *args, **options):
    """Load declarations from a JSON dump, creating only those records
    that are not already present in the search index.
    """
    try:
        file_path = args[0]
    except IndexError:
        raise CommandError('First argument must be a source file')

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        decls = json.load(source)
        created = 0

        Declaration.init()  # Apparently this is required to init mappings

        for entry in decls:
            # Entries without parsed fields carry nothing to import.
            if "fields" not in entry["details"]:
                continue

            mapped = self.map_fields(entry)
            full_name = mapped['general']['full_name']
            year = mapped['intro']['declaration_year']

            name_query = (
                Q('terms', general__last_name=mapped['general']['last_name'].lower().split("-"))
                & Q('terms', general__name=mapped['general']['name'].lower().split("-"))
                & Q('term', intro__declaration_year=year)
            )
            search = Declaration.search().query(name_query)
            if mapped['general']['patronymic']:
                search = search.query(
                    'term',
                    general__patronymic=mapped['general']['patronymic'].lower())

            self.stdout.write("Checking %s (%s)" % (full_name, year))

            if not search.execute().hits:
                Declaration(**mapped).save()
                created += 1
            else:
                self.stdout.write("%s (%s) already exists" % (full_name, year))

    self.stdout.write(
        'Loaded {} items to persistence storage'.format(created))
def handle(self, *args, **options):
    """Bulk-load declarations from a semicolon-delimited CSV file into
    the search index (no duplicate detection is performed).
    """
    try:
        file_path = args[0]
    except IndexError:
        raise CommandError('First argument must be a source file')

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        reader = csv.DictReader(source, delimiter=";")
        loaded = 0

        Declaration.init()  # Apparently this is required to init mappings

        for record in reader:
            Declaration(**self.map_fields(record)).save()
            loaded += 1

    self.stdout.write(
        'Loaded {} items to persistence storage'.format(loaded))
def handle(self, *args, **options):
    """Upsert declarations from a semicolon-delimited CSV file into the
    search index, reusing an existing document id when a person/year
    match is found.
    """
    try:
        file_path = args[0]
    except IndexError:
        raise CommandError('First argument must be a source file')

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        reader = csv.DictReader(source, delimiter=";")
        saved = 0

        Declaration.init()  # Apparently this is required to init mappings

        for record in reader:
            mapped = self.map_fields(record)

            name_query = (
                Q('terms', general__last_name=mapped['general']['last_name'].lower().split("-"))
                & Q('terms', general__name=mapped['general']['name'].lower().split("-"))
                & Q('term', intro__declaration_year=mapped['intro']['declaration_year'])
            )
            search = Declaration.search().query(name_query)
            if mapped['general']['patronymic']:
                search = search.query(
                    'term',
                    general__patronymic=mapped['general']['patronymic'].lower())

            hits = search.execute().hits
            if hits:
                # Matched an existing document: overwrite it in place.
                mapped["_id"] = hits[0]._id

            Declaration(**mapped).save()
            saved += 1

    self.stdout.write(
        'Loaded {} items to persistence storage'.format(saved))
def handle(self, *args, **options):
    """Upsert declarations from a semicolon-delimited CSV file into the
    search index, replacing an existing document when a person/year
    match is found.

    args[0] -- path to the source CSV file.
    Raises CommandError when the source file argument is missing.
    """
    try:
        file_path = args[0]
    except IndexError:
        raise CommandError('First argument must be a source file')

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        reader = csv.DictReader(source, delimiter=";")
        counter = 0

        Declaration.init()  # Apparently this is required to init mappings

        for row in reader:
            mapped = self.map_fields(row)

            # BUG FIX: .split("-") yields a LIST, which a `term` query
            # cannot match; the list-valued name fields need a `terms`
            # query. Chained .filter() calls are AND-combined.
            res = Declaration.search().filter(
                'terms',
                general__last_name=mapped["general"]["last_name"].lower().split("-")
            ).filter(
                'terms',
                general__name=mapped["general"]["name"].lower().split("-")
            ).filter(
                'term',
                intro__declaration_year=mapped["intro"]["declaration_year"]
            )

            if mapped["general"]["patronymic"]:
                res = res.filter(
                    'term',
                    general__patronymic=mapped["general"]["patronymic"].lower())

            res = res.execute()

            if res.hits:
                # BUG FIX: was mapped["id"] — that merely stores a stray
                # "id" field and creates a duplicate document. The meta
                # document id key is "_id" (as the sibling importer uses),
                # which makes save() replace the matched record.
                mapped["_id"] = res.hits[0]._id

            item = Declaration(**mapped)
            item.save()
            counter += 1

    self.stdout.write(
        'Loaded {} items to persistence storage'.format(counter))
def handle(self, *args, **options):
    """Interactively import declarations from a tab-delimited CSV file,
    prompting the operator to resolve likely duplicates found in the
    search index (replace, skip, or create new).

    options["file_path"] -- path to the source file
    options["prefix"]    -- id prefix used by map_fields
    Raises CommandError when either option is missing.
    """
    try:
        file_path = options["file_path"]
        id_prefix = options["prefix"]
    # BUG FIX: dict subscripting raises KeyError, not IndexError, so the
    # original guard never fired and a missing option crashed with a raw
    # KeyError instead of the intended CommandError.
    except KeyError:
        raise CommandError(
            'First argument must be a source file and second is a id prefix'
        )

    # The duplicate-resolution prompt below needs an interactive terminal.
    if hasattr(sys.stdin, 'isatty') and not sys.stdin.isatty():
        self.stdout.write(
            "To import something you need to run this command in TTY.")
        return

    groups = defaultdict(list)

    with open(file_path, 'r', newline='', encoding='utf-8') as source:
        reader = csv.DictReader(source, delimiter='\t')
        counter = 0
        for row in reader:
            # The status column header may be in English or Ukrainian.
            status_col = 'Status' if 'Status' in row else 'Статус'
            # Only a blank status or an explicit "Ок" marks a valid row.
            if row[status_col] == '' or row[status_col] == 'Ок':
                groups[row[self._group_column(row)]].append(row)
                counter += 1

        self.stdout.write(
            'Read {} valid rows from the input file'.format(counter))

    Declaration.init()  # Apparently this is required to init mappings

    declarations = map(self.merge_group, groups.values())

    counter = 0
    for declaration in declarations:
        mapped = self.map_fields(declaration, id_prefix)

        res = Declaration.search().query(
            Q('terms',
              general__last_name=mapped['general']['last_name'].lower().split("-")) &
            Q('terms',
              general__name=mapped['general']['name'].lower().split("-")) &
            Q('term',
              intro__declaration_year=mapped['intro']['declaration_year']))

        if mapped['general']['patronymic']:
            res = res.query(
                'term',
                general__patronymic=mapped['general']['patronymic'].lower())

        res = res.execute()

        if res.hits:
            self.stdout.write(
                "Person\n%s (%s, %s, %s, %s)\n%s\nalready exists" %
                (mapped['general']['full_name'],
                 mapped['general']['post']['post'],
                 mapped['general']['post']['office'],
                 mapped['general']['post']['region'],
                 mapped['intro']['declaration_year'],
                 mapped['declaration']['url']))

            # List the candidates 1-based so the operator can pick one.
            for i, hit in enumerate(res.hits):
                self.stdout.write(
                    "%s: %s (%s, %s, %s, %s), %s\n%s" %
                    (i + 1,
                     hit['general']['full_name'],
                     hit['general']['post']['post'],
                     hit['general']['post']['office'],
                     hit['general']['post']['region'],
                     hit['intro']['declaration_year'],
                     hit._id,
                     hit['declaration']['url']))

            msg = (
                "Select one of persons above to replace, or press [s] " +
                "to skip current record or [c] to create new (default): ")

            r = input(msg).lower() or "c"
            if r == "s":
                self.stdout.write("Ok, skipping")
                continue

            # BUG FIX: lower bound added — input "0" previously passed the
            # check and silently selected res.hits[-1] via negative
            # indexing; now it falls through to "create new".
            if r.isdigit() and 1 <= int(r) <= len(res.hits):
                r = int(r) - 1
                mapped['_id'] = res.hits[r]._id
                self.stdout.write("Ok, replacing %s" % res.hits[r]._id)
            else:
                self.stdout.write("Ok, adding new record")
        else:
            self.stdout.write("%s (%s) created" %
                              (mapped['general']['full_name'],
                               mapped['intro']['declaration_year']))

        item = Declaration(**mapped)
        item.save()
        counter += 1

    self.stdout.write(
        'Loaded {} items to persistence storage'.format(counter))