def post(self):
    auth_key = self.request.get('key')
    auth = model.Authorization.all().filter('auth_key =', auth_key).get()
    if auth:
        source_domain = auth.domain
        try:
            person_records, note_records = pfif.parse_file(
                self.request.body_file)
        except Exception, e:
            self.response.set_status(400)
            self.write('Invalid XML: %s\n' % e)
            return
        self.response.headers['Content-Type'] = 'application/xml'
        self.write('<?xml version="1.0"?>\n')
        self.write('<status:status>\n')
        written, skipped, total = importer.import_records(
            source_domain, importer.create_person, person_records)
        self.write_status(
            'person', written, skipped, total, 'person_record_id')
        written, skipped, total = importer.import_records(
            source_domain, importer.create_note, note_records)
        self.write_status(
            'note', written, skipped, total, 'note_record_id')
        self.write('</status:status>\n')

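For orientation, a minimal sketch of how a client might call a write handler like the one above. Only the 'key' query parameter, the PFIF XML request body, and the 403/400 error cases are taken from the handler code; the URL path, the key value, and the input file name are placeholders.

import urllib
import urllib2

# Hypothetical endpoint; the real URL path depends on the app's routing.
url = 'https://example.appspot.com/api/write?' + urllib.urlencode(
    {'key': 'your-auth-key'})
body = open('records.pfif.xml').read()  # any PFIF XML document
request = urllib2.Request(url, data=body,
                          headers={'Content-Type': 'application/xml'})
# On success the handler responds with an XML status document; a missing or
# invalid key or malformed XML surfaces as urllib2.HTTPError (403 / 400).
print urllib2.urlopen(request).read()
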
def import_site_export(
        export_path, remote_api_host, app_id, batch_size, store_all):
    # Log in, then use the pfif parser to parse the export file.  Use the
    # importer methods to convert the dicts to entities, then add them as in
    # import.py, but less strictly, to ensure that all exported data is
    # available.
    remote_api.init(app_id, remote_api_host)
    logging.info('%s: importing exported records from %s',
                 remote_api_host, export_path)
    if not export_path.endswith('.zip'):
        export_fd = open(export_path)
    else:
        export_fd = open_file_inside_zip(export_path)
    persons, notes = pfif.parse_file(export_fd)
    logging.info('loaded %d persons, %d notes', len(persons), len(notes))
    if not store_all:
        persons = [d for d in persons if not importer.is_local_domain(
            d.get('person_record_id', ''), 'person')]
        notes = [d for d in notes if not importer.is_local_domain(
            d.get('note_record_id', ''), 'note')]
        logging.info('... down to %d persons, %d notes after excluding '
                     '%s records', len(persons), len(notes), HOME_DOMAIN)
    logging.info('... adding persons')
    add_entities(persons, create_person, batch_size, 'person', store_all)
    logging.info('... adding notes')
    add_entities(notes, create_note, batch_size, 'note', store_all)

def test_parse_files(self):
    """Tests parsing of an XML file for each test case."""
    for test_name, test_case in TEST_CASES:
        if not test_case.do_parse_test:
            continue
        person_records, note_records = pfif.parse_file(
            StringIO.StringIO(test_case.xml))
        assert person_records == test_case.person_records, (
            test_name + ':\n' +
            pprint_diff(test_case.person_records, person_records))
        assert note_records == test_case.note_records, (
            test_name + ':\n' +
            pprint_diff(test_case.note_records, note_records))

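For reference, a minimal, self-contained sketch of the call pattern these tests exercise. The sample document, its PFIF 1.2 field names, and the expectation that a nested note comes back in the note list are assumptions about the pfif module, not part of the test fixtures above.

import StringIO

import pfif

# Illustrative PFIF 1.2 document (field names are assumptions, not the
# fixtures used by the tests in this file).
SAMPLE_XML = '''<?xml version="1.0" encoding="UTF-8"?>
<pfif:pfif xmlns:pfif="http://zesty.ca/pfif/1.2">
  <pfif:person>
    <pfif:person_record_id>example.org/person.1</pfif:person_record_id>
    <pfif:first_name>Jane</pfif:first_name>
    <pfif:last_name>Doe</pfif:last_name>
    <pfif:note>
      <pfif:note_record_id>example.org/note.1</pfif:note_record_id>
      <pfif:person_record_id>example.org/person.1</pfif:person_record_id>
      <pfif:text>Seen at the shelter.</pfif:text>
    </pfif:note>
  </pfif:person>
</pfif:pfif>'''

# parse_file returns two lists of plain dicts keyed by unprefixed field names.
person_records, note_records = pfif.parse_file(StringIO.StringIO(SAMPLE_XML))
print person_records[0]['person_record_id']  # 'example.org/person.1'
print note_records[0]['note_record_id']      # 'example.org/note.1'
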
def post(self): if not (self.auth and self.auth.domain_write_permission): self.info(403, message="Missing or invalid authorization key", style="plain") return source_domain = self.auth.domain_write_permission try: person_records, note_records = pfif.parse_file(self.request.body_file) except Exception, e: self.info(400, message="Invalid XML: %s" % e, style="plain") return
def post(self): if not (self.auth and self.auth.domain_write_permission): self.response.set_status(403) self.write("Missing or invalid authorization key\n") return source_domain = self.auth.domain_write_permission try: person_records, note_records = pfif.parse_file(self.request.body_file) except Exception, e: self.response.set_status(400) self.write("Invalid XML: %s\n" % e) return
def post(self):
    if not (self.auth and self.auth.domain_write_permission):
        self.info(
            403, message='Missing or invalid authorization key',
            style='plain')
        return
    source_domain = self.auth.domain_write_permission
    try:
        person_records, note_records = \
            pfif.parse_file(self.request.body_file)
    except Exception, e:
        self.info(400, message='Invalid XML: %s' % e, style='plain')
        return

def post(self):
    if not (self.auth and self.auth.domain_write_permission):
        self.response.set_status(403)
        self.write('Missing or invalid authorization key\n')
        return
    source_domain = self.auth.domain_write_permission
    try:
        person_records, note_records = \
            pfif.parse_file(self.request.body_file)
    except Exception, e:
        self.response.set_status(400)
        self.write('Invalid XML: %s\n' % e)
        return

def import_site_export(
        export_path, remote_api_host, app_id, batch_size, store_all):
    # Log in, then use the pfif parser to parse the export file.  Use the
    # importer methods to convert the dicts to entities, then add them as in
    # import.py, but less strictly, to ensure that all exported data is
    # available.
    remote_api.connect(remote_api_host, app_id)
    logging.info("%s: importing exported records from %s",
                 remote_api_host, export_path)
    if not export_path.endswith(".zip"):
        export_fd = open(export_path)
    else:
        export_fd = open_file_inside_zip(export_path)
    persons, notes = pfif.parse_file(export_fd)
    logging.info("loaded %d persons, %d notes", len(persons), len(notes))
    if not store_all:
        persons = [d for d in persons if is_clone(d.get("person_record_id"))]
        notes = [d for d in notes if is_clone(d.get("note_record_id"))]
        logging.info(
            "... down to %d persons, %d notes after excluding %r records",
            len(persons), len(notes), HOME_DOMAIN)
    logging.info("... adding persons")
    add_entities(persons, create_person, batch_size, "person", store_all)
    logging.info("... adding notes")
    add_entities(notes, create_note, batch_size, "note", store_all)

def import_site_export(
        export_path, remote_api_host, app_id, batch_size, store_all):
    # Log in, then use the pfif parser to parse the export file.  Use the
    # importer methods to convert the dicts to entities, then add them as in
    # import.py, but less strictly, to ensure that all exported data is
    # available.
    remote_api.connect(remote_api_host, app_id)
    logging.info('%s: importing exported records from %s',
                 remote_api_host, export_path)
    if not export_path.endswith('.zip'):
        export_fd = open(export_path)
    else:
        export_fd = open_file_inside_zip(export_path)
    persons, notes = pfif.parse_file(export_fd)
    logging.info('loaded %d persons, %d notes', len(persons), len(notes))
    if not store_all:
        persons = [d for d in persons if is_clone(d.get('person_record_id'))]
        notes = [d for d in notes if is_clone(d.get('note_record_id'))]
        logging.info(
            '... down to %d persons, %d notes after excluding %r records',
            len(persons), len(notes), HOME_DOMAIN)
    logging.info('... adding persons')
    add_entities(persons, create_person, batch_size, 'person', store_all)
    logging.info('... adding notes')
    add_entities(notes, create_note, batch_size, 'note', store_all)

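The add_entities helper used by import_site_export is not shown here. Below is a minimal sketch of what such a batching helper could look like, assuming create_fn turns a record dict into a datastore entity and that google.appengine.ext.db.put is available via the remote API connection established above; the real helper may differ.

import logging

from google.appengine.ext import db


def add_entities(records, create_fn, batch_size, kind_name, store_all):
    """Illustrative only: converts record dicts to entities with create_fn
    and writes them in batches of batch_size via db.put.  store_all is
    accepted to match the call sites above; here it is only logged."""
    entities = []
    for record in records:
        try:
            entities.append(create_fn(record))
        except (KeyError, ValueError), e:
            logging.warn('skipping %s record: %s', kind_name, e)
    for start in range(0, len(entities), batch_size):
        batch = entities[start:start + batch_size]
        db.put(batch)
        logging.info('stored %d of %d %s entities (store_all=%s)',
                     start + len(batch), len(entities), kind_name, store_all)
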
def parse_file(self, file):
    # Do not rename fields to PFIF 1.4.
    return pfif.parse_file(file, rename_fields=False)[1]

def parse_file(self, file):
    return pfif.parse_file(file)[1]

def test_parse_file(self):
    file = StringIO.StringIO(PFIF_WITH_PREFIXES)
    person_records, note_records = pfif.parse_file(file)
    self.assertEqual([PERSON_RECORD], person_records)
    self.assertEqual([NOTE_RECORD], note_records)

def test_parse_file(self):
    file = StringIO.StringIO(PFIF_WITH_PREFIXES)
    person_records, note_records = pfif.parse_file(file)
    assert [PERSON_RECORD] == person_records
    assert [NOTE_RECORD] == note_records