def test_8_get_detail_labels(api_client):
    """Hearing detail payload lists attached labels with translated text.

    Fix: the original asserted ``len(...) is 3`` — identity comparison with
    an int only passes because CPython interns small integers; value
    equality (``==``) is the correct check.
    """
    hearing = Hearing(title='')
    hearing.save()
    label_one = Label(label='Label One')
    label_one.save()
    label_two = Label(label='Label Two')
    label_two.save()
    label_three = Label(label='Label Three')
    label_three.save()
    hearing.labels.add(label_one, label_two, label_three)
    response = api_client.get(get_detail_url(hearing.id))
    data = get_data_from_response(response)
    # Detail endpoint returns a single object, not a paginated listing.
    assert 'results' not in data
    assert len(data['labels']) == 3
    assert {'id': label_one.id, 'label': {'en': label_one.label}} in data['labels']
def import_hearing(hearing_datum, force=False):
    """Create a Hearing (plus intro section, comments, images, sub-sections)
    from one parsed import datum.

    :param hearing_datum: dict of hearing fields; consumed destructively on a
        private copy — leftover keys are logged as unhandled.
    :param force: when True (or settings.DEBUG), an existing hearing with the
        same slug is not skipped; a new one is imported under a mutated slug.
    :return: the created Hearing, or None when skipped.

    Fix: ``log.warn`` is a deprecated alias of ``Logger.warning`` — replaced
    with the supported spelling.
    """
    # We'll be mutating the data as we go, so it's courteous to take a copy.
    hearing_datum = deepcopy(hearing_datum)
    hearing_datum.pop("id")
    slug = hearing_datum.pop("slug")
    old_hearing = Hearing.objects.filter(id=slug).first()
    if old_hearing:  # pragma: no cover
        if settings.DEBUG or force:
            log.info("Hearing %s already exists, importing new entry with mutated slug", slug)
            slug += "_%s" % get_random_string(5)
        else:
            log.info("Hearing %s already exists, skipping", slug)
            return
    if "_geometry" in hearing_datum:  # pragma: no branch
        # `_geometry` is the parsed version of `_area`, so get rid of that
        hearing_datum.pop("_area", None)
    hearing = Hearing(
        id=slug,
        created_at=parse_aware_datetime(hearing_datum.pop("created_at")),
        modified_at=parse_aware_datetime(hearing_datum.pop("updated_at")),
        open_at=parse_aware_datetime(hearing_datum.pop("opens_at")),
        close_at=parse_aware_datetime(hearing_datum.pop("closes_at")),
        title=hearing_datum.pop("title"),
        published=(hearing_datum.pop("published") == "true"),
        geojson=(hearing_datum.pop("_geometry", None) or None),
    )
    assert not hearing.geojson or isinstance(hearing.geojson, dict)
    # Keep the imported modified_at timestamp instead of "now".
    hearing.save(no_modified_at_update=True)
    hearing.sections.create(
        type=SectionType.objects.get(identifier=InitialSectionType.INTRODUCTION),
        title="",
        abstract=(hearing_datum.pop("lead") or ""),
        content=(hearing_datum.pop("body") or ""),
    )
    import_comments(hearing, hearing_datum.pop("comments", ()))
    import_images(hearing, hearing_datum)
    for section_datum in sorted(hearing_datum.pop("sections", ()), key=itemgetter("position")):
        import_section(hearing, section_datum, InitialSectionType.PART, force)
    for alt_datum in sorted(hearing_datum.pop("alternatives", ()), key=itemgetter("position")):
        import_section(hearing, alt_datum, InitialSectionType.SCENARIO, force)
    # Compact section ordering...
    for index, section in enumerate(hearing.sections.order_by("ordering"), 1):
        section.ordering = index
        section.save(update_fields=("ordering",))
    if hearing_datum.keys():  # pragma: no cover
        log.warning("These keys were not handled while importing %s: %s", hearing, hearing_datum.keys())
    return hearing
def test_8_get_detail_closing_time(api_client):
    """The detail endpoint exposes close_at for a bare hearing."""
    subject = Hearing()
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert_datetime_fuzzy_equal(payload['close_at'], subject.close_at)
def test_8_get_detail_n_comments(api_client):
    """The detail payload reports the hearing's stored comment count."""
    subject = Hearing(n_comments=1)
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['n_comments'] == subject.n_comments
def test_8_get_detail_borough(api_client):
    """The detail payload echoes the hearing's borough."""
    subject = Hearing(borough='Itäinen')
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['borough'] == subject.borough
def test_8_get_detail_abstract(api_client):
    """The detail payload echoes the hearing's abstract."""
    subject = Hearing(abstract='Lorem Ipsum Abstract')
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['abstract'] == subject.abstract
def test_8_get_detail_closing_time(api_client):
    """close_at is exposed on the detail endpoint for a titled hearing."""
    subject = Hearing(title='title')
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert_datetime_fuzzy_equal(payload['close_at'], subject.close_at)
def test_8_get_detail_borough(api_client):
    """Borough is returned as a translated mapping keyed by language code."""
    subject = Hearing(borough='Itäinen', title="is required")
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['borough'][default_lang_code] == subject.borough
def test_8_get_detail_n_comments(api_client):
    """Comment count is exposed on the detail endpoint for a titled hearing."""
    subject = Hearing(n_comments=1, title='title')
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['n_comments'] == subject.n_comments
def test_8_get_detail_title(api_client):
    """The detail payload echoes the hearing's title verbatim."""
    subject = Hearing(title='Lorem Ipsum Title')
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['title'] == subject.title
def test_8_get_detail_title(api_client):
    """Title is returned as a translated mapping keyed by language code."""
    subject = Hearing(title='Lorem Ipsum Title')
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['title'][default_lang_code] == subject.title
def import_hearing(hearing_datum, force=False, patch=False):
    """Import one hearing datum: the Hearing row, its main section,
    comments, images, and sub-sections.

    :param hearing_datum: dict of hearing fields; a private copy is consumed
        destructively so leftover keys can be reported as unhandled.
    :param force: import under a mutated slug when the slug already exists.
    :param patch: update an existing hearing in place (soft-deleting its old
        components first) instead of creating a new slug.
    :return: the imported Hearing, or None when the import was skipped.
    """
    # We'll be mutating the data as we go, so it's courteous to take a copy.
    datum = deepcopy(hearing_datum)
    datum.pop("id")
    slug = datum.pop("slug")
    existing = Hearing.objects.filter(id=slug).first()
    if existing:  # pragma: no cover
        if patch:
            log.info("Hearing %s already exists, patching existing hearing", slug)
            # force is needed to import all the components with new ids if need be
            force = True
        elif settings.DEBUG or force:
            log.info("Hearing %s already exists, importing new entry with mutated slug", slug)
            slug += "_%s" % get_random_string(5)
        else:
            log.info("Hearing %s already exists, skipping", slug)
            return
    if "_geometry" in datum:  # pragma: no branch
        # `_geometry` is the parsed version of `_area`, so get rid of that
        datum.pop("_area", None)
    hearing = Hearing(
        id=slug,
        created_at=parse_aware_datetime(datum.pop("created_at")),
        modified_at=parse_aware_datetime(datum.pop("updated_at")),
        open_at=parse_aware_datetime(datum.pop("opens_at")),
        close_at=parse_aware_datetime(datum.pop("closes_at")),
        title=datum.pop("title"),
        published=(datum.pop("published") == "true"),
        geojson=(datum.pop("_geometry", None) or None),
    )
    assert not hearing.geojson or isinstance(hearing.geojson, dict)
    # Preserve the imported modified_at instead of stamping "now".
    hearing.save(no_modified_at_update=True)
    # if patching, soft delete old data to prevent duplicates
    if patch:
        clean_hearing_for_patching(hearing)
    main_section = hearing.sections.create(
        type=SectionType.objects.get(identifier=InitialSectionType.MAIN),
        title="",
        abstract=(datum.pop("lead") or ""),
        content=(datum.pop("body") or ""),
    )
    import_comments(main_section, datum.pop("comments", ()))
    import_images(main_section, datum)
    import_sections(hearing, datum, force)
    compact_section_ordering(hearing)
    if datum.keys():  # pragma: no cover
        log.warning("These keys were not handled while importing %s: %s", hearing, datum.keys())
    return hearing
def test_7_get_detail_servicemap(api_client):
    """The detail payload echoes the hearing's servicemap embed URL."""
    subject = Hearing(
        servicemap_url='http://servicemap.hel.fi/embed/?bbox=60.19276,24.93300,60.19571,24.94513&city=helsinki'
    )
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['servicemap_url'] == subject.servicemap_url
def test_7_get_detail_servicemap(api_client):
    """servicemap_url round-trips unchanged through the detail endpoint."""
    embed_url = 'http://servicemap.hel.fi/embed/?bbox=60.19276,24.93300,60.19571,24.94513&city=helsinki'
    subject = Hearing(servicemap_url=embed_url)
    subject.save()
    payload = get_data_from_response(api_client.get(get_detail_url(subject.id)))
    # A detail response is a single object, never a paginated listing.
    assert 'results' not in payload
    assert payload['servicemap_url'] == subject.servicemap_url
def import_hearing(hearing_datum, force=False, patch=False):
    """Import one hearing datum: the Hearing row, its introduction section,
    comments, images, and sub-sections.

    :param hearing_datum: dict of hearing fields; consumed destructively on a
        private copy — leftover keys are logged as unhandled.
    :param force: import under a mutated slug when the slug already exists.
    :param patch: update an existing hearing in place instead of mutating
        the slug (forces component re-import).
    :return: the imported Hearing, or None when skipped.

    Fix: ``log.warn`` is a deprecated alias of ``Logger.warning`` — replaced
    with the supported spelling.
    """
    # We'll be mutating the data as we go, so it's courteous to take a copy.
    hearing_datum = deepcopy(hearing_datum)
    hearing_datum.pop("id")
    slug = hearing_datum.pop("slug")
    old_hearing = Hearing.objects.filter(id=slug).first()
    if old_hearing:  # pragma: no cover
        if patch:
            log.info("Hearing %s already exists, patching existing hearing", slug)
            # force is needed to import all the components with new ids if need be
            force = True
        elif settings.DEBUG or force:
            log.info("Hearing %s already exists, importing new entry with mutated slug", slug)
            slug += "_%s" % get_random_string(5)
        else:
            log.info("Hearing %s already exists, skipping", slug)
            return
    if "_geometry" in hearing_datum:  # pragma: no branch
        # `_geometry` is the parsed version of `_area`, so get rid of that
        hearing_datum.pop("_area", None)
    hearing = Hearing(
        id=slug,
        created_at=parse_aware_datetime(hearing_datum.pop("created_at")),
        modified_at=parse_aware_datetime(hearing_datum.pop("updated_at")),
        open_at=parse_aware_datetime(hearing_datum.pop("opens_at")),
        close_at=parse_aware_datetime(hearing_datum.pop("closes_at")),
        title=hearing_datum.pop("title"),
        published=(hearing_datum.pop("published") == "true"),
        geojson=(hearing_datum.pop("_geometry", None) or None)
    )
    assert not hearing.geojson or isinstance(hearing.geojson, dict)
    # Keep the imported modified_at timestamp instead of "now".
    hearing.save(no_modified_at_update=True)
    hearing.sections.create(
        type=SectionType.objects.get(identifier=InitialSectionType.INTRODUCTION),
        title="",
        abstract=(hearing_datum.pop("lead") or ""),
        content=(hearing_datum.pop("body") or ""),
    )
    import_comments(hearing, hearing_datum.pop("comments", ()), patch)
    import_images(hearing, hearing_datum, patch)
    import_sections(hearing, hearing_datum, force, patch)
    # Compact section ordering...
    for index, section in enumerate(hearing.sections.order_by("ordering"), 1):
        section.ordering = index
        section.save(update_fields=("ordering",))
    if hearing_datum.keys():  # pragma: no cover
        log.warning("These keys were not handled while importing %s: %s", hearing, hearing_datum.keys())
    return hearing
def test_8_get_detail_labels(api_client):
    """Hearing detail payload lists attached labels as plain label strings.

    Fix: the original asserted ``len(...) is 3`` — identity comparison with
    an int only passes because CPython interns small integers; value
    equality (``==``) is the correct check.
    """
    hearing = Hearing()
    hearing.save()
    label_one = Label(label='Label One')
    label_one.save()
    label_two = Label(label='Label Two')
    label_two.save()
    label_three = Label(label='Label Three')
    label_three.save()
    hearing.labels.add(label_one, label_two, label_three)
    response = api_client.get(get_detail_url(hearing.id))
    data = get_data_from_response(response)
    # Detail endpoint returns a single object, not a paginated listing.
    assert 'results' not in data
    assert len(data['labels']) == 3
    assert label_one.label in data['labels']