from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import COMMITTEE, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.det_city_council import DetCityCouncilSpider

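# freezegun pins "now" to a fixed date so that any logic comparing meeting
# start times against the current date (e.g. TENTATIVE vs PASSED status)
# stays deterministic regardless of when the tests run.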
freezer = freeze_time('2019-02-22')
freezer.start()

test_response = file_response(
    join(dirname(__file__), "files", "det_city_council.html"),
    url=(
        'https://detroitmi.gov/events/public-health-and-safety-standing-committee-02-25-19'  # noqa
    ),
)
spider = DetCityCouncilSpider()
item = spider.parse_event_page(test_response)

freezer.stop()


def test_title():
    assert item['title'] == 'Public Health and Safety Standing Committee'


def test_description():
    assert item['description'] == ''
Example #2
from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import ADVISORY_COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.cle_design_review import CleDesignReviewSpider

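# file_response builds a Scrapy response from the saved HTML fixture and tags
# it with the given url, so the spider is exercised offline against a known
# snapshot of the page.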
test_response = file_response(
    join(dirname(__file__), "files", "cle_design_review.html"),
    url=(
        "http://clevelandohio.gov/CityofCleveland/Home/Government/CityAgencies/CityPlanningCommission/MeetingSchedules"  # noqa
    ),
)
spider = CleDesignReviewSpider()

freezer = freeze_time("2020-05-19")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 165


def test_title():
Example #3
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import BOARD, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.il_gaming_board import IlGamingBoardSpider

test_response = file_response(
    join(dirname(__file__), "files", "il_gaming_board.html"),
    url="http://www.igb.illinois.gov/MeetingsMinutes.aspx",
)
spider = IlGamingBoardSpider()

freezer = freeze_time("2019-06-04")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 10


def test_title():
    assert parsed_items[0]["title"] == "Riverboat/Video Gaming"
Example #4
from datetime import datetime
from os.path import dirname, join

from city_scrapers_core.constants import BOARD, COMMITTEE, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_city_college import ChiCityCollegeSpider

freezer = freeze_time("2018-01-12")
freezer.start()
test_response = file_response(
    join(dirname(__file__), "files", "chi_city_college.html"),
    url="http://www.ccc.edu/events/Pages/March-2019-Board-and-Committee-Meetings.aspx",
)
spider = ChiCityCollegeSpider()
parsed_items = [item for item in spider.parse_event_page(test_response)]
freezer.stop()


def test_title():
    assert (
        parsed_items[0]["title"] == "Committee on Finance and Administrative Services"
    )
    assert parsed_items[1]["title"] == "Board of Trustees"


def test_start():
    assert parsed_items[0]["start"] == datetime(2019, 2, 7, 12, 0)

Example #5
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import COMMISSION, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_ssa_48 import ChiSsa48Spider

test_response = file_response(
    join(dirname(__file__), "files", "chi_ssa_48.html"),
    url="https://oldtownchicago.org/ssa-48/",
)
spider = ChiSsa48Spider()

freezer = freeze_time("2020-01-04")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_title():
    assert parsed_items[0]["title"] == "Commission"


def test_description():
    assert parsed_items[0]["description"] == ""
Example #6
from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import FORUM, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.det_charter_review import DetCharterReviewSpider

test_response = file_response(
    join(dirname(__file__), "files", "det_charter_review.json"),
    url=(
        "https://detroitmi.gov/events/detroit-charter-revision-commission-meeting-economic-growth-development-3-20-19"  # noqa
    ),
)
spider = DetCharterReviewSpider()

freezer = freeze_time("2019-04-23")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_title():
    assert parsed_items[0][
        "title"] == "Citizen Focus Group: Equitable Planning & Zo"

Example #7
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import COMMISSION, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from scrapy.settings import Settings

from city_scrapers.spiders.chi_board_elections import ChiBoardElectionsSpider

test_response = file_response(
    join(dirname(__file__), "files", "chi_board_elections.html"),
    url='https://app.chicagoelections.com/pages/en/board-meetings.aspx')
spider = ChiBoardElectionsSpider()
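# A Settings object (normally injected by Scrapy) is supplied with archiving
# disabled, presumably so parse logic that consults self.settings skips any
# archive-related behavior during tests.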
spider.settings = Settings(values={"CITY_SCRAPERS_ARCHIVE": False})

freezer = freeze_time('2018-11-30')
freezer.start()

parsed_items = [item for item in spider._next_meeting(test_response)]

freezer.stop()


def test_title():
    assert parsed_items[0]['title'] == 'Electoral Board'


def test_description():
    assert parsed_items[0]['description'] == ''
Example #8
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import NOT_CLASSIFIED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.pa_energy import PaEnergySpider

test_response = file_response(
    join(dirname(__file__), "files", "pa_energy.html"),
    url="http://www.ahs.dep.pa.gov/CalendarOfEvents/Default.aspx?list=true",
)
spider = PaEnergySpider()

freezer = freeze_time("2019-05-11")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_tests():
    print(
        "Please write some tests for this spider or at least disable this one."
    )
    assert False

Example #9
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import COMMISSION, PASSED, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_ssa_42 import ChiSsa42Spider

freezer = freeze_time('2018-11-07')
freezer.start()
spider = ChiSsa42Spider()
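# Upcoming meetings and past meetings (taken from the cached minutes page) are
# parsed separately below and combined into one list for the assertions.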
res = file_response(
    join(dirname(__file__), "files", "chi_ssa_42.html"),
    url='https://ssa42.org/ssa-42-meeting-dates/',
)
minutes_res = file_response(
    join(dirname(__file__), "files", "chi_ssa_42_minutes.html"),
    url='https://ssa42.org/minutes-of-meetings/',
)
parsed_items = [
    item for item in spider._parse_meetings(res, upcoming=True)
] + [item for item in spider._parse_meetings(minutes_res)]
freezer.stop()


def test_start():
    assert parsed_items[0]['start'] == datetime(2018, 11, 8, 18, 30)
    assert parsed_items[1]['start'] == datetime(2018, 9, 20, 18, 30)


def test_id():
    assert parsed_items[0][
        'id'] == 'chi_ssa_42/201811081830/x/ssa_42_commission'
Example #10
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import BOARD, COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from scrapy.settings import Settings

from city_scrapers.spiders.cook_hospitals import CookHospitalsSpider

test_response = file_response(
    join(dirname(__file__), "files", "cook_hospitals.html"),
    url=(
        'https://cookcountyhealth.org/about/board-of-directors/board-committee-meetings-agendas-minutes/'  # noqa
    ))
spider = CookHospitalsSpider()
spider.settings = Settings(values={"CITY_SCRAPERS_ARCHIVE": False})

freezer = freeze_time("2019-10-15")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 37

Example #11
from datetime import datetime
from os.path import dirname, join

from city_scrapers_core.constants import COMMISSION
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.cook_human_rights import CookHumanRightsSpider

test_response = file_response(
    join(dirname(__file__), "files", "cook_human_rights.html"),
    url="https://www.cookcountyil.gov/event/"
    "cook-county-commission-human-rights-meeting-3",
)
test_response_filepage = file_response(
    join(dirname(__file__), "files", "cook_human_rights_file.html"),
    url="https://www.cookcountyil.gov/file/10402/",
)

spider = CookHumanRightsSpider()

freezer = freeze_time("2020-07-09")
freezer.start()
fake_formatted_date = datetime.strftime(
    datetime.strptime("July 2019", "%B %Y"), "%y-%m"
)
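# Setting the formatted date on response.meta stands in for the meta that the
# spider would otherwise attach to its follow-up request for the file page.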
test_response_filepage.meta["formatted_date"] = fake_formatted_date
spider._parse_links(test_response_filepage)
parsed_items = spider._parse_event(test_response)

freezer.stop()
Example #12
from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.summ_social_services_advisory import (
    SummSocialServicesAdvisorySpider, )

test_response = file_response(
    join(dirname(__file__), "files", "summ_social_services_advisory.eml"),
    url=(
        "https://city-scrapers-notice-emails.s3.amazonaws.com/summ_social_services_advisory/latest.eml"  # noqa
    ),
)
spider = SummSocialServicesAdvisorySpider()

freezer = freeze_time("2019-10-08")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 16

Example #13
from datetime import datetime
from os.path import dirname, join

from city_scrapers_core.constants import BOARD
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.stl_development import StlDevelopmentSpider

test_response = file_response(
    join(dirname(__file__), "files", "stl_development.html"),
    url="https://www.stlouis-mo.gov/events/eventdetails.cfm?Event_ID=21838",
)

test_detail_response = file_response(
    join(dirname(__file__), "files", "stl_development_detail.html"),
    url="https://www.stlouis-mo.gov/events/eventdetails.cfm?Event_ID=21838",
)
spider = StlDevelopmentSpider()

freezer = freeze_time("2020-07-23")
freezer.start()

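# The detail page is parsed first, presumably so that link data scraped there
# is stored on the spider before the event page itself is parsed.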
spider._parse_links(test_detail_response)
item = spider._parse_event(test_response)

freezer.stop()


def test_title():
    assert item["title"] == "St. Louis Local Development Company Board"
Example #14
from datetime import datetime
from operator import itemgetter
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import BOARD, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from scrapy.http import XmlResponse

from city_scrapers.spiders.cle_metro_school_district import CleMetroSchoolDistrictSpider

init_test_response = file_response(
    join(dirname(__file__), "files", "cle_metro_school_district.xml"),
    url="https://www.boarddocs.com/oh/cmsd/board.nsf/XML-ActiveMeetings",
)
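# The fixture is re-wrapped as an XmlResponse, presumably because the spider's
# parsing relies on XML response behavior that the fixture loader does not
# provide on its own.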
test_response = XmlResponse(
    url=init_test_response.url, request=init_test_response.request, body=init_test_response.body
)
spider = CleMetroSchoolDistrictSpider()

freezer = freeze_time("2019-09-09")
freezer.start()

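# Sorting by start time keeps the index-based assertions stable even if the
# XML lists meetings in a different order.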
parsed_items = sorted([item for item in spider.parse(test_response)], key=itemgetter("start"))

freezer.stop()


def test_count():
    assert len(parsed_items) == 250
Example #15
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import ADVISORY_COMMITTEE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.cook_medical_examiner import CookMedicalExaminerSpider

test_response = file_response(
    join(dirname(__file__), "files", "cook_medical_examiner.html"),
    url="https://www.cookcountyil.gov/service/medical-examiners-advisory-committee",
)
spider = CookMedicalExaminerSpider()

freezer = freeze_time("2019-03-14")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_title():
    assert parsed_items[0]["title"] == "Medical Examiner's Advisory Committee"


def test_number():
    assert len(parsed_items) == 5
Example #16
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import BOARD
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.pitt_urbandev import PittUrbandevSpider

test_response = file_response(
    join(dirname(__file__), "files", "pitt_urbandev.html"),
    url="https://www.ura.org/pages/board-meeting-notices-agendas-and-minutes",
)
spider = PittUrbandevSpider()

freezer = freeze_time("2020-01-25")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_description():
    assert parsed_items[0]["description"] == "Rescheduled board meeting"


def test_start():
    assert parsed_items[0]["start"] == datetime(2020, 1, 16, 14, 0)


def test_id():
Example #17
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import BOARD, COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_low_income_housing_trust_fund import (
    ChiLowIncomeHousingTrustFundSpider)

freezer = freeze_time('2018-10-31')
freezer.start()
spider = ChiLowIncomeHousingTrustFundSpider()
cal_res = file_response(
    join(dirname(__file__), 'files', 'chi_low_income_housing_trust_fund.html'))
parsed_items = []
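# Each calendar entry is paired with the same cached detail page; passing the
# item through response.meta mirrors how the spider would hand it to the
# detail callback via Request.meta.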
for item in spider._parse_calendar(cal_res):
    detail_res = file_response(
        join(dirname(__file__), 'files',
             'chi_low_income_housing_trust_fund_detail.html'))
    detail_res.meta['item'] = item
    parsed_items.append(spider._parse_detail(detail_res))
freezer.stop()


def test_title():
    assert parsed_items[0]['title'] == 'Finance Committee'
    assert parsed_items[1]['title'] == 'Allocations Committee'
    assert parsed_items[2]['title'] == 'Board Meeting'
Example #18
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import CANCELLED, COMMITTEE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.wayne_government_operations import WayneGovernmentOperationsSpider

freezer = freeze_time('2018-03-27')
freezer.start()
test_response = file_response(
    join(dirname(__file__), "files", "wayne_government_operations.html"),
    url=(
        'https://www.waynecounty.com/elected/commission/government-operations.aspx'  # noqa
    ),
)
spider = WayneGovernmentOperationsSpider()
parsed_items = [item for item in spider.parse(test_response)]
freezer.stop()


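# parametrize expands each test into one case per parsed meeting, so every
# item scraped from the page is checked individually.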
@pytest.mark.parametrize('item', parsed_items)
def test_description(item):
    assert item['description'] == ''


@pytest.mark.parametrize('item', parsed_items)
def test_location(item):
    assert item['location'] == spider.location
Example #19
from datetime import datetime
from os.path import dirname, join

from city_scrapers_core.constants import COMMISSION
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.il_police_professionalism import IlPoliceProfessionalismSpider

test_response = file_response(
    join(dirname(__file__), "files", "il_police_professionalism.html"),
    url="https://www.isp.state.il.us/media/pressdetails.cfm?ID=1028",
)
spider = IlPoliceProfessionalismSpider()

freezer = freeze_time("2019-09-11")
freezer.start()

item = spider._parse_item(test_response)

freezer.stop()


def test_title():
    assert item["title"] == "Commission on Police Professionalism"


def test_start():
    assert item["start"] == datetime(2019, 3, 28, 14, 0)

Example #20
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import COMMISSION
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_ssa_50 import ChiSsa50Spider

test_response = file_response(
    join(dirname(__file__), "files", "chi_ssa_50.html"),
    url="http://southeastchgochamber.org/special-service-area-50/",
)
spider = ChiSsa50Spider()

freezer = freeze_time("2019-10-27")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()

expected = {
    "title": COMMISSION,
    "description": "",
    "classification": COMMISSION,
    "start": "2019-04-17 00:00:00",
    "end": None,
    "all_day": False,
    "time_notes": "",
Example #21
from datetime import datetime
from os.path import dirname, join

from city_scrapers_core.constants import BOARD, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.cook_board_ethics import CookBoardEthicsSpider

test_response = file_response(
    join(dirname(__file__), "files", "cook_board_ethics.html"),
    url="https://www.cookcountyil.gov/event/cook-county-board-ethics-meeting-3",
)
spider = CookBoardEthicsSpider()

freezer = freeze_time("2019-10-9")
freezer.start()
item = spider._parse_event(test_response)
freezer.stop()


def test_title():
    assert item["title"] == "Board of Ethics"


def test_start():
    assert item["start"] == datetime(2019, 8, 29, 14)


def test_end():
    assert item["end"] == datetime(2019, 8, 29, 16)
Example #22
from datetime import datetime
from operator import itemgetter
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import BOARD, PASSED, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time
from scrapy.settings import Settings

from city_scrapers.spiders.il_procurement_policy import IlProcurementPolicySpider

test_response = file_response(
    join(dirname(__file__), "files", "il_procurement_policy.html"),
    url="https://www2.illinois.gov/sites/ppb/Pages/future_board_minutes.aspx",
)
spider = IlProcurementPolicySpider()
spider.settings = Settings(values={"CITY_SCRAPERS_ARCHIVE": False})

freezer = freeze_time("2019-10-07")
freezer.start()

parsed_items = [item for item in spider._upcoming_meetings(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 1


def test_title():
    assert parsed_items[0]["title"] == "Procurement Policy Board"
Example #23
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import BOARD
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.il_capital_development import IlCapitalDevelopmentSpider

test_response = file_response(
    join(dirname(__file__), "files", "il_capital_development.html"),
    url=(
        "https://www2.illinois.gov/cdb/about/boardmeetings/Pages/20192020Meetings.aspx"  # noqa
    ),
)
spider = IlCapitalDevelopmentSpider()

freezer = freeze_time("2019-10-26")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_title():
    assert parsed_items[0]["title"] == "Capital Development Board"


def test_description():
    assert parsed_items[0]["description"] == ""
Example #24
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import COMMISSION, PASSED, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_ssa_25 import ChiSsa25Spider

test_response = file_response(
    join(dirname(__file__), "files", "chi_ssa_25.html"),
    url='http://littlevillagechamber.org/2019-meetings-minutes/')
spider = ChiSsa25Spider()

freezer = freeze_time('2019-03-17')
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_title():
    assert parsed_items[0]['title'] == 'Commission: Monthly'


def test_start():
    assert parsed_items[0]['start'] == datetime(2019, 1, 15, 9)
    assert parsed_items[-1]['start'] == datetime(2019, 12, 17, 9)
Example #25
from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.cuya_northeast_ohio_coordinating import (
    CuyaNortheastOhioCoordinatingSpider)

test_response = file_response(
    join(dirname(__file__), "files", "cuya_northeast_ohio_coordinating.html"),
    url=(
        "https://www.noaca.org/board-committees/noaca-board-and-committees/agendas-and-presentations/-toggle-all"  # noqa
    ))
test_detail_response = file_response(
    join(dirname(__file__), "files",
         "cuya_northeast_ohio_coordinating_detail.html"),
    url=(
        "https://www.noaca.org/Home/Components/Calendar/Event/8261/7639?toggle=all&npage=2"  # noqa
    ),
)
spider = CuyaNortheastOhioCoordinatingSpider()

freezer = freeze_time("2019-10-04")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]
parsed_item = [item for item in spider._parse_detail(test_detail_response)][0]

freezer.stop()
Example #26
from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import COMMISSION, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.det_library_commission import DetLibraryCommissionSpider

test_response = file_response(
    join(dirname(__file__), "files", "det_library_commission.html"),
    url='https://detroitpubliclibrary.org/meeting/4908')

spider = DetLibraryCommissionSpider()

freezer = freeze_time('2019-02-24')
freezer.start()
item = spider._parse_item(test_response)
freezer.stop()


def test_title():
    assert item['title'] == 'Regular Commission Meeting'


def test_description():
    assert item['description'] == ''


def test_start():
Example #27
from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import COMMISSION, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.cuya_monument import CuyaMonumentSpider

test_response = file_response(
    join(dirname(__file__), "files", "cuya_monument.html"),
    url="http://bc.cuyahogacounty.us/en-US/Monument-Commission.aspx",
)
spider = CuyaMonumentSpider()

freezer = freeze_time("2019-09-25")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 2


def test_title():
    assert parsed_items[0]["title"] == "Monument Commission"
Example #28
from datetime import datetime
from os.path import dirname, join

import pytest  # noqa
from city_scrapers_core.constants import COMMISSION, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_ssa_43 import ChiSsa43Spider

test_response = file_response(
    join(dirname(__file__), "files", "chi_ssa_43.html"),
    url="https://rpba.org/ssa-43/",
)
spider = ChiSsa43Spider()

freezer = freeze_time("2019-07-01")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 26


def test_title():
    assert parsed_items[0]["title"] == "Commission"
    assert parsed_items[5]["title"] == "Emergency Meeting"
Example #29
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import COMMITTEE, PASSED
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.wayne_health_human_services import WayneHealthHumanServicesSpider

freezer = freeze_time('2018-03-27')
freezer.start()
test_response = file_response(
    join(dirname(__file__), "files", "wayne_health_human_services.html"),
    url=(
        'https://www.waynecounty.com/elected/commission/health-human-services.aspx'  # noqa
    ),
)
spider = WayneHealthHumanServicesSpider()
parsed_items = [item for item in spider.parse(test_response)]
freezer.stop()


@pytest.mark.parametrize('item', parsed_items)
def test_description(item):
    assert item['description'] == ''


@pytest.mark.parametrize('item', parsed_items)
def test_location(item):
    assert item['location'] == spider.location
Example #30
from datetime import datetime
from os.path import dirname, join

import pytest
from city_scrapers_core.constants import BOARD, CANCELLED, TENTATIVE
from city_scrapers_core.utils import file_response
from freezegun import freeze_time

from city_scrapers.spiders.chi_labor_retirement_fund import ChiLaborRetirementFundSpider

test_response = file_response(
    join(dirname(__file__), "files", "chi_labor_retirement_fund.html"),
    url="http://www.labfchicago.org/agendas-minutes",
)
spider = ChiLaborRetirementFundSpider()

freezer = freeze_time("2019-02-10")
freezer.start()

parsed_items = [item for item in spider.parse(test_response)]

freezer.stop()


def test_count():
    assert len(parsed_items) == 25


def test_title():
    assert parsed_items[0]["title"] == "Retirement Board"
    assert parsed_items[4]["title"] == "Special Meeting"