Example #1
 def _make_legistar_call(self, since=None):
     les = LegistarEventsScraper()
     les.EVENTSPAGE = 'https://fpdcc.legistar.com/Calendar.aspx'
     les.BASE_URL = 'https://fpdcc.legistar.com'
     if not since:
         since = datetime.today().year
     return les.events(since=since)
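The helper above is written as a method on a scraper class; a minimal standalone sketch of the same pattern follows. The import path `legistar.events` and the sample jurisdiction URL are assumptions, and the `Name`/`Meeting Date` columns follow the usage shown in Example #12 below.

from datetime import datetime

from legistar.events import LegistarEventsScraper  # assumed import path

def fetch_events(base_url, since=None):
    # Point the scraper at a *.legistar.com calendar, defaulting to the
    # current year, and return the (event, agenda) pairs it yields.
    les = LegistarEventsScraper()
    les.BASE_URL = base_url
    les.EVENTSPAGE = '{}/Calendar.aspx'.format(base_url)
    if not since:
        since = datetime.today().year
    return les.events(since=since)

for event, _ in fetch_events('https://chicago.legistar.com'):
    print(event['Name'], event['Meeting Date'])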
Example #2
 def _make_legistar_call(self, since=None):
     les = LegistarEventsScraper(jurisdiction=None, datadir=None)
     les.EVENTSPAGE = 'https://cook-county.legistar.com/Calendar.aspx'
     les.BASE_URL = 'https://cook-county.legistar.com'
     if not since:
         since = datetime.today().year
     return les.events(since=since)
Example #3
def refresh_events(jurisdiction):
    s = LegistarEventsScraper()
    s.EVENTSPAGE = 'https://{}.legistar.com/Calendar.aspx'.format(jurisdiction)

    page = next(s.eventPages('2018-01-01'))

    save_page(page, jurisdiction, 'events.html')
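Example #3 calls a `save_page` helper that is not shown on this page. A hypothetical stand-in is sketched below; it assumes `eventPages` yields parsed lxml documents (as the mock in Example #9 suggests) and uses the `tests/fixtures/<jurisdiction>/events.html` layout that the tests on this page read from.

import os

import lxml.html

def save_page(page, jurisdiction, filename):
    # Hypothetical helper: serialize the parsed calendar page back to HTML
    # and write it under a per-jurisdiction fixtures directory.
    fixture_dir = os.path.join('tests', 'fixtures', jurisdiction)
    os.makedirs(fixture_dir, exist_ok=True)
    with open(os.path.join(fixture_dir, filename), 'wb') as f:
        f.write(lxml.html.tostring(page))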
Example #4
 def _make_legistar_call(self, since=None):
     les = LegistarEventsScraper(requests_per_minute=0)
     les.EVENTSPAGE = 'https://cook-county.legistar.com/Calendar.aspx'
     les.BASE_URL = 'https://cook-county.legistar.com'
     if not since:
         since = datetime.today().year
     return les.events(since=since)
Example #5
 def _make_legistar_call(self, since=None):
     les = LegistarEventsScraper()
     les.EVENTSPAGE = 'https://chicago.legistar.com/Calendar.aspx'
     les.BASE_URL = 'https://chicago.legistar.com'
     if not since:
         since = self.year
     return les.events(since=self.year)
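Example #5 takes its year from a `self.year` attribute instead of computing it; a hypothetical hosting class might provide that attribute like this (the class name and value are assumptions beyond the snippet above):

from datetime import datetime

class ChicagoEventsSpider:
    # Hypothetical: the attribute the helper above reads as self.year.
    year = datetime.today().year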
Example #7
 def _call_legistar(self, since=None):
     les = LegistarEventsScraper()
     les.BASE_URL = self.base_url
     les.EVENTSPAGE = "{}/Calendar.aspx".format(self.base_url)
     if not since:
         since = datetime.today().year
     return les.events(since=since)
Example #8
 def _make_legistar_call(self, since=None):
     urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
     les = LegistarEventsScraper()
     les.EVENTSPAGE = self.START_URL + '/Calendar.aspx'
     les.BASE_URL = self.START_URL
     if not since:
         since = datetime.today().year
     return les.events(since=since)
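Example #8 silences urllib3's InsecureRequestWarning, which is normally emitted when HTTPS requests are made with certificate verification turned off. A standalone illustration of that pairing follows; the URL is illustrative only.

import requests
import urllib3

# Suppress the warning urllib3 raises for unverified HTTPS requests, then
# fetch a calendar page without certificate verification.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
response = requests.get('https://example.legistar.com/Calendar.aspx', verify=False)
print(response.status_code)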
Example #9
def test_parse_events(project_directory, mocker, jurisdiction):
    events_fixture = os.path.join(project_directory, 'tests', 'fixtures', jurisdiction, 'events.html')

    scraper = LegistarEventsScraper()
    scraper.BASE_URL = '{}.legistar.com'.format(jurisdiction)

    with open(events_fixture, 'r') as f:
        page = lxml.html.fromstring(f.read())
        mocker.patch.object(scraper, 'eventPages', return_value=page)
        result, _ = next(scraper.events(follow_links=False))
        print(result)
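The test relies on `project_directory` and `jurisdiction` pytest fixtures (plus the `mocker` fixture from pytest-mock), none of which appear on this page. A hypothetical conftest.py providing the first two might look like this; the parametrized jurisdiction values and the assumption that conftest.py sits at the repository root are illustrative.

# conftest.py (hypothetical): fixtures assumed by the test above.
import os

import pytest

@pytest.fixture
def project_directory():
    # Repository root, assuming this conftest.py lives there.
    return os.path.dirname(os.path.abspath(__file__))

@pytest.fixture(params=['chicago', 'cook-county'])
def jurisdiction(request):
    # Each value maps to tests/fixtures/<jurisdiction>/events.html.
    return request.param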
Example #11
 def _gen_legistar_fixtures(self, name, start_url):
     """Creates fixtures from a Legistar response"""
     events = []
     les = LegistarEventsScraper()
     les.BASE_URL = start_url
     les.EVENTSPAGE = "{}/Calendar.aspx".format(start_url)
     for event, _ in les.events(since=datetime.today().year):
         events.append((dict(event), None))
     fixture_file = join(self.fixtures_dir, "{}.json".format(name))
     with open(fixture_file, "w", encoding="utf-8") as f:
         json.dump(events, f)
     print("Created file: {}".format(fixture_file))
     return "{}.json".format(name)
Example #12
    def scrapeWebCalendar(self):
        web_scraper = LegistarEventsScraper(None, None)
        web_scraper.EVENTSPAGE = self.EVENTSPAGE
        web_scraper.BASE_URL = self.WEB_URL
        web_scraper.TIMEZONE = "America/Los_Angeles"
        web_scraper.date_format = '%m/%d/%Y'
        web_info = {}

        for event, _ in web_scraper.events():
            # Make the dict key (name, date-as-datetime, time), and add it.
            key = (event['Name']['label'],
                   web_scraper.toTime(event['Meeting Date']).date(),
                   event['Meeting Time'])
            web_info[key] = event

        return web_info
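scrapeWebCalendar returns a dictionary keyed by (meeting name, meeting date, meeting time); a self-contained lookup sketch follows. The meeting values and the 'Meeting Location' field are illustrative assumptions, not taken from a real calendar.

from datetime import date

# web_info would normally come from scrapeWebCalendar(); a one-entry dict is
# shown inline so the sketch stands alone. Keys are (name, date, time) tuples.
web_info = {
    ('Budget and Finance Committee', date(2018, 1, 17), '10:00 AM'): {
        'Name': {'label': 'Budget and Finance Committee'},
        'Meeting Date': '01/17/2018',
        'Meeting Time': '10:00 AM',
        'Meeting Location': 'Council Chambers',
    },
}

key = ('Budget and Finance Committee', date(2018, 1, 17), '10:00 AM')
event = web_info.get(key)
if event is not None:
    print(event['Name']['label'], event['Meeting Location'])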