def getEstablishments(city):
    """Collect every establishment for *city* by paging through result lists.

    Fetches BASE_URL + city['establishmentUrl'] in pages of 1000 rows and
    stops when a page contains the text 'No documents found'.  Returns a
    list of dicts with name, url, address, locality, and
    last_inspection_date keys.
    """
    found = []
    offset = 1
    page_size = 1000

    while True:
        page = scrapertools.getContent(
            BASE_URL + city['establishmentUrl']
            + '&start=' + str(offset) + '&count=' + str(page_size))
        # A page carrying 'No documents found' means we've paged past the end.
        if page.find(text='No documents found') is not None:
            break
        offset += page_size

        for row in page.find_all('tr'):
            cells = row.find_all('td')
            # Only data rows have exactly four cells with a link in the first.
            if len(cells) == 4 and cells[0] is not None and cells[0].a is not None:
                found.append({
                    'name': scrapertools.getText(cells[0]),
                    'url': cells[0].a['href'],
                    'address': scrapertools.getText(cells[2]),
                    'locality': city['locality'],
                    'last_inspection_date': scrapertools.getText(cells[3])
                })

    return found
def getEstablishments(city):
    """Scrape the establishment listing for *city* from a single page.

    NOTE(review): this redefines ``getEstablishments`` declared earlier in
    the file, so this unpaginated version shadows the paginated one at
    import time — confirm which implementation is actually intended.

    Returns a list of dicts with name, url, address, locality, and
    last_inspection_date keys.
    """
    results = []

    listing = scrapertools.getContent(BASE_URL + city['establishmentUrl'])
    for row in listing.find_all('tr'):
        cells = row.find_all('td')
        # Skip header/filler rows: data rows have four cells and a linked name.
        if len(cells) != 4 or cells[0] is None or cells[0].a is None:
            continue
        results.append({
            'name': scrapertools.getText(cells[0]),
            'url': cells[0].a['href'],
            'address': scrapertools.getText(cells[2]),
            'locality': city['locality'],
            'last_inspection_date': scrapertools.getText(cells[3])
        })

    return results
def getInspections(establishment, cityUrl):
    """Scrape the inspections listed on an establishment's detail page.

    Parameters:
        establishment: dict with a 'url' key, relative to BASE_URL.
        cityUrl: path segment used to build each inspection's violations URL.

    Returns a list of dicts with 'type', 'date', and 'violations' keys;
    'violations' is whatever getViolations returns for the linked page.
    """
    inspectionsFound = []

    establishmentDetails = scrapertools.getContent(BASE_URL + establishment['url'])
    # The rows following the 'Inspection Type' header row hold the inspections.
    inspections = establishmentDetails.find_all(text='Inspection Type')[0].find_parent('tr').find_all_next('tr')

    for inspection in inspections:
        details = inspection.find_all('td')

        # Bug fix: rows with fewer than two <td> cells (e.g. header rows
        # using <th>, or spacer rows) previously raised IndexError on
        # details[0] / details[1]; skip them along with unlinked rows.
        if len(details) < 2 or details[0].a is None:
            continue

        violations = getViolations(BASE_URL + cityUrl + '/' + details[0].a['href'])
        inspectionsFound.append({
            'type': scrapertools.getText(details[0]),
            'date': scrapertools.getText(details[1]),
            'violations': violations
        })

    return inspectionsFound