# NOTE(review): this chunk is the tail of the TrueCar scraper module. `creamySoup`
# (the BeautifulSoup-parsed page) and `file` (the open CSV handle) are created
# earlier, outside this chunk — presumably inside `def scraper(path):`, since the
# main guard below calls `scraper(path)`. Confirm the enclosing function header
# and re-indent these lines under it when viewing the full file.
cards = creamySoup.find_all(class_="card-content vehicle-card-body")

# Loops through all of the info cards to gather data
for card in cards:
    year = card.find(class_="vehicle-card-year font-size-1").contents[0]
    # Price text arrives like "$12,345"; strip the formatting so the CSV
    # holds a bare number. Cards missing a price are skipped entirely.
    try:
        price = (card.find(class_="heading-3 margin-y-1 font-weight-bold")).contents[0].replace(",", "").replace("$", "")
    except AttributeError:
        continue
    # Mileage is buried in raw markup after an inline <svg> icon; slice out the
    # text between "</svg>" and "</div>", then drop comment markers, thousands
    # separators, and the " miles" suffix. Cards missing mileage are skipped.
    try:
        miles = str(card.find(class_="d-flex w-100 justify-content-between").contents[0])
        miles = miles[miles.find("</svg>") + 6:miles.find("</div>"):]
        miles = miles.replace("<!-- -->", "").replace(",", "").replace(" miles", "")
    except AttributeError:
        continue
    # Puts the data into CSV format and writes it
    data = year + "," + price + "," + miles
    file.write(data)
    file.write("\n")

# Closes the CSV file after adding scraped data
file.close()
print("Done Scraping TrueCar!")


# Main Method
if __name__ == "__main__":
    # Creates a path
    path = Path("output/truecar.csv")
    # Wipes the file
    formatUtil.fileWipe(path)
    # Runs the scraper
    scraper(path)
import formatUtil
import config
import truecar
import cargurus
import edmunds
import autotrader
import carsdotcom
from pathlib import Path

# Runs all of the scrapers into master.csv
if __name__ == "__main__":
    # Prompts the user to change the config if they want
    config.CarConfig().promptConfigChange()
    # Stores the master CSV file path/name
    outputFile = Path("output/master.csv")
    # Wipes the file
    formatUtil.fileWipe(outputFile)
    # Adds headers
    formatUtil.addHeaders(outputFile)
    # Activates all the scrapers; each appends its rows to the shared master CSV
    truecar.scraper(outputFile)
    cargurus.scraper(outputFile)
    edmunds.scraper(outputFile)
    autotrader.scraper(outputFile)
    carsdotcom.scraper(outputFile)
    # Checks for duplicate entries in the master.csv file
    formatUtil.removeDupe(outputFile)
    print("Completely Finished!")