def save(job, provider_name="schonherz"):
    """Persist a scraped job into the URL table with state "scraped".

    :param job: dict of job attributes; must contain a ``'url'`` key.
        The url is stored on the row itself and excluded from the
        serialized payload. The caller's dict is NOT mutated.
    :param provider_name: provider to attach the row to; defaults to
        ``"schonherz"`` for backward compatibility.
    """
    url_obj = URL()
    url_obj.url = job['url']
    # Serialize everything except 'url' without mutating the caller's
    # dict (the original `del job['url']` surprised callers that reused it).
    payload = {key: value for key, value in job.items() if key != 'url'}
    url_obj.state = State.objects.get_or_create(state="scraped")[0]
    url_obj.provider = Provider.objects.get_or_create(name=provider_name)[0]
    url_obj.scraped_data = json.dumps(payload, ensure_ascii=False)
    url_obj.save()
def update_scraped_db(self):
    """Insert this job into the scraped DB with state "scraped".

    Stores ``self.job_attrs['url']`` on the URL row, links the row to
    ``self.provider_name``, and serializes the full attribute dict
    (url included) as JSON into ``scraped_data``.
    """
    state, _ = State.objects.get_or_create(state="scraped")
    provider, _ = Provider.objects.get_or_create(name=self.provider_name)

    record = URL()
    record.url = self.job_attrs['url']
    record.state = state
    record.provider = provider
    record.scraped_data = json.dumps(self.job_attrs, ensure_ascii=False)
    record.save()
def save(job, provider_name="eudiakok"):
    """Save the given job to the URL table with state "scraped".

    :param job: dict of job attributes; must contain a ``'url'`` key.
        The url is stored on the row itself and excluded from the
        serialized payload. The caller's dict is NOT mutated.
    :param provider_name: provider to attach the row to; defaults to
        ``"eudiakok"`` for backward compatibility.
    """
    url_obj = URL()
    url_obj.url = job['url']
    # Serialize everything except 'url' without mutating the caller's
    # dict (the original `del job['url']` surprised callers that reused it).
    payload = {key: value for key, value in job.items() if key != 'url'}
    url_obj.state = State.objects.get_or_create(state="scraped")[0]
    url_obj.provider = Provider.objects.get_or_create(name=provider_name)[0]
    url_obj.scraped_data = json.dumps(payload, ensure_ascii=False)
    url_obj.save()