def importer(request):
    """Scraper dispatcher view (legacy version).

    On a POST with a valid ``ImportForm`` from an authenticated user,
    enqueues a ``scrapeTask`` Celery job for the submitted URL, then
    renders the draft view with a "scraper started" message.

    NOTE(review): this definition is immediately shadowed by a second
    ``importer`` defined below in this file — this version is dead code
    and should probably be removed once the newer dispatcher is trusted.

    :param request: Django ``HttpRequest``.
    :returns: whatever ``draftview`` returns (an ``HttpResponse``).
    """
    if request.method == 'POST':
        form = ImportForm(request.POST.copy())
        # NOTE(review): ``is_authenticated()`` is called as a method —
        # correct for Django < 1.10; on newer Django it is a property.
        if form.is_valid() and request.user.is_authenticated():
            user = request.user
            # scrapeTask takes a *list* of URLs, even for a single submission.
            urls = [form.cleaned_data['url']]
            scrapeTask.delay(urls, user)
    # TODO(review): scraperMessage=True is shown even on GET or an
    # invalid form, when no scrape was actually started.
    return draftview(request, scraperMessage=True)
def importer(request):
    """Scraper dispatcher view.

    On a POST with a valid ``ImportForm`` from an authenticated user,
    looks up the site-specific scraper task via ``parse(url)`` and
    enqueues it with the URL and the user's primary key, then renders
    the draft view with a "scraper started" message.

    :param request: Django ``HttpRequest``.
    :returns: whatever ``draftview`` returns (an ``HttpResponse``).
    """
    if request.method == 'POST':
        form = ImportForm(request.POST.copy())
        # NOTE(review): ``is_authenticated()`` is called as a method —
        # correct for Django < 1.10; on newer Django it is a property.
        if form.is_valid() and request.user.is_authenticated():
            # Pass only the PK (not the user object) so the Celery
            # payload stays small and serializable.
            userPK = request.user.pk
            url = form.cleaned_data['url']
            # parse() maps a URL to the task that knows how to scrape
            # that site, or a falsy value for unknown sites.
            task = parse(url)
            print("importer attempting to import from : {}".format(url))
            print("for user : {}".format(userPK))
            if task:
                print("importing from {}".format(task.__name__))
                task.delay(url=url, userPK=userPK)
            else:
                # Unknown site: was a silent ``pass`` — at least surface
                # it; TODO(review): route through real logging and show
                # the user an "unsupported site" error.
                print("importer: no scraper task found for : {}".format(url))
        # TODO(review): invalid form falls through silently — surface
        # form.errors to the user instead.
    # TODO(review): scraperMessage=True is shown even on GET, an invalid
    # form, or an unknown site, when no scrape was actually started.
    return draftview(request, scraperMessage=True)