def check_username(username, site_id, category_id, total, tracker_id,
                   request_timeout=10, test=False):
    """ Check if `username` exists on the specified site. """

    worker.start_job()
    redis_conn = worker.get_redis()
    session = worker.get_session()

    # Look up the target site, then scrape it for the username via Splash.
    site = session.query(Site).get(site_id)
    scrape = _splash_username_request(username, site, request_timeout)
    screenshot = _save_image(session, scrape)

    # Persist the scrape outcome.
    db_result = Result(
        tracker_id=tracker_id,
        site_name=scrape['site']['name'],
        site_url=scrape['url'],
        status=scrape['status'],
        image_file_id=screenshot.id,
        error=scrape['error'],
    )
    session.add(db_result)
    session.commit()

    if not test:
        # Publish this result to subscribed clients, tagged with the running
        # count so they can render progress.
        current = redis_conn.incr(tracker_id)
        payload = dict(db_result.as_dict(), current=current, total=total)
        redis_conn.publish('result', json.dumps(payload))

        # Once every site for this tracker has reported, archive the search.
        if current == total:
            app.queue.schedule_archive(username, category_id, tracker_id)

    worker.finish_job()

    return db_result.id
def check_username(username, site_id, category_id, total, tracker_id,
                   user_id, test=False):
    """ Check if `username` exists on the specified site. """

    worker.start_job()
    redis_conn = worker.get_redis()
    session = worker.get_session()

    # Fetch the site definition and scrape it for the username via Splash.
    site = session.query(Site).get(site_id)
    scrape = _splash_username_request(username, site)

    # Store the page screenshot (censored when the site requires it).
    screenshot = _save_image(db_session=session,
                             scrape_result=scrape,
                             user_id=user_id,
                             censor=site.censor_images)

    # Persist the scrape outcome.
    db_result = Result(
        tracker_id=tracker_id,
        site_id=scrape['site']['id'],
        site_name=scrape['site']['name'],
        site_url=scrape['url'],
        status=scrape['status'],
        image_file_id=screenshot.id,
        username=username,
        error=scrape['error'],
        user_id=user_id,
    )
    # Keep the raw page HTML only for status 'f' results
    # (presumably 'f' = found — confirm against Result's status codes).
    if db_result.status == 'f':
        db_result.html = scrape['html']
    session.add(db_result)
    session.commit()

    if not test:
        # Publish this result to subscribed clients, tagged with the running
        # count so they can render progress.
        current = redis_conn.incr(tracker_id)
        payload = dict(db_result.as_dict(), current=current, total=total)
        redis_conn.publish('result', json.dumps(payload))

        # Once every site for this tracker has reported, queue the archive job.
        if current == total:
            description = 'Archiving results for username "{}"'.format(username)
            worker.archive.create_archive.enqueue(
                username=username,
                category_id=category_id,
                tracker_id=tracker_id,
                jobdesc=description,
                timeout=_redis_worker['archive_timeout'],
                user_id=user_id)

    worker.finish_job()

    return db_result.id