def post_ajax(self, request, *args, **kwargs):
    url = self.parse_url(request.POST.get('url', '').strip())
    client_ip = get_client_ip(request)
    client_convert_count = ActivityLog.objects\
        .get_current_day_convert_count_by_ip(client_ip)
    daily_limit = settings.DAILY_CONVERT_LIMIT
    limit_reached = client_convert_count >= daily_limit
    if url and not limit_reached:
        task = tasks.convert.delay(url, client_ip)
        result = AsyncResult(task.id)
        # TODO: We're tying up resources here as we're waiting for the task
        # to finish. Remove this later and have the AJAX request retry
        # until result.ready().
        result.wait()
        data = {
            'task_id': task.id,
            'is_ready': False,
        }
        if result.successful():
            if result.result:
                youtube_id = result.result['youtube_id']
                filename = result.result['filename']
                download_link = reverse(
                    'download_view',
                    kwargs={'youtube_id': youtube_id, 'filename': filename}
                )
                data['message'] = 'Conversion successful!'
                data['is_ready'] = True
                data['youtube_id'] = youtube_id
                data['title'] = result.result['title']
                data['filename'] = filename
                data['download_link'] = download_link
                return self.render_json_response(data, status=200)
            data['message'] = 'Could not convert the video. Please make ' \
                              'sure the URL you entered is correct and ' \
                              'the video is no more than {} minutes long.' \
                              .format(settings.MAX_DURATION_SECONDS / 60)
            return self.render_json_response(data, status=200)
        data['message'] = 'Something went wrong :('
        return self.render_json_response(data, status=500)
    if limit_reached:
        logger.warning('Client reached convert limit: %s', client_ip)
        message = "Sorry, but you've reached your daily convert limit " \
                  "of {}. Please try again tomorrow.".format(daily_limit)
        return self.render_json_response({'message': message}, status=200)
    return self.render_json_response({'message': 'Please provide a URL.'},
                                     status=200)
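The TODO in the view above suggests replacing the blocking result.wait() with client-side polling. A minimal sketch of such a status endpoint, assuming Django and a configured default Celery app; the view name and JSON shape here are hypothetical, not part of the original project:

from celery.result import AsyncResult
from django.http import JsonResponse

def task_status(request, task_id):
    # Rebuild the result handle from the id the convert view returned.
    # Assumes a default Celery app is configured for this process.
    result = AsyncResult(task_id)
    data = {'task_id': task_id, 'is_ready': result.ready()}
    # Only attach the payload once the task has actually finished.
    if result.ready() and result.successful():
        data['result'] = result.result
    return JsonResponse(data)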
def wait_for_task_execution(client):
    keys = None
    while not keys:
        _, keys = client.scan()
    for key in keys:
        # key[17:] presumably strips the 17-character 'celery-task-meta-'
        # prefix that Celery's Redis result backend adds to task ids.
        result = AsyncResult(key[17:])
        result.wait()
        assert result.status == 'SUCCESS'
def extract_audio(request):
    if request.method == 'POST':
        form = DownloadForm(request.POST)
        if form.is_valid():
            client_ip = utils.get_client_ip(request)
            url = form.cleaned_data['url']
            # Remove the list parameter from the URL as we currently don't
            # support conversion of an entire playlist.
            # TODO: Refactor this entire thing later.
            if url:
                qs = parse_qs(urlparse(url).query)
                if qs.get('list', None):
                    del qs['list']
                    parts = urlsplit(url)
                    url = urlunsplit([
                        parts.scheme,
                        parts.netloc,
                        parts.path,
                        urllib.urlencode(qs, True),
                        parts.fragment
                    ])
                task = tasks.extract_audio.delay(url, client_ip)
                result = AsyncResult(task.id)
                result.wait()
                if result.successful():
                    data = {'success': True, 'id': task.id}
                    if result.result:
                        video_id = result.result['video_id']
                        filename = result.result['filename']
                        download_link = reverse('download_file', kwargs={
                            'video_id': video_id,
                            'filename': filename
                        })
                        data['video_id'] = video_id
                        data['filename'] = filename
                        data['download_link'] = download_link
                    return HttpResponse(json.dumps(data))
                else:
                    return HttpResponse(json.dumps({'success': False}))
            else:
                message = 'Please enter a URL.'
                return HttpResponse(
                    json.dumps({
                        'form_valid': False,
                        'detail': message
                    }))
    return HttpResponseForbidden()
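The playlist-stripping logic above can be exercised in isolation. A small sketch using the Python 3 urllib.parse names (the view itself uses urllib.urlencode, the Python 2 spelling); the URL below is a made-up example:

from urllib.parse import parse_qs, urlparse, urlsplit, urlunsplit, urlencode

url = 'https://www.youtube.com/watch?v=abc123&list=PLxyz'
qs = parse_qs(urlparse(url).query)
qs.pop('list', None)  # drop the playlist parameter if present
parts = urlsplit(url)
print(urlunsplit([parts.scheme, parts.netloc, parts.path,
                  urlencode(qs, True), parts.fragment]))
# -> https://www.youtube.com/watch?v=abc123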
def connect():
    if redisCache.exists('a', 'b'):
        if redisCache.exists('c'):
            emit('ready')
    else:
        if redisCache.exists('task_id'):
            task = AsyncResult(redisCache.get('task_id'))
            task.wait()
            saveTaskData(task)
            emit('ready')
def wait_till_deployment_deleted(deployment_delete_task_id):
    # allow_join_result() lifts Celery's guard against calling wait()/get()
    # on a result from inside another task.
    with allow_join_result():
        deployment_delete_task = AsyncResult(deployment_delete_task_id)
        print("Waiting for node deployment to be deleted...")
        deployment_delete_task.wait()
        if deployment_delete_task.successful():
            print("Deployment deleted successfully.")
            return
        else:
            task_meta = deployment_delete_task.backend.get_task_meta(
                deployment_delete_task.id)
            print(
                f"Deployment delete failed: {task_meta.get('status')} "
                f"with traceback: {task_meta.get('traceback')}")
def test_import_brightmls(self):
    wait_for_celery()
    resp = self.session.post(
        'https://localhost.localdomain/import',
        data={
            'submit': 'submit',
            'url': 'https://matrix.brightmls.com/Matrix/Public/Portal.aspx?ID=16150598256',
        })
    # Check that we got redirected
    try:
        self.assertEqual(resp.history[0].status_code, 302)
    except IndexError:
        print(resp.text)
        raise
    task_result = AsyncResult(json.loads(resp.history[0].text)['task_id'],
                              app=celery)
    result = task_result.wait()
def test_import_realtor_com(self):
    wait_for_celery()
    resp = self.session.post(
        'https://localhost.localdomain/import',
        data={
            'submit': 'submit',
            'url': 'https://www.realtor.com/realestateandhomes-detail/2508-36th-Pl-SE_Washington_DC_20020_M61723-48772',
            'content': open('realtor_com_detail_test.html').read()
        })
    # Check that we got redirected
    try:
        self.assertEqual(resp.history[0].status_code, 302)
    except IndexError:
        print(resp.text)
        raise
    task_result = AsyncResult(json.loads(resp.history[0].text)['task_id'],
                              app=celery)
    result = task_result.wait()

    resp = self.session.post(
        'https://localhost.localdomain/import',
        headers={
            'Content-Type': 'multipart/form-data; boundary=---------------------------350037505235270160363570564592'
        },
        data=('''-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="_charset_"

UTF-8
-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="__formid__"

deform
-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="url"

https://www.realtor.com/realestateandhomes-detail/2508-36th-Pl-SE_Washington_DC_20020_M61723-48772
-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="content"

-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="__start__"

upload:mapping
-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="upload"; filename="realtor_com_detail_test.html"
Content-Type: text/html

''' + \
              open('realtor_com_detail_test.html', 'rb').read().decode('latin-1') + \
              '''
-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="uid"

29D6UADLFM
-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="__end__"

upload:mapping
-----------------------------350037505235270160363570564592
Content-Disposition: form-data; name="submit"

submit
-----------------------------350037505235270160363570564592--
''').encode('utf-8'))
    # Check that we got redirected
    try:
        self.assertEqual(resp.history[0].status_code, 302)
    except IndexError:
        print(resp.text)
        raise
    task_result = AsyncResult(json.loads(resp.history[0].text)['task_id'],
                              app=celery)
    result = task_result.wait()
def advance_job(self, job_id):
    job = self.graph[job_id]

    # There are some situations (mainly when running jobs on HPC) where
    # one wants to manually finish a job.
    #
    # If that is done, the graph.json file has to be manually edited and
    # two things have to be done for the current job:
    #
    # 1. Set the "job_status" to "manually_finished".
    # 2. Add a new "manually_set_next_steps" item and manually add the
    #    next steps.
    if job["job_status"] == "manually_finished":
        assert "manually_set_next_steps" in job
        for step in job["manually_set_next_steps"]:
            inputs = {}
            # Pass along previous inputs for everything but
            # the orchestrate task. That task can reset the
            # inputs and only pass along the required inputs.
            if job["task_type"] != "Orchestrate":
                inputs.update(job["inputs"])
            if "inputs" in step:
                inputs.update(copy.deepcopy(step["inputs"]))
            prio = step["priority"] if "priority" in step else 0
            self.graph.add_job(task_type=step["task_type"],
                               inputs=inputs,
                               priority=prio,
                               from_node=job_id)
        self.graph.serialize()
        return

    if job["job_status"] == "running":
        # Check if it is still running.
        result = AsyncResult(id=job["celery_task_id"])
        print("STATE:", result.state)
        # Still running. Nothing to be done.
        if result.state == "SENT":
            self.status["current_status"] = "OK"
            self.status["current_message"] = \
                "Job '%s' currently running." % job_id
        # Run finished. A run should not really fail.
        elif result.state == "SUCCESS":
            return_value = result.wait()
            job["run_information"] = return_value
            if return_value["status"] == "success":
                job["job_status"] = "success"
                self.status["current_message"] = \
                    "Successfully completed job '%s'." % job_id
                self.status["current_status"] = "SUCCESS"
                if return_value["next_steps"]:
                    for step in return_value["next_steps"]:
                        inputs = {}
                        # Pass along previous inputs for everything but
                        # the orchestrate task. That task can reset the
                        # inputs and only pass along the required inputs.
                        if job["task_type"] != "Orchestrate":
                            inputs.update(job["inputs"])
                        if "inputs" in step:
                            inputs.update(copy.deepcopy(step["inputs"]))
                        prio = step["priority"] if "priority" in step else 0
                        self.graph.add_job(task_type=step["task_type"],
                                           inputs=inputs,
                                           priority=prio,
                                           from_node=job_id)
                if "new_goal" in return_value:
                    self.status["current_goal"] = return_value["new_goal"]
            elif return_value["status"] == "failed":
                job["job_status"] = "failed"
                self.status["current_status"] = "Failure"
                fs = return_value["fail_stage"]
                self.status["current_message"] = \
                    "Job '%s' failed at stage '%s' due to: '%s'" % (
                        job_id, fs, return_value[fs]["fail_reason"])
                push_notifications.send_notification(
                    title="Workflow encountered error.",
                    message="Job exited with status 'failed'.")
            else:
                job["job_status"] = return_value["status"]
                self.status["current_status"] = "????"
                self.status["current_message"] = \
                    "Current status is not clear. Celery job returned " \
                    "with status '%s'." % return_value["status"]
            # No matter the outcome. Always save the graph.
            self.graph.serialize()
        # Run failed due to some programming error.
        elif result.state == "FAILURE":
            job["job_status"] = "failed"
            self.status["current_status"] = "System Error"
            msg = "Uncaught exception during celery task execution:\n\n%s" \
                % result.traceback
            self.status["current_message"] = msg
            # Also send a push notification.
            push_notifications.send_notification(title="System error.",
                                                 message=msg)
        # Catch unknown problem.
        else:
            job["job_status"] = result.state
            self.status["current_status"] = "????"
            self.status["current_message"] = \
                "Current status is not clear."
    elif job["job_status"] == "not started":
        self.start_job(job_id)
        self.status["current_status"] = "OK"
        self.status["current_message"] = "Job '%s' started." % job_id
    elif job["job_status"] == "failed":
        # Nothing to do. Requires restarting the latest job!
        return
    else:
        raise NotImplementedError("'job_status' = '%s'" % job["job_status"])
def post(self):
    arguments = {
        k.lower(): self.get_argument(k)
        for k in self.request.arguments
    }
    response = {'status': 'error'}
    qParam = {}
    if 'token' in arguments:
        auths = tokens.get(arguments['token'])
        if auths is None:
            response['message'] = ('Token does not exist or it expired. '
                                   'Please create a new one')
            self.set_status(403)
        else:
            user = auths[0]
            passwd = auths[1]
            response['status'] = 'ok'
            user_folder = os.path.join(Settings.UPLOADS, user) + '/'
    else:
        self.set_status(400)
        response['message'] = 'Need a token to generate a request'

    if response['status'] == 'ok':
        if 'band' in arguments:
            bands = (arguments['band'].replace('[', '').replace(']', '')
                     .replace("'", '').replace(' ', ''))
            bands_set = set(bands.lower().split(','))
            default_set = set(['g', 'r', 'i', 'z', 'y'])
            if bands_set.issubset(default_set):
                bands = bands.lower().replace('y', 'Y')
                qParam['bands'] = bands
            else:
                self.set_status(400)
                response['status'] = 'error'

    if response['status'] == 'ok':
        if 'ra' in arguments and 'dec' in arguments:
            try:
                ra = [float(i) for i in
                      arguments['ra'].replace('[', '').replace(']', '').split(',')]
                dec = [float(i) for i in
                       arguments['dec'].replace('[', '').replace(']', '').split(',')]
                df_pos = pd.DataFrame(np.array([ra, dec]).T,
                                      columns=['RA', 'DEC'])
                qParam['df_pos'] = df_pos
                # stype = "manual"
                if len(ra) != len(dec):
                    self.set_status(400)
                    response['status'] = 'error'
                    response['message'] = ('RA and DEC arrays must have '
                                           'same dimensions')
            except:
                self.set_status(400)
                response['status'] = 'error'
                response['message'] = ('RA and DEC arrays must have '
                                       'same dimensions')
        elif 'expnum' in arguments:
            try:
                expnum = [int(i) for i in
                          arguments['expnum'].replace('[', '').replace(']', '').split(',')]
                qParam['expnum'] = expnum
                arguments.pop('expnum')
            except:
                self.set_status(400)
                response['status'] = 'error'
                response['message'] = "Invalid expnum!"
        elif 'nite' in arguments:
            try:
                night = [int(i) for i in
                         arguments['nite'].replace('[', '').replace(']', '').split(',')]
                qParam['night'] = night
                arguments.pop('nite')
            except:
                self.set_status(400)
                response['status'] = 'error'
                response['message'] = "Invalid nite!"
        else:
            response['status'] = 'error'
            response['message'] = ('Missing input: at least one of expnum, '
                                   'nite or (ra, dec) has to be specified '
                                   'for the query!')

    # Extra options.
    if response['status'] == 'ok':
        if 'no_blacklist' in arguments:
            noBlacklist = arguments['no_blacklist'] == 'true'
        else:
            noBlacklist = False
        if 'ccdnum' in arguments:
            try:
                ccdnum = [int(i) for i in
                          arguments['ccdnum'].replace('[', '').replace(']', '').split(',')]
                qParam['ccdnum'] = ccdnum
            except:
                self.set_status(400)
                response['status'] = 'error'
                response['message'] = "Invalid ccdnum!"
        if 'nite' in arguments:
            try:
                night = [int(i) for i in
                         arguments['nite'].replace('[', '').replace(']', '').split(',')]
                qParam['night'] = night
            except:
                self.set_status(400)
                response['status'] = 'error'
                response['message'] = "Invalid nite!"
        if 'expnum' in arguments:
            try:
                expnum = [int(i) for i in
                          arguments['expnum'].replace('[', '').replace(']', '').split(',')]
                qParam['expnum'] = expnum
            except:
                self.set_status(400)
                response['status'] = 'error'
                response['message'] = "Invalid expnum!"

    if response['status'] == 'ok':
        # init jobid
        qTaskId = str(uuid.uuid4())
        # put ra, dec into a dataframe
        now = datetime.datetime.now()
        qTiid = (user + '_mongo_' + qTaskId + '_{' +
                 now.strftime('%a %b %d %H:%M:%S %Y') + '}')
        # print(qParam)
        try:
            dtasks.getList.apply_async(args=[noBlacklist, qParam],
                                       task_id=qTiid)
        except Exception as e:
            response['status'] = 'error'
            response['message'] = str(e)
        else:
            res = AsyncResult(qTiid)
            result = res.wait(10)
            df_rt = pd.read_json(result)
            df_rt = df_rt[[
                'RA_CENT', 'DEC_CENT', 'FILENAME', 'BAND', 'EXPNUM', 'NITE',
                'PFW_ATTEMPT_ID', 'CCDNUM', 'FULL_PATH'
            ]]
            response['list'] = df_rt.to_dict(orient='records')
            self.set_status(200)
    self.write(response)
    self.flush()
    self.finish()
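The handler above relies on apply_async() accepting a caller-chosen task_id, from which an AsyncResult can later be rebuilt. A self-contained sketch of that pattern, assuming a hypothetical app, broker, and task:

import uuid

from celery import Celery
from celery.result import AsyncResult

# Hypothetical app; any configured broker/backend pair would do.
app = Celery('demo', broker='redis://localhost', backend='redis://localhost')

@app.task
def add(x, y):
    return x + y

# Dispatch with an explicit task_id, then rebuild the result from that id.
custom_id = str(uuid.uuid4())
add.apply_async(args=[2, 3], task_id=custom_id)
result = AsyncResult(custom_id, app=app)
print(result.wait(10))  # blocks for up to 10 seconds, like res.wait(10) above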
def generate_tile(request, year: int, month: int, day: int, zoom: int,
                  x_pixel: float, y_pixel: float) -> HttpResponse:
    """
    Get a mapnik tile, from the cache if it exists; otherwise it is
    generated in a celery task.

    :param request: django request
    :param year: request year as INT
    :param month: request month as INT
    :param day: request day as INT
    :param zoom: mapnik zoom level
    :param x_pixel: mapnik x coordinate
    :param y_pixel: mapnik y coordinate
    :return:
    """
    # Set the tile cache key, under which the celery task id & tile cache id
    # are stored.
    tile_cache_key: str = "{}-{}-{}-{}-{}-{}".format(
        int(year),
        int(month),
        int(day),
        int(zoom),
        int(x_pixel),
        int(y_pixel),
    )

    # tile static typing
    tile: Optional[bytes]
    tile_process: AsyncResult

    # get tile cache
    tile_cache: Optional[dict] = cache.get(tile_cache_key, {
        "process_id": None,
        "tile_hash": None
    })

    # check if a generation process is running and wait for it to end
    if tile_cache:
        if tile_cache["process_id"]:
            tile_process = AsyncResult(tile_cache["process_id"])
            for _ in range(0, env.int("TILE_GENERATOR_HARD_TIMEOUT", 360) * 2):
                sleep(0.5)
                tile_cache = cache.get(tile_cache_key, {
                    "process_id": None,
                    "tile_hash": None
                })
                if tile_cache:
                    if tile_cache["tile_hash"]:
                        break

    # try to get the tile png & return it
    if tile_cache:
        if tile_cache["tile_hash"]:
            tile = cache.get(tile_cache["tile_hash"])
            if tile:
                return HttpResponse(tile, content_type="image/jpeg")

    # if there is no tile process & no tile in cache, create one
    tile_process = async_generate_tile.delay(
        year=int(year),
        month=int(month),
        day=int(day),
        style_xml_template=OSM_CARTO_STYLE_XML,
        zoom=int(zoom),
        x_pixel=float(x_pixel),
        y_pixel=float(y_pixel),
        osm_cato_path=env("CARTO_STYLE_PATH"),
        cache_key=tile_cache_key,
    )
    if not tile_cache:
        tile_cache = {"process_id": None, "tile_hash": None}
    tile_cache["process_id"] = tile_process.id

    # update cache
    if zoom <= env.int("ZOOM_LEVEL", 13):
        cache.set(tile_cache_key, tile_cache, None)
    else:
        cache.set(tile_cache_key, tile_cache, env.int("TILE_CACHE_TIME", 2592000))

    try:
        tile_process.wait(timeout=env.int("TILE_GENERATOR_HARD_TIMEOUT", 360))
    except exceptions.TimeoutError:
        return HttpResponse("Timeout when creating tile", status=500)
    except CoordinateOutOfRange as e:
        return HttpResponse(e, status=405)

    tile_cache["tile_hash"] = tile_process.get()
    tile = cache.get(tile_cache["tile_hash"])
    if tile:
        return HttpResponse(tile, content_type="image/jpeg")
    return HttpResponse("Caching Error", status=500)
# -*- coding: utf-8 -*-
# __author__: MUSIBII
# __email__ : [email protected]
# __file__ : check_result.py
# __time__ : 2019-05-26 15:06

from celery.result import AsyncResult

from celery_task.celery import cel

asyncresult = AsyncResult(id='97d0f7a1-d6f6-436c-8887-593a6cf68f48', app=cel)

if asyncresult.successful():
    # wait() is an alias of get(); it returns the task's result (and would
    # re-raise the task's exception on failure, hence the successful() check).
    result = asyncresult.wait()
    print(result)
elif asyncresult.failed():
    print('Task failed')
elif asyncresult.status == 'PENDING':
    print('Task is waiting to be executed')
elif asyncresult.status == 'RETRY':
    print('Task raised an error and is being retried')
elif asyncresult.status == 'STARTED':
    print('Task has started executing')