def test_progress(self):
    """When a task finishes, it increments the progress counter properly."""
    increment = expected = 25.0
    key = decorators.get_prog_key('fake_func', self.pk)
    self.assertEqual(float(get_cache(key, 0.0)['progress']), 0.0)

    @decorators.lock_and_track
    def fake_func(import_file_pk):
        increment_cache(key, increment)

    fake_func(self.pk)
    self.assertEqual(float(get_cache(key, 0.0)['progress']), expected)
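# ---------------------------------------------------------------------------
# A minimal sketch of the cache helpers that the snippets in this file lean
# on (get_cache / set_cache / increment_cache). The real implementations live
# elsewhere in the codebase; the shapes below are assumptions for
# illustration, built on Django's low-level cache API, and merely reproduce
# the behavior the tests above and below exercise (a default progress value,
# a status tag, and clamping at 100).
# ---------------------------------------------------------------------------
from django.core.cache import cache


def set_cache(key, status, data):
    """Store a progress dict under `key`, tagging it with `status`."""
    if not isinstance(data, dict):
        data = {'progress': data}
    data['status'] = status
    cache.set(key, data)


def get_cache(key, default=None):
    """Fetch the progress dict for `key`; fall back to `default` progress."""
    result = cache.get(key)
    if result is None and default is not None:
        result = {'status': 'unknown', 'progress': default}
    return result


def increment_cache(key, increment):
    """Add `increment` to the cached progress, clamped to 100.0."""
    data = get_cache(key, 0.0)
    data['progress'] = min(float(data['progress']) + increment, 100.0)
    set_cache(key, data.get('status', 'processing'), data)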
def get_adding_buildings_to_project_status_percentage(request):
    """Returns the percentage complete of the background task that adds
    buildings to a project.

    Payload::

        {'project_loading_cache_key': Job identifier from add_buildings_to_project.}

    Returns::

        {'status': 'success',
         'progress_object': {
             'status': job status,
             'progress': percent of job done (out of 100),
             'progress_key': progress_key for job,
             'numerator': number of buildings added,
             'denominator': total number of buildings to add
         }}
    """
    body = json.loads(request.body)
    project_loading_cache_key = body.get('project_loading_cache_key')
    try:
        progress_object = get_cache(project_loading_cache_key)
    except Exception:
        msg = "Couldn't find project loading key %s in cache" % project_loading_cache_key
        _log.error(msg)
        raise Exception(msg)
    return {'status': 'success', 'progress_object': progress_object}
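# Hedged example of exercising the endpoint above with Django's test client.
# The URL is an assumption for illustration (authentication omitted), and the
# view is presumably wrapped by a decorator that serializes the returned dict
# to a JSON response.
import json

from django.test import Client

client = Client()
response = client.post(
    '/app/get_adding_buildings_to_project_status_percentage/',  # assumed route
    data=json.dumps({'project_loading_cache_key': 'a-progress-key'}),
    content_type='application/json',
)
payload = json.loads(response.content)
print(payload['progress_object']['progress'])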
def get_progress(request):
    """Return the progress of the cleansing."""
    import_file_id = request.GET.get('import_file_id')
    return get_cache(get_prog_key('get_progress', import_file_id))['progress']
def result(self):
    """Return the result from the cache.

    :return: dict
    """
    return get_cache(self.key)
def wait_for_task(self, key):
    prog = 0
    while prog < 100:
        prog = int(get_cache(key)['progress'])
        # The sleep is required; otherwise this method will hang. The
        # interval could likely be reduced to less than one second.
        time.sleep(1.0)
def save(self):
    """Save the data to the cache."""
    # Save some member variables alongside the payload.
    self.data['total'] = self.total

    set_cache(self.key, self.data['status'], self.data)
    return get_cache(self.key)
@classmethod
def from_key(cls, key):
    data = get_cache(key)
    if 'func_name' in data and 'unique_id' in data:
        return cls(func_name=data['func_name'],
                   unique_id=data['unique_id'],
                   init_data=data)
    else:
        raise Exception("Could not find key %s in cache" % key)
def wait_for_task(key):
    """Wait for a celery task to finish running."""
    prog = 0
    while prog < 100:
        prog = int(get_cache(key)['progress'])
        # The sleep is required; otherwise this method will hang.
        time.sleep(0.5)
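# Self-contained illustration of wait_for_task: a background thread stands in
# for a celery worker and pushes progress into the cache. The key name is
# arbitrary, and set_cache/increment_cache are the helpers sketched near the
# top of this file; any task incrementing the same progress key behaves the
# same way.
import threading
import time


def _fake_worker(key):
    for _ in range(4):
        time.sleep(0.1)
        increment_cache(key, 25.0)


demo_key = 'wait_for_task_demo'
set_cache(demo_key, 'processing', {'progress': 0})
threading.Thread(target=_fake_worker, args=(demo_key,)).start()
wait_for_task(demo_key)  # returns once the cached progress reaches 100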
def cleaning_progress_pct(self):
    if (not self.coercion_mapping_active and
            not self.coercion_mapping_queued and
            self.num_coercions_total > 0):
        return 100.0

    if self.coercion_mapping_active:
        return get_cache(self.CLEANING_PROGRESS_KEY)['progress']
    elif self.coercion_mapping_queued or not self.coercion_mapping_done:
        return 0.0
    else:
        return 100.0
def _row_cb(i):
    data = get_cache("export_buildings__%s" % export_id)
    data['buildings_processed'] = i
    if not data['total_buildings']:
        # Avoid dividing by zero when there is nothing to export.
        data['progress'] = 100
    else:
        data['progress'] = (i * 100) / data['total_buildings']
    set_cache("export_buildings__%s" % export_id, data['status'], data)
def retrieve(self, request, pk):
    """Get the progress (percent complete) for a task.

    Returns::

        {
            'progress_key': The same progress key,
            'progress': Percent completion
        }
    """
    progress_key = pk
    # Fetch once so the check and the response use the same snapshot.
    progress_object = get_cache(progress_key)
    if progress_object:
        return JsonResponse(progress_object)
    else:
        return JsonResponse({
            'progress_key': progress_key,
            'progress': 0,
            'status': 'waiting'
        })
def test_increment_cache(self):
    """Sum our progress by increments properly."""
    expected = 25.0
    test_key = make_key('increment_test')
    increment = 25.0

    # A fresh increment initializes the value.
    increment_cache(test_key, increment)
    self.assertEqual(float(get_cache(test_key)['progress']), expected)

    # Increment an existing key.
    increment_cache(test_key, increment)
    expected = 50.0
    self.assertEqual(float(get_cache(test_key)['progress']), expected)

    # Without a bounds check, this would put us well over 100.0.
    for i in range(10):
        increment_cache(test_key, increment)
    expected = 100.0
    self.assertEqual(float(get_cache(test_key)['progress']), expected)
def load(self):
    """Read in the data from the cache."""
    # Merge the existing data with items from the cache, favoring cache items.
    self.data = dict(
        list(self.data.items()) + list(get_cache(self.key).items()))

    # Set some member variables from the merged data.
    if self.data['progress_key']:
        self.key = self.data['progress_key']
    if self.data['total']:
        self.total = self.data['total']
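# Hedged roundtrip of the save()/load() pair above, assuming they are methods
# of a ProgressData-like class (see from_key earlier) with `key`, `data`, and
# `total` attributes, and that the constructor derives the same cache key
# from func_name/unique_id. The constructor arguments and initial data keys
# are assumptions for illustration.
pd = ProgressData(func_name='fake_func', unique_id=123)  # hypothetical ctor
pd.total = 4
pd.data.update(status='running', progress_key=pd.key, progress=0)
cached = pd.save()   # persists self.data and echoes back the cached copy
assert cached['total'] == 4

pd2 = ProgressData(func_name='fake_func', unique_id=123)
pd2.load()           # merges cached values back in, favoring the cache
assert pd2.total == 4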
def cleansing_progress(self, request, pk=None):
    """
    Return the progress of the cleansing.
    ---
    type:
        status:
            required: true
            type: string
            description: either success or error
        progress:
            type: integer
            description: percent complete of the background cleansing task
    parameter_strategy: replace
    parameters:
        - name: pk
          description: Import file ID
          required: true
          paramType: path
    """
    import_file_id = pk
    prog_key = get_prog_key('get_progress', import_file_id)
    cache = get_cache(prog_key)
    return HttpResponse(cache['progress'])
def merge_status(self):
    return get_cache(self.merge_status_key)['status']
def pct_premerge_complete(self):
    return get_cache(self.premerge_progress_key)['progress']
def pct_merge_complete(self):
    return get_cache(self.merge_progress_key)['progress']