import datetime

def schedule_ping():
    # Run every report that has at least one schedule rule due, once per
    # report even if several rules point at it, then drop the cache.
    rules = ScheduleRule.objects.filter(next_run__lte=datetime.datetime.now())
    reports_to_run = {rule.report for rule in rules}
    for report in reports_to_run:
        report.run()
    clear_cache()
from flask import jsonify, render_template

def post(self, *args, **kwargs):
    # Rebuild the article list from scratch: drop the existing rows,
    # invalidate the cache, then create one article per parsed page.
    self.delete_many()
    clear_cache()
    parser = Parser()
    articles = [self.create(*page) for page in parser]
    return jsonify({'data': render_template('articles.html', articles=articles)})
def test_clear_cache(self):
    strike1 = self.get_cached_counter()
    self.assertEqual(strike1, 1)
    cache.clear_cache("test_cache")
    strike2 = self.get_cached_counter()
    self.assertEqual(strike2, 2)
def seterr(divide=False):
    """Should SymPy raise an exception on 0/0 or return a nan?

    divide == True .... raise an exception
    divide == False ... return nan
    """
    if _errdict["divide"] != divide:
        # Cached results were computed under the old setting, so they
        # must be discarded before the behaviour changes.
        clear_cache()
        _errdict["divide"] = divide
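# A usage sketch for seterr(), restating only what the docstring above
# promises (0/0 yields nan by default, an exception when divide=True);
# the exact exception type is deliberately left unchecked here.
from sympy import S, seterr

seterr(divide=False)
assert (S.Zero / S.Zero) is S.NaN   # 0/0 quietly evaluates to nan

seterr(divide=True)
try:
    S.Zero / S.Zero                 # now 0/0 raises instead
except Exception:
    pass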
def test_cache_classmethod(self):
    strike1 = ClsCached.proc_cls()
    self.assertEqual(strike1, 1)
    # A repeated call is served from the cache...
    strike2 = ClsCached.proc_cls()
    self.assertEqual(strike2, 1)
    # ...until the cache is cleared, which forces a recomputation.
    cache.clear_cache("test_cache_cls")
    strike3 = ClsCached.proc_cls()
    self.assertEqual(strike3, 2)
def test_cache_method(self):
    obj = ClsCached()
    strike1 = obj.proc_self()
    self.assertEqual(strike1, 1)
    # The second call hits the cache and returns the same value...
    strike2 = obj.proc_self()
    self.assertEqual(strike2, 1)
    # ...but clearing the cache makes the method run again.
    cache.clear_cache("test_cache_self")
    strike3 = obj.proc_self()
    self.assertEqual(strike3, 2)
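# The two tests above assume a ClsCached helper whose methods count
# actual (uncached) invocations and are memoized under the cache names
# "test_cache_cls" / "test_cache_self". A minimal hypothetical sketch
# consistent with that behaviour; cache.cached is an assumed decorator
# name, not the project's real API:
class ClsCached:
    cls_calls = 0

    def __init__(self):
        self.self_calls = 0

    @classmethod
    @cache.cached("test_cache_cls")
    def proc_cls(cls):
        cls.cls_calls += 1
        return cls.cls_calls

    @cache.cached("test_cache_self")
    def proc_self(self):
        self.self_calls += 1
        return self.self_calls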
import json

from django.http import HttpResponse

def console(request, report):
    report = request.report
    data = {'done': True}
    runs = Run.objects.filter(report=report).order_by("-created_at")
    if runs:
        # Ship back the log lines of the most recent run; once it has
        # finished successfully, include a URL where it can be viewed.
        last_run = runs[0]
        runlines = last_run.getLines()
        log_lines = [line[1] for line in runlines]
        data['log'] = log_lines
        data['done'] = last_run.success
        if last_run.success:
            data['url'] = reverse(view_run,
                                  kwargs={'report': report.slug, 'run': last_run.slug})
            clear_cache()
    return HttpResponse(json.dumps(data), content_type="application/json")
def api(self, command, data):
    if command == 'echo':
        return data
    elif command == 'clear_cache':
        cache.clear_cache(data['project_id'])
        return {}
    elif command == 'compile':
        project_id = data['project_id']
        sources = data['sources']
        components = [ComponentSpec(c['name'], c['ports'])
                      for c in data['components']]
        compile.start_compilation(project_id, sources, components)
        return {}
    elif command == 'build_progress':
        project_id = data['project_id']
        logs = []
        is_running = project_id in compile.running_threads
        last_completed = cache.get_overlay_modified_date(project_id)
        # Drain any log lines the compilation thread has queued so far.
        if project_id in compile.running_logs:
            while not compile.running_logs[project_id].empty():
                logs.append(compile.running_logs[project_id].get_nowait())
            progress = compile.running_logs[project_id].progress
        else:
            # No live log queue: either a build finished earlier (100%)
            # or no build has ever completed (0%).
            progress = 100 if last_completed != 0 else 0
        # The build report only becomes available once the build stops.
        if is_running:
            last_build_status = ''
            build_report = ''
            source_mappings = []
        else:
            last_build_status, build_report = cache.get_build_report(project_id)
            source_mappings = cache.get_source_mapping(project_id)
        return {
            "running": is_running,
            "progress": progress,
            "last_completed": last_completed,
            "last_build_status": last_build_status,
            "build_report": build_report,
            "source_mappings": source_mappings,
            "logs": ''.join(logs),
        }
    elif command == 'cancel_build':
        compile.cancel_compilation(data['project_id'])
        return {}
    elif command == 'download_overlay_bit':
        return {"file": cache.get_overlay_bit_path(data['project_id'])}
    elif command == 'download_overlay_tcl':
        return {"file": cache.get_overlay_tcl_path(data['project_id'])}
    elif command == 'download_python_api':
        return {"file": cache.get_python_api_path(data['project_id'])}
    else:
        return self._error('Command not supported')
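# A hypothetical client-side loop for the dispatcher above: start a
# compilation, then poll 'build_progress' until the background thread
# finishes. handler, sources and component_dicts are placeholder names,
# not identifiers from the original project.
import time

handler.api('compile', {'project_id': 'p1',
                        'sources': sources,
                        'components': component_dicts})
status = handler.api('build_progress', {'project_id': 'p1'})
while status['running']:
    print(status['logs'], end='')
    time.sleep(1)
    status = handler.api('build_progress', {'project_id': 'p1'})
print('build finished, progress: {}%'.format(status['progress']))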
import time
from collections import defaultdict, OrderedDict

import fiona

key_road_id_dict, road_id_geometry_dict = read_road('./shp/input/connected_road.shp')
# track_id -> logs dict
track_id_logs = read_track('./shp/input/track.shp')

for track_id, logs in track_id_logs.items():
    begin_tick = time.time()
    log_id_list = [log.uuid for log in logs]
    # For every GPS log point, collect its candidate projections onto
    # nearby road segments.
    log_closest_points = defaultdict(list)
    for log in logs:
        project_points = get_closest_points(log, road_rtree, coord_feature_dict)
        log_closest_points[log.uuid] = project_points
    clear_cache()
    # Try to match the candidate points to a connected path on the road
    # network and write the result out as a new shapefile.
    match_point_list = match_until_connect(log_id_list, log_closest_points)
    if match_point_list is not None:
        connected_vertex_path, connected_road_path = get_connected_path(match_point_list)
        if connected_vertex_path is not None:
            assert connected_road_path is not None
            out_c = fiona.open('./shp/output/new_path{}.shp'.format(track_id), 'w',
                               driver=driver, crs=crs, schema=schema)
            for i, road_id in enumerate(connected_road_path):
                rec = {
                    'type': 'Feature',
                    'id': '-1',
                    'geometry': road_id_geometry_dict[int(road_id)],
                    'properties': OrderedDict([
                        ('idx', i),
                    ]),
                }
                # The snippet is truncated at this point; presumably
                # each record is written to the output layer.
                out_c.write(rec)
            out_c.close()
from flask import jsonify, request

def clear_cache():
    # Clear the named cache passed via the ?cache= query argument, if any.
    cache_name = utils.get_str_arg(request, 'cache')
    if cache_name:
        cache.clear_cache(cache_name)
    return jsonify(results=[])
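# Every snippet above ends up calling cache.clear_cache(name) on some
# cache module. A minimal hypothetical sketch of such a module, assuming
# a plain name-keyed in-process store; the real implementations (Django's
# cache framework, memcached wrappers, etc.) differ per project:
_caches = {}

def cached(cache_name):
    """Memoize a function's results under the given cache name."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            store = _caches.setdefault(cache_name, {})
            key = (args, tuple(sorted(kwargs.items())))
            if key not in store:
                store[key] = func(*args, **kwargs)
            return store[key]
        return wrapper
    return decorator

def clear_cache(cache_name):
    """Drop every entry stored under cache_name."""
    _caches.pop(cache_name, None)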