def update():
    """Run one full refresh cycle: bootstrap static data if missing,
    crawl the latest dynamic data, then recompute every cached
    aggregate (entities, categories, analyses).

    Side effects only; returns None.
    """
    # Bootstrap: download static data on first run only.
    if not models.ProcedureType.objects.exists():
        c = StaticDataCrawler()
        c.retrieve_and_save_all()

    # Bootstrap: build the category tree on first run only.
    # Imported lazily so the cpvs module is only loaded when needed.
    if not models.Category.objects.exists():
        from contracts.tools.cpvs import build_categories
        build_categories()

    # Retrieve latest dynamic data.
    # NOTE(review): update_all() returns the affected entities, but the
    # original bound them to an unused local — everything below recomputes
    # all rows anyway, so the return value is deliberately discarded.
    crawler = DynamicDataCrawler()
    crawler.update_all()

    # Recompute entities' cached data.
    for entity in models.Entity.objects.all():
        entity.compute_data()

    # Recompute categories' cached data.
    for category in models.Category.objects.all():
        category.compute_data()

    # Refresh every registered analysis.
    for analysis in list(analysis_manager.values()):
        analysis.update()
def handle(self, **options):
    """Recompute cached data for the targets selected via CLI flags.

    ``--all`` implies every target; otherwise ``--entities``,
    ``--categories`` and ``--analysis`` are honoured individually.
    """
    run_all = options['all']

    if options['entities'] or run_all:
        # Only entities whose cached data is stale need recomputing.
        for stale_entity in Entity.objects.filter(data__is_updated=False):
            stale_entity.compute_data()

    if options['categories'] or run_all:
        for cat in Category.objects.all():
            cat.compute_data()

    if options['analysis'] or run_all:
        for registered in list(analysis_manager.values()):
            registered.update()
def handle(self, **options):
    """Management-command entry point: refresh the caches picked by flags.

    Each flag (or ``--all``) triggers ``compute_data()`` on the matching
    queryset; ``--analysis`` additionally refreshes every registered
    analysis.
    """
    # Pair each flag with the queryset whose rows must be recomputed.
    targets = (
        ('entities', Entity.objects.filter(data__is_updated=False)),
        ('categories', Category.objects.all()),
    )
    for flag, queryset in targets:
        if options[flag] or options['all']:
            for obj in queryset:
                obj.compute_data()

    if options['analysis'] or options['all']:
        for analysis in list(analysis_manager.values()):
            analysis.update()
def recompute_analysis():
    """Refresh every analysis registered in ``analysis_manager``."""
    # Snapshot the values first in case update() touches the registry.
    registered = list(analysis_manager.values())
    for item in registered:
        item.update()