def populate_library_data():
    # Seed the collection with a default date on the first run.
    if len(LastUpdateLibraryDate.objects) == 0:
        LastUpdateLibraryDate(DEFAULT_INITIAL_DATE).save()

    # Read the most recent update date, then record the current run.
    size = LastUpdateLibraryDate.objects.count()
    last_update = LastUpdateLibraryDate.objects[size - 1].create_date
    LastUpdateLibraryDate(str(datetime.now())).save()

    parser_yaml = ParserYAML()
    urls = parser_yaml.get_multi_instances_urls

    for url in urls:
        request = RequestLibraryRawData(last_update, url).data
        new_url = clean_url(url)

        for library in request:
            date = library["createTimestamp"]['date']

            # Normalize the accessibility field; blank or missing values
            # are stored as 'Não definido' ("not defined").
            accessibility = str(library["acessibilidade"]).capitalize()
            if accessibility == '' or accessibility == 'None':
                accessibility = 'Não definido'

            LibraryData(new_url, library["type"]['name'],
                        accessibility, date).save()

            for area in library["terms"]["area"]:
                LibraryArea(new_url, str(area).title()).save()

            for tag in library["terms"]["tag"]:
                LibraryTags(new_url, str(tag).title()).save()

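
# The last-update bookkeeping above (seed with DEFAULT_INITIAL_DATE, read the
# newest create_date, stamp datetime.now()) is repeated in every populate_*
# function below. A minimal sketch of how it could be factored out; the helper
# name refresh_last_update is an assumption for illustration, not project API:
def refresh_last_update(date_model):
    """Return the previous update date and record the current run."""
    if len(date_model.objects) == 0:
        date_model(DEFAULT_INITIAL_DATE).save()
    size = date_model.objects.count()
    last_update = date_model.objects[size - 1].create_date
    date_model(str(datetime.now())).save()
    return last_update


# e.g. last_update = refresh_last_update(LastUpdateLibraryDate)
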
def populate_agent_data():
    # Seed the collection with a default date on the first run.
    if len(LastUpdateAgentsDate.objects) == 0:
        LastUpdateAgentsDate(DEFAULT_INITIAL_DATE).save()

    # Read the most recent update date, then record the current run.
    size = LastUpdateAgentsDate.objects.count()
    last_update = LastUpdateAgentsDate.objects[size - 1].create_date
    LastUpdateAgentsDate(str(datetime.now())).save()

    parser_yaml = ParserYAML()
    urls = parser_yaml.get_multi_instances_urls

    for url in urls:
        # On the first run, the São Paulo instances are backfilled
        # year by year instead of in a single request.
        if (last_update == DEFAULT_INITIAL_DATE
                and (url == SP_URL or url == ESTADO_SP_URL)):
            request = EmptyRequest()
            for year in range(DEFAULT_YEAR, CURRENT_YEAR):
                single_request = RequestAgentsInPeriod(year, url)
                request.data += single_request.data
            request = request.data
        else:
            request = RequestAgentsRawData(last_update, url).data

        new_url = clean_url(url)

        for agent in request:
            date = agent["createTimestamp"]['date']
            AgentsData(new_url, str(agent['type']['name']), date).save()

            for area in agent["terms"]["area"]:
                AgentsArea(new_url, str(area).title()).save()

def populate_event_data():
    # Seed the collection with a default date on the first run.
    if len(LastUpdateEventDate.objects) == 0:
        LastUpdateEventDate(DEFAULT_INITIAL_DATE).save()

    # Read the most recent update date, then record the current run.
    size = LastUpdateEventDate.objects.count()
    last_update = LastUpdateEventDate.objects[size - 1].create_date
    LastUpdateEventDate(str(datetime.now())).save()

    parser_yaml = ParserYAML()
    urls = parser_yaml.get_multi_instances_urls

    for url in urls:
        # On the first run, the São Paulo instances are backfilled
        # year by year instead of in a single request.
        if (last_update == DEFAULT_INITIAL_DATE
                and (url == SP_URL or url == ESTADO_SP_URL)):
            request = EmptyRequest()
            for year in range(DEFAULT_YEAR, CURRENT_YEAR):
                single_request = RequestEventsInPeriod(year, url)
                request.data += single_request.data
            request = request.data
        else:
            request = RequestEventsRawData(last_update, url).data

        new_url = clean_url(url)

        for event in request:
            date = event["createTimestamp"]['date']

            # Only events with an age rating (classificacaoEtaria) are stored.
            if str(event['classificacaoEtaria']) != '':
                EventData(new_url, str(event['classificacaoEtaria']).title(),
                          event['occurrences'], date).save()

                for language in event["terms"]["linguagem"]:
                    EventLanguage(new_url, language).save()

    populate_mixed_data(last_update)

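
# populate_agent_data and populate_event_data repeat the same year-by-year
# backfill for the São Paulo instances on a first run. A hypothetical helper
# capturing that pattern; the name backfill_by_year and the request_class
# parameter are assumptions for illustration only:
def backfill_by_year(request_class, url):
    """Collect data year by year, from DEFAULT_YEAR up to CURRENT_YEAR - 1."""
    data = []
    for year in range(DEFAULT_YEAR, CURRENT_YEAR):
        data += request_class(year, url).data
    return data


# e.g. request = backfill_by_year(RequestEventsInPeriod, url)
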
def populate_space_data():
    # Seed the collection with a default date on the first run.
    if len(LastUpdateDate.objects) == 0:
        LastUpdateDate(DEFAULT_INITIAL_DATE).save()

    # Read the most recent update date, then record the current run.
    size = LastUpdateDate.objects.count()
    last_update = LastUpdateDate.objects[size - 1].create_date
    LastUpdateDate(str(datetime.now())).save()

    parser_yaml = ParserYAML()
    urls = parser_yaml.get_multi_instances_urls

    for url in urls:
        request = RequestSpacesRawData(last_update, url).data
        new_url = clean_url(url)

        for space in request:
            date = space["createTimestamp"]['date']
            SpaceData(new_url, str(space['name']), date,
                      str(space['type']['name'])).save()

            for area in space["terms"]["area"]:
                OccupationArea(new_url, area).save()

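
# A minimal sketch of how the four populate functions might be triggered
# together; populate_all and the __main__ guard are assumptions and not part
# of the original module, which may instead be driven by a scheduler or a
# management command:
def populate_all():
    populate_space_data()
    populate_agent_data()
    populate_library_data()
    populate_event_data()  # also triggers populate_mixed_data


if __name__ == '__main__':
    populate_all()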