else:
    url = (yangcatalog_api_prefix + 'load-cache')
    LOGGER.info('{}'.format(url))
    response = requests.post(url, None,
                             auth=(args.credentials[0], args.credentials[1]))
    if response.status_code != 201:
        LOGGER.warning('Could not send a load-cache request')
    try:
        shutil.rmtree('../api/cache')
    except OSError:
        # Be happy if deleted
        pass

if args.notify_indexing:
    LOGGER.info('Sending files for indexing')
    send_to_indexing(yangcatalog_api_prefix,
                     '../parseAndPopulate/' + direc + '/prepare.json',
                     args.credentials, apiIp=args.api_ip, from_api=False,
                     set_key=key, force_indexing=args.force_indexing)

if thread is not None:
    thread.join()
try:
    shutil.rmtree('../parseAndPopulate/' + direc)
    shutil.rmtree('../api/cache')
except OSError:
    # Be happy if deleted
    pass
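# The try/rmtree/except OSError pattern above only exists so that a missing
# directory is not treated as an error. A minimal alternative sketch (the
# helper name below is illustrative, not part of this script) relies on
# shutil.rmtree's ignore_errors flag instead:
import shutil


def remove_quietly(path):
    """Delete a directory tree, ignoring errors such as the path not existing."""
    shutil.rmtree(path, ignore_errors=True)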
# For each json file
LOGGER.info('Starting to add vendors')
for data in files:
    # Prepare json_data for the patch request - this request will prepare
    # the list of vendors to be populated with protocols and modules
    json_implementations_data = json.dumps(data)

    # Make a PATCH request to create a root for each file
    http_request(prefix + '/api/config/catalog/vendors/', 'PATCH',
                 json_implementations_data, args.credentials)

if not args.api:
    if args.notify_indexing:
        LOGGER.info('Sending files for indexing')
        send_to_indexing(direc + '/prepare.json', args.credentials, args.ip,
                         from_api=False)
    LOGGER.info('Removing temporary json data and cache data')
    shutil.rmtree('./' + direc)
    try:
        shutil.rmtree('../api/cache')
    except OSError:
        # Be happy if deleted
        pass
    try:
        LOGGER.info('Sending request to reload cache')
        http_request(
            args.api_protocol + '://' + args.api_ip + ':' + args.api_port +
            '/load-cache', 'POST', None, args.credentials)
    except Exception:
        LOGGER.warning('Could not send a load-cache request')
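# http_request() is not defined in this excerpt; below is a minimal sketch of
# what such a helper might look like, assuming it wraps urllib with HTTP basic
# auth and RESTCONF JSON media types. The signature is inferred from the call
# sites above and is not taken from the original source.
import base64
import urllib.request


def http_request(url, method, json_data, credentials):
    """Send a request with an optional JSON body and return the response object."""
    body = json_data.encode('utf-8') if json_data else None
    request = urllib.request.Request(url, data=body, method=method)
    request.add_header('Content-Type', 'application/vnd.yang.data+json')
    request.add_header('Accept', 'application/vnd.yang.data+json')
    token = base64.b64encode(
        '{}:{}'.format(credentials[0], credentials[1]).encode('utf-8'))
    request.add_header('Authorization', 'Basic ' + token.decode('utf-8'))
    return urllib.request.urlopen(request)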
        old = f.read()
except Exception:
    old = ''
with open('../parseAndPopulate/' + direc + '/prepare.json', 'r') as f:
    new = f.read()
if old != new:
    do_indexing = True
    shutil.copy('../parseAndPopulate/' + direc + '/prepare.json', './old/.')

if args.notify_indexing and do_indexing:
    LOGGER.info('Sending files for indexing')
    send_to_indexing('../parseAndPopulate/' + direc + '/prepare.json',
                     args.credentials, args.ip, args.api_port,
                     args.api_protocol, from_api=False, set_key=key,
                     force_indexing=args.force_indexing)

LOGGER.info('Removing temporary json data and cache data')
try:
    shutil.rmtree('../api/cache')
except OSError:
    # Be happy if deleted
    pass
try:
    LOGGER.info('Sending request to reload cache')
    http_request(
        args.api_protocol + '://' + args.api_ip + ':' +
        str(args.api_port) + '/load-cache', 'POST', None,
response = requests.patch(url, json_modules_data,
                          auth=(args.credentials[0], args.credentials[1]),
                          headers={
                              'Accept': 'application/vnd.yang.data+json',
                              'Content-type': 'application/vnd.yang.data+json'})
if response.status_code < 200 or response.status_code > 299:
    LOGGER.error('Request with body {} on path {} failed with {}'
                 .format(json_modules_data, url, response.content))

try:
    shutil.rmtree('../api/cache')
except OSError:
    # Be happy if deleted
    pass

url = (yangcatalog_api_prefix + 'load-cache')
LOGGER.info('{}'.format(url))
response = requests.post(url, None,
                         auth=(args.credentials[0], args.credentials[1]))
if response.status_code != 201:
    LOGGER.warning('Could not send a load-cache request')

if args.notify_indexing:
    LOGGER.info('Sending files for indexing')
    send_to_indexing(yangcatalog_api_prefix,
                     '../parseAndPopulate/' + direc + '/prepare.json',
                     args.credentials, apiIp=args.api_ip, from_api=False,
                     set_key=key, force_indexing=args.force_indexing)

shutil.rmtree('../parseAndPopulate/' + direc)
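# A sketch of the PATCH-and-check step above factored into a small helper,
# assuming only 2xx responses count as success. The helper name is
# illustrative and not part of the original script.
import requests


def patch_json(url, payload, credentials):
    """PATCH a JSON payload with RESTCONF media types and log any failure."""
    response = requests.patch(url, payload,
                              auth=(credentials[0], credentials[1]),
                              headers={
                                  'Accept': 'application/vnd.yang.data+json',
                                  'Content-type': 'application/vnd.yang.data+json'})
    if not 200 <= response.status_code <= 299:
        LOGGER.error('Request with body {} on path {} failed with {}'
                     .format(payload, url, response.content))
    return response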