async def logger_middleware(request, handler):
    """aiohttp middleware: run the handler, then log the request summary
    and persist it (apikey, remote address, serialized summary) to the
    log store via ``to.insert_to_logs``.
    """
    response = await handler(request)
    # Serialize once so the log line and the DB row carry identical data.
    payload = json.dumps({
        "response.status": response.status,
        "path_qs": request.path_qs,
        "method": request.method,
    })
    key = request.rel_url.query.get("apikey")
    logger.info("Apikey: {} make request {}".format(key, payload))
    await to.execute(to.insert_to_logs,
                     tuple_params=(key, request.remote, payload))
    return response
def _search(self, query, context=None):
    """Search the iTunes API for an album track matching the provided query.

    Merges the matched track's metadata (album, genre, content id,
    copyright, artwork, track count, release date, explicitness) into
    ``self.context``. Returns None when no track is found.
    """
    if context is not None:
        self.context.update(context)
    results = itunes.search_track(query)
    track = self._filter_results(results)
    LOGGER.info('iTunes Search Result: %s', str(track))
    if track is None:
        LOGGER.error('{} not found in iTunes'.format(query))
        return
    self.context.update(dict(Album=track.album, Genre=track.genre,
                             contentID=track.id,
                             Copyright=track.album.copyright))
    # Albumart
    url = track.artwork.get('600', '')
    # NOTE(review): `url.value` implies artwork entries are objects exposing
    # .value, yet the fallback '' is a plain str with no .value attribute —
    # confirm the fallback branch can never be reached.
    artwork = self.has_artwork(url.value)
    if artwork:
        self.context['Artwork'] = artwork
    # Rewrite "Track #" as "<number>/<album track count>".
    self.context['Track #'] = '{}/{}'.format(self.context['Track #'],
                                             track.album.track_count)
    self.context['Release Date'] = track.album.release_date_raw
    self.context['Album Artist'] = self.context['Artist']
    if track.json['trackExplicitness'].lower() == 'explicit':
        self.context['_explicit'] = 'Explicit'
def download_and_unzip(url: str) -> str:
    """Download a zip archive from *url*, extract it (plus any zips it
    contains) into a fresh temp directory, and return that directory.

    Raises whatever the HTTP client raises when the download fails.
    """
    dest_dir = mkdtemp(prefix=f"{settings.tmp_file_prefix}")
    logger.info(f"Saving to {dest_dir}")
    filename = get_filename_from_url(url)
    try:
        response = http.get(url)
    except Exception as e:
        # BUG FIX: the original only logged and fell through, which then
        # crashed with a confusing NameError on `response`. Log and re-raise.
        logger.error(e)
        raise
    save_path = Path(dest_dir) / filename
    with save_path.open("wb+") as fh:
        fh.write(response.content)
    logger.info(f"Wrote file to {save_path}")
    with ZipFile(save_path) as zf:
        zf.extractall(dest_dir)
    os.remove(save_path)
    # Extract nested zip files produced by the outer archive.
    # NOTE(review): os.walk descends into subdirectories but the path is
    # joined onto dest_dir, so nested zips below the top level would be
    # mis-pathed — confirm the archives are flat.
    for _, _, files in os.walk(dest_dir):
        for file in files:
            file_path = Path(dest_dir) / file
            if file_path.suffix.lower() == ".zip":
                with ZipFile(file_path) as zf:
                    zf.extractall(dest_dir)
                os.remove(file_path)
    return dest_dir
def _search(self, query, context=None):
    """Search iTunes for a movie matching *query* and map the best result
    into the tagging context."""
    # Merge any caller-supplied context before searching.
    if context is not None:
        self.context.update(context)
    movie = self._filter_results(itunes.search_movie(query))
    LOGGER.info('iTunes Search Result: %s', str(movie))
    return self._apply_mapping(movie)
def _search(self, query):
    """Search iTunes for a TV Episode matching *query*"""
    candidates = itunes.search_episode(query)
    episode = self.filter_results(query, candidates)
    LOGGER.info('iTunes Search Result: %s', str(episode))
    # No match: hand back whatever context we already have.
    if episode is None:
        return self.context
    return self._apply_mapping(episode)
def get_host_mgmt_ip(config):
    """Return the management IP of the first host in *config*.

    Scans the host's network interface list and returns the IP address of
    the last interface flagged ``is_mgmt_intf == 'yes'`` (matching the
    original last-match behavior), or None when no interface is flagged.
    """
    ntw_list = config.host_info[0].nw_intf_list
    logger.info('Management interface is:' + ntw_list[0].is_mgmt_intf)
    # BUG FIX: host_ip was previously unbound (NameError) when no
    # interface was flagged as the management interface.
    host_ip = None
    for intf in ntw_list:
        if intf.is_mgmt_intf == 'yes':
            logger.info('Host Managment IP is: ' + intf.ip_address)
            host_ip = intf.ip_address
    return host_ip
def cleanup(self, cimc_util_obj, config):
    """Tear down every vNIC recorded in self.created_vnic_list.

    Each list entry is a "<slot> <device-name>" string.
    """
    logger.info('In clenup section of BiosUpdateCdnTest')
    vic_obj = VicLib(cimc_util_obj, config)
    for entry in self.created_vnic_list:
        slot_no, dev_name = entry.split(' ')[0], entry.split(' ')[1]
        logger.info('Delete vnic {} interface on slot {}'.format(
            dev_name, slot_no))
        vic_obj.delete_vnic(slot_no, dev_name)
def compliance_ver(self, d, ver, logger):
    """Drive the compliance version filter to *ver* and scrape the result
    table via getdata()."""
    self.d = d
    print("Access compliance for pure version" + ver)
    logger.info("Access compliance for pure version" + ver)
    # Re-locate the widget before each interaction, exactly as the page
    # may re-render between actions.
    xpath = '//*[@id="dijit_form_FilteringSelect_0"]'

    def field():
        return self.d.find_element_by_xpath(xpath)

    field().click()
    field().clear()
    field().send_keys(ver)
    field().send_keys(Keys.ENTER)
    self.getdata(d, logger)
def getdata(self, d, logger):
    """Scrape every HTML table on the current page into self.data, record
    the column index of each known heading in module-level globals, load
    the expected values from devices.yml, and kick off validation.
    """
    global Label
    global IP
    global Installedversion
    global BaseVersion
    global recommendedVersion
    global Compliancestatus
    global paras
    self.data = []
    self.rowslist = []
    self.rowslist = d.find_elements_by_xpath('.//table')
    for table in self.rowslist:
        self.data_row = []
        self.tds = table.find_elements_by_tag_name('td')
        if self.tds:
            # Collect the non-empty cell texts of this table as one row.
            for td in self.tds:
                self.row_data = td.text
                if self.row_data != '':
                    self.data_row.append(self.row_data)
            if self.data_row != []:
                self.data.append(self.data_row)
    print("******************RAW DATA******************")
    print(self.data)
    del self.data[0]  # del Button Names
    self.headings = self.data[0]
    del self.data[0]  # del Column Headings
    del self.data[len(self.data) - 1]  # Total Section
    self.numbering = [0, 1, 2, 3, 4, 5, 6, 7]
    # Map each recognized heading to its column index (stored in globals
    # that Compliance_validate reads later).
    for i in range(len(self.headings)):
        self.numbering[i] = i
        if self.headings[i] == 'Label':
            Label = i
            print(str(i) + " is Label")
        elif self.headings[i] == 'IP Address':
            IP = i
            print(str(i) + " is IP Address")
        elif self.headings[i] == 'Installed Version':
            Installedversion = i
            print(str(i) + " is Installed Version")
        elif self.headings[i] == 'Base Version':
            BaseVersion = i
            print(str(i) + " is Base Version")
        elif self.headings[i] == 'Recommended Version':
            recommendedVersion = i
            print(str(i) + " is Recommended Version")
        elif self.headings[i] == 'Compliance Status':
            Compliancestatus = i
            print(str(i) + " is Compliance status")
    print(self.data)
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input and deprecated — consider yaml.safe_load.
    with open('devices.yml', 'r') as f:
        self.paras = yaml.load(f)
    print(self.paras)
    logger.info('paras= ' + str(self.paras))
    self.Compliance_validate(self.data, self.paras, logger)
def getdata(self, d, logger):
    """Scrape the page's tables, locate known column headings, load the
    expected device data from devices.yml and run validation.

    Duplicate (unformatted) twin of the formatted getdata elsewhere in
    this codebase.
    """
    global Label
    global IP
    global Installedversion
    global BaseVersion
    global recommendedVersion
    global Compliancestatus
    global paras
    self.data = []
    self.rowslist = []
    self.rowslist = d.find_elements_by_xpath('.//table')
    for table in self.rowslist:
        self.data_row = []
        self.tds = table.find_elements_by_tag_name('td')
        if self.tds:
            # One row per table: every non-empty cell text.
            for td in self.tds:
                self.row_data = td.text
                if self.row_data != '':
                    self.data_row.append(self.row_data)
            if self.data_row != []:
                self.data.append(self.data_row)
    print("******************RAW DATA******************")
    print(self.data)
    del self.data[0]  # del Button Names
    self.headings = self.data[0]
    del self.data[0]  # del Column Headings
    del self.data[len(self.data) - 1]  # Total Section
    self.numbering = [0, 1, 2, 3, 4, 5, 6, 7]
    # Record each known heading's column index in module-level globals.
    for i in range(len(self.headings)):
        self.numbering[i] = i
        if self.headings[i] == 'Label':
            Label = i
            print(str(i) + " is Label")
        elif self.headings[i] == 'IP Address':
            IP = i
            print(str(i) + " is IP Address")
        elif self.headings[i] == 'Installed Version':
            Installedversion = i
            print(str(i) + " is Installed Version")
        elif self.headings[i] == 'Base Version':
            BaseVersion = i
            print(str(i) + " is Base Version")
        elif self.headings[i] == 'Recommended Version':
            recommendedVersion = i
            print(str(i) + " is Recommended Version")
        elif self.headings[i] == 'Compliance Status':
            Compliancestatus = i
            print(str(i) + " is Compliance status")
    print(self.data)
    # NOTE(review): yaml.load without a Loader is deprecated/unsafe —
    # consider yaml.safe_load.
    with open('devices.yml', 'r') as f:
        self.paras = yaml.load(f)
    print(self.paras)
    logger.info('paras= ' + str(self.paras))
    self.Compliance_validate(self.data, self.paras, logger)
def predict_fn(input_data, model):
    """Run *model* on a single 3x224x224 image tensor.

    Reshapes the flat input to a batch of one, runs the model without
    gradients, and exponentiates the (log-space) outputs.
    """
    logger.info('Generating prediction based on input parameters.')
    batch = input_data.view(1, 3, 224, 224)
    # Move to GPU when one is available.
    if torch.cuda.is_available():
        batch = batch.cuda()
    model.eval()
    with torch.no_grad():
        out = model(batch)
    return torch.exp(out)
def slot_interface(self, cimc_util_obj): mgmt_handle = cimc_util_obj.handle output = mgmt_handle.execute_cmd_list('top', 'scope chassis', 'show pci-adapter detail') interface_id = [] for block in output.split("---")[1:]: slot = re.search('slot:*\s+([^\r\n]+)', block).group(1) interface_id.append(slot) logger.info("Interface info..." + str(interface_id)) return interface_id
def product_cimc(self, cimc_util_obj):
    """Collect the product-name of each PCI adapter record reported by
    `show pci-adapter detail` on the CIMC CLI."""
    handle = cimc_util_obj.handle
    detail = handle.execute_cmd_list('top', 'scope chassis',
                                     'show pci-adapter detail')
    # Records are delimited by '---'; skip the leading command banner.
    product_name = [
        re.search('product-name:*\s+([^\r\n]+)', chunk).group(1)
        for chunk in detail.split("---")[1:]
    ]
    logger.info("Product name info...." + str(product_name))
    return product_name
def fw_cimc_cli(self, cimc_util_obj):
    """Collect the firmware version of each PCI adapter via the CIMC CLI."""
    handle = cimc_util_obj.handle
    output = handle.execute_cmd_list('top', 'scope chassis',
                                     'show pci-adapter detail')
    logger.info("Output......" + output)
    fw_id = []
    # One record per '---'-delimited chunk; the first chunk is the banner.
    for chunk in output.split("---")[1:]:
        version = re.search('fw-version:*\s+([^\r\n]+)', chunk).group(1)
        logger.info("Firmware info ...." + version)
        fw_id.append(version)
    return fw_id
def compliance_ver(self, d, ver, logger):
    """Select pure version *ver* in the compliance filter widget and then
    scrape/validate the resulting table via getdata().
    """
    self.d = d
    print("Access compliance for pure version" + ver)
    logger.info("Access compliance for pure version" + ver)
    # The widget is re-located before every interaction because the Dojo
    # page may re-render between actions.
    self.d.find_element_by_xpath(
        '//*[@id="dijit_form_FilteringSelect_0"]').click()
    self.d.find_element_by_xpath(
        '//*[@id="dijit_form_FilteringSelect_0"]').clear()
    self.d.find_element_by_xpath(
        '//*[@id="dijit_form_FilteringSelect_0"]').send_keys(ver)
    # ENTER commits the selection.
    self.d.find_element_by_xpath(
        '//*[@id="dijit_form_FilteringSelect_0"]').send_keys(Keys.ENTER)
    self.getdata(d, logger)
async def websoket_handler(request):
    """Echo websocket endpoint: mirrors text frames back to the client
    until the connection closes."""
    ws = web.WebSocketResponse()
    await ws.prepare(request)
    async for msg in ws:
        logger.info("client data")
        frame_type = msg.type
        if frame_type == aiohttp.WSMsgType.TEXT:
            # Echo the payload straight back.
            await ws.send_str(msg.data)
        elif frame_type == aiohttp.WSMsgType.ERROR:
            print("ws connection close with exception %s" % ws.exception())
    logger.info("websocket connection closed")
    return ws
def __init__(self, *, full=False, default=True, threads=True):
    """Constructor

    threads     Multithread support (default: True)
        Indicates whether you indend to use multiple python threads in your
        application. When set this flags disables some optimisations
        related to the GIL. (see GObject.threads_init())

    full        Full GLib (default: False)
        By default the policy is to create a GLibEventLoop object only for
        the main thread. Other threads will use regular asyncio event
        loops. If this flag is set, then this policy will use a glib event
        loop for every thread. Use this parameter if you want your loops
        to interact with modules written in other languages.

    default     Use the default context (default: True)
        Indicates whether you want to use the GLib default context. If set,
        then the loop associated with the main thread will use the default
        (NULL) GLib context (instead of creating a new one).
    """
    self._full = full
    self._default = default
    self._default_loop = None
    # Delegate everything except loop construction to the stock policy.
    self._policy = unix_events.DefaultEventLoopPolicy()
    self._policy.new_event_loop = self.new_event_loop
    self.get_event_loop = self._policy.get_event_loop
    self.set_event_loop = self._policy.set_event_loop
    self.get_child_watcher = self._policy.get_child_watcher
    self._policy.set_child_watcher(GLibChildWatcher())
    BaseGLibEventLoop.init_class()
    if threads:
        logger.info("GLib threads enabled")
        GObject.threads_init()
    else:
        logger.info("GLib threads not used")

        # Poison threading.Thread so that any later attempt to start a
        # thread fails loudly instead of racing an unprotected GLib.
        # (`self` is captured from the enclosing scope by this closure.)
        def __new__(cls, *k, **kw):
            raise RuntimeError(
                "GLib threads not enabled (you should use %s(threads=True)"
                % self.__class__.__name__)
        threading.Thread.__new__ = __new__
def configure_modify_delete_cdn(self, cimc_util_obj, config):
    '''
    Logical ID: RACK-BIOS-DN-CDN-VIC-004 RACK-BIOS-DN-CDN-VIC-005
    RACK-BIOS-DN-CDN-VIC-006
    Test Case: To Configure CDN for vNIC; To Modify CDN for vNIC;
    To Delete CDN for vNIC
    '''
    vic_obj = VicLib(cimc_util_obj, config)
    vic_list = config.inventory_detail
    self.created_vnic_list = []
    if cimc_util_obj.set_host_power('on') is False:
        self.failed('Failed to power on host', goto=['cleanup'])
    logger.info('VIC list: ' + str(vic_list))
    for vic in vic_list:
        logger.info('vic slot number is: ' + vic.slot_number)
        slot_no = vic.slot_number
        # Create 2 vNIC interfaces on this slot.
        self.created_vnic_list = []
        dev_list = ['1', '2']
        if create_vnic_interface(self, vic_obj, slot_no, dev_list) is not True:
            self.failed('Failed to create vnic interface', goto=['cleanup'])
        res1 = vic_obj.powercycle_and_verify_cdn_on_cimc_and_host(slot_no)
        # Modify one vNIC's CDN name and verify.
        val = self.created_vnic_list[0]
        slot_no = val.split(' ')[0]
        dev_name = val.split(' ')[1]
        res = vic_obj.modify_vnic_properties(slot_no, dev_name, 'CDN',
                                             'modified_' + dev_name)
        if res is not True:
            logger.error('Failed to modify the CDN name attribute of vNIC')
        else:
            self.created_vnic_list.append(slot_no + ' ' + dev_name)
        # Delete the other vNIC and verify.
        val = self.created_vnic_list[1]
        slot_no = val.split(' ')[0]
        dev_name = val.split(' ')[1]
        if vic_obj.delete_vnic(slot_no, dev_name) is not True:
            logger.error('Failed to delete vNIC ethernet interface ' + dev_name)
        res2 = vic_obj.powercycle_and_verify_cdn_on_cimc_and_host(slot_no)
        # BUG FIX: the original condition was `res1 == False & res2 == False`.
        # `&` binds tighter than `==`, so that chained comparison never
        # tested "both verifications failed". Use boolean `and` explicitly.
        if res1 is False and res2 is False:
            self.failed('Test cases failed')
        else:
            self.passed('Test Case Passed')
def lom_cdn_test(self, cimc_util_obj, config):
    '''
    Logical ID: RACK-BIOS-DN-CDN-003
    Test Case: To Verify Consistent device naming structure for LOM
    Ports-RHEL 7.0
    Pass/Fail Criteria: Device Naming should appear as specified in the
    Functional Spec
    '''
    vic_obj = VicLib(cimc_util_obj, config)
    logger.info('Fetch the host managment interface IP')
    ntw_list = config.host_info[0].nw_intf_list
    logger.info('Management interface is:' + ntw_list[0].is_mgmt_intf)
    host_ip = get_host_mgmt_ip(config)
    res = cimc_util_obj.verify_host_up(host_ip, wait_time=400,
                                       wait_for_ping_fail=False)
    if res is False:
        logger.warning('Failed to ping the host after host reboot')
    else:
        logger.info("Host name IP pinging successfully")
    # connect to host
    cimc_util_obj.host_handle.connect()
    logger.info('Successfully connected to host')
    host_dict = vic_obj.host_cdn_mac_dict(cimc_util_obj.host_handle,
                                          'biosdevname -d')
    cdn_name_from_host = list(host_dict.values())
    # BUG FIX: the original tested `"LOMPort1" and "LOMPort2" in list`,
    # which only checks LOMPort2 ("LOMPort1" is just a truthy literal).
    if "LOMPort1" in cdn_name_from_host and "LOMPort2" in cdn_name_from_host:
        self.passed(
            "LOMPort1 and LOMPort2 available in Host OS, when CDN Enabled")
    else:
        # BUG FIX: this branch previously reported self.passed() even
        # though the expected CDN names were missing.
        self.failed(
            'LOMPort1 and LOMPort2 are not available in Host OS, when CDN Enabled'
        )
def Compliance_validate(self, data, paras, logger):
    """Validate every scraped table row against the expected config values.

    Uses the column indexes recorded in module globals by getdata().
    Returns 0 on completion, 1 when an exception occurred.
    """
    try:
        print("******************VERSION VALIDATION STARTS******************")
        logger.info("******************VERSION VALIDATION STARTS******************")
        i = 0
        list2 = []
        global Label
        global IP
        global Installedversion
        global BaseVersion
        # BUG FIX: the original declared `global RecommendedVersion`
        # (capital R) but the index actually read below is
        # `recommendedVersion` — declare the name that is used.
        global recommendedVersion
        global Compliancestatus
        while i < len(data):
            print(">>>>>Test " + str(i + 1) + " for " + str(data[i][Label]))
            logger.info(">>>>>Test " + str(i + 1) + " for " + str(data[i][Label]))
            # Build the row in the fixed order verify_label expects:
            # label, ip, installed, base, recommended, compliance status.
            list2.append(data[i][Label])
            list2.append(data[i][IP])
            list2.append(data[i][Installedversion])
            list2.append(data[i][BaseVersion])
            list2.append(data[i][recommendedVersion])
            list2.append(data[i][Compliancestatus])
            self.verify_label(paras, list2, logger)
            list2 = []
            i += 1
        print("******************VERSION VALIDATION ENDS******************")
        logger.info("******************VERSION VALIDATION ENDS******************")
        return 0
    except Exception as e:
        logger.info('Exception: %s', e)
        return 1
def get_textBVRV(self, d, list2, k, logger):
    """Locate the row's Base Version / Recommended Version links, open
    each and verify their hrefs match the expected URLs in *k*.

    list2: scraped row [label, ip, installed, base, recommended, status];
    k: expected values, with k[7]=BV url and k[8]=RV url.
    """
    try:
        global Label
        self.data = []
        self.rowslist = []
        BV = list2[3]
        RV = list2[4]
        Label = list2[0]
        self.rowslist = d.find_elements_by_xpath('.//table')
        for table in self.rowslist:
            self.data_row = []
            self.tds = table.find_elements_by_tag_name('td')
            if self.tds:
                for td in self.tds:
                    # NOTE(review): `td` is a WebElement; comparing it to
                    # the label string likely never matches — confirm
                    # whether `td.text == list2[0]` was intended.
                    if td == list2[0]:
                        BVlink = d.find_element_by_link_text(BV).get_attribute('href')
                        RVlink = d.find_element_by_link_text(RV).get_attribute('href')
                        print(BVlink)
                        self.BVlinkclick(d, BVlink, BV, logger)
                        print(RVlink)
                        # NOTE(review): the RV link is checked against BV,
                        # not RV — looks like a copy/paste slip; confirm.
                        self.BVlinkclick(d, RVlink, BV, logger)
                        if k[7] == BVlink and k[8] == RVlink:
                            logger.info("SUCCESS: BV and RV links matched")
                        else:
                            logger.info("FAILED: BV and RV links not matched")
    except Exception as e:
        logger.info('FAILED while checking for the links')
        logger.info('Exception: %s', e)
def get_textBVRV(self, d, list2, k, logger):
    """Formatted twin of get_textBVRV: open the row's BV/RV links and
    compare their hrefs against the expected URLs k[7] (BV) and k[8] (RV).
    """
    try:
        global Label
        self.data = []
        self.rowslist = []
        BV = list2[3]
        RV = list2[4]
        Label = list2[0]
        self.rowslist = d.find_elements_by_xpath('.//table')
        for table in self.rowslist:
            self.data_row = []
            self.tds = table.find_elements_by_tag_name('td')
            if self.tds:
                for td in self.tds:
                    # NOTE(review): `td` is a WebElement; `td == list2[0]`
                    # compares object to string and likely never matches —
                    # confirm `td.text` was intended.
                    if td == list2[0]:
                        BVlink = d.find_element_by_link_text(
                            BV).get_attribute('href')
                        RVlink = d.find_element_by_link_text(
                            RV).get_attribute('href')
                        print(BVlink)
                        self.BVlinkclick(d, BVlink, BV, logger)
                        print(RVlink)
                        # NOTE(review): RV link is verified against BV —
                        # possible copy/paste slip; confirm.
                        self.BVlinkclick(d, RVlink, BV, logger)
                        if k[7] == BVlink and k[8] == RVlink:
                            logger.info("SUCCESS: BV and RV links matched")
                        else:
                            logger.info(
                                "FAILED: BV and RV links not matched")
    except Exception as e:
        logger.info('FAILED while checking for the links')
        logger.info('Exception: %s', e)
def __init__(self, *, full=False, default=True, threads=True):
    """Constructor

    threads     Multithread support (default: True)
        Indicates whether you indend to use multiple python threads in your
        application. When set this flags disables some optimisations
        related to the GIL. (see GObject.threads_init())

    full        Full GLib (default: False)
        By default the policy is to create a GLibEventLoop object only for
        the main thread. Other threads will use regular asyncio event
        loops. If this flag is set, then this policy will use a glib event
        loop for every thread. Use this parameter if you want your loops
        to interact with modules written in other languages.

    default     Use the default context (default: True)
        Indicates whether you want to use the GLib default context. If set,
        then the loop associated with the main thread will use the default
        (NULL) GLib context (instead of creating a new one).
    """
    self._full = full
    self._default = default
    self._default_loop = None
    # Wrap the stock unix policy, overriding only loop construction.
    self._policy = unix_events.DefaultEventLoopPolicy()
    self._policy.new_event_loop = self.new_event_loop
    self.get_event_loop = self._policy.get_event_loop
    self.set_event_loop = self._policy.set_event_loop
    self.get_child_watcher = self._policy.get_child_watcher
    self._policy.set_child_watcher(GLibChildWatcher())
    BaseGLibEventLoop.init_class()
    if threads:
        logger.info("GLib threads enabled")
        GObject.threads_init()
    else:
        logger.info("GLib threads not used")

        # Make any later thread creation fail loudly; `self` is captured
        # from the enclosing scope by this closure.
        def __new__(cls, *k, **kw):
            raise RuntimeError("GLib threads not enabled (you should use %s(threads=True)" % self.__class__.__name__)
        threading.Thread.__new__ = __new__
def setUpClass(cls):
    """Resolve the temp directory for the Docker tests (from dlg.settings'
    OS_X_TEMP when present, otherwise a default), make sure it exists, and
    prepare user/group files under DLG_ROOT.
    """
    config_file_name = os.path.join(utils.getDlgDir(), "dlg.settings")
    if os.path.exists(config_file_name):
        config = configobj.ConfigObj(config_file_name)
        cls._temp = config.get("OS_X_TEMP")
    # BUG FIX: fall back to the default BEFORE testing the path. The
    # original called os.path.exists(cls._temp) first, which raises
    # TypeError when OS_X_TEMP is absent (cls._temp is None).
    # NOTE(review): assumes the class defines _temp = None by default
    # when dlg.settings does not exist — confirm.
    if cls._temp is None:
        cls._temp = "/tmp/daliuge_tfiles"
    if not os.path.exists(cls._temp):
        os.makedirs(cls._temp)
    os.environ["DLG_ROOT"] = cls._temp
    logger.info(f"Preparing pwd and group files in {utils.getDlgDir()}")
    _dum = utils.prepareUser(DLG_ROOT=utils.getDlgDir())
def fw_linux(self, cimc_util_obj, host_os_ip):
    """Read DMI type-202 records from the host OS and build a dict mapping
    each record's first string to its third string."""
    cimc_util_obj.verify_host_up(hostname=host_os_ip,
                                 wait_for_ping_fail=False, wait_time=600)
    os_handle = cimc_util_obj.host_handle
    os_handle.connect()
    output = os_handle.execute_cmd('dmidecode -t202')
    rege = r'Strings\:\s*(\w[^\n\r]+)\s*([^\n\r]+)\s*([^\n\r]+)'
    # Keep first string -> third string; the middle capture is unused.
    newdict = {first: third for (first, _middle, third) in re.findall(rege, output)}
    logger.info("New Dict")
    logger.info(newdict)
    return newdict
def output_fn(prediction_output, accept='application/json'):
    """Serialize the top-3 class predictions of *prediction_output*
    (a 1xN score tensor) as a JSON list of {prediction, score} dicts.

    Returns (json_body, accept). Raises for any Accept type other than
    application/json.
    """
    logger.info('Serializing the generated output.')
    classes = {0: 'Leopards', 1: 'airplanes', 2: 'butterfly', 3: 'camera',
               4: 'elephant', 5: 'lamp', 6: 'rhino', 7: 'umbrella',
               8: 'watch'}
    topk, topclass = prediction_output.topk(3, dim=1)
    result = []
    for i in range(3):
        pred = {'prediction': classes[topclass.cpu().numpy()[0][i]],
                'score': f'{topk.cpu().numpy()[0][i] * 100}%'}
        # FIX: log message previously read "Adding pediction".
        logger.info(f'Adding prediction: {pred}')
        result.append(pred)
    if accept == 'application/json':
        return json.dumps(result), accept
    raise Exception(f'Requested unsupported ContentType in Accept:{accept}')
def input_fn(request_body, content_type='application/json'):
    """Deserialize a JSON request carrying an image URL, download the
    image, and return it as a normalized 224x224 tensor."""
    logger.info('Deserializing the input data.')
    # Reject anything that is not JSON up front.
    if content_type != 'application/json':
        raise Exception(f'Requested unsupported ContentType in content_type {content_type}')
    input_data = json.loads(request_body)
    url = input_data['url']
    logger.info(f'Image url: {url}')
    image_data = Image.open(requests.get(url, stream=True).raw)
    # Standard ImageNet preprocessing: resize, center-crop, normalize.
    preprocess = transforms.Compose([
        transforms.Resize(size=256),
        transforms.CenterCrop(size=224),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406],
                             [0.229, 0.224, 0.225]),
    ])
    return preprocess(image_data)
def verify_label(self, parase, list2, logger):
    """For each device in the parsed config (skipping the Puremgr/Nagiosmgr
    manager entries), pull its Device_01 InitSetting and validate the
    scraped row via verify_status."""
    # BUG FIX: DevName was previously unbound (NameError) inside the
    # except handler when a failure occurred before its first assignment.
    DevName = None
    try:
        for Device in list(parase):
            # Manager entries carry no device table; skip them.
            if Device == 'Puremgr' or Device == 'Nagiosmgr':
                continue
            DevName = Device
            Device = self.getFromDict(parase, [Device])
            Device_no = self.getFromDict(Device, ["Device_01"])
            InitSetting = self.getFromDict(Device_no, ["InitSetting"])
            InitSetting = InitSetting[0]
            self.verify_status(InitSetting, list2, DevName, logger)
    except Exception as e:
        logger.info('FAILED while collecting data from config file. Failed for device ' + str(DevName) + ' from config file')
        logger.info('Exception: %s', e)
def tag(self):
    """Builds the actual AtomCollection data set for the provided file and
    then makes the call to Subler to actually write that metadata to a
    temporary file, finally trashing the original and moving the tagged
    copy into place.

    Raises KeyError for unsupported file extensions.
    """
    extension = os.path.splitext(self.file_name)[-1].lower()
    if extension not in self.supported_types:
        raise KeyError('Unsupported file type: {}'.format(extension))
    # Let every registered searcher populate self.atoms.
    for searcher in self.searchers:
        searcher(self.atoms).search(self.parser)
    tmp_file_name = '.tmp{}.m4v'.format(str(uuid4()))
    full_path = os.path.abspath(tmp_file_name)
    # determine explicitness
    explicit = self.atoms.pop('_explicit', Atom('', ''))
    if explicit.value.lower() != 'explicit':
        explicit = None
    else:
        explicit = explicit.value
    # create a subler instance for writing collected metadata to file
    subler = Subler(self.file_name, dest=full_path, explicit=explicit,
                    media_kind=self.media_kind, metadata=self.atoms.atoms)
    LOGGER.info('Beginning Metadata tagging...')
    try:
        subler.tag()
    except subprocess.CalledProcessError as ex:
        # NOTE(review): exit code 255 is deliberately tolerated here —
        # presumably a known benign Subler exit status; confirm.
        if ex.returncode != 255:
            raise ex
    LOGGER.info('Metadata tagging copmlete. moving updated file')
    # Clean up the temporary artwork file downloaded earlier, if any.
    for tag, value in self.atoms.items():
        if tag == 'Artwork' and os.path.exists(value):
            LOGGER.info('Deleting Temporary Artwork: {}'.format(value))
            os.remove(value)
    LOGGER.info('Moving {} to trash'.format(self.file_name))
    trash(self.file_name)
    # Place the tagged temp file where the original lived, under the
    # configured output file name.
    file_name = os.path.basename(self.file_name)
    dest_path = self.file_name.replace(file_name, self.output_file_name)
    LOGGER.info('Moving {} to {}'.format(full_path, dest_path))
    shutil.move(full_path, dest_path)
def model_fn(model_dir):
    """Load the fine-tuned ResNet-50 checkpoint from *model_dir* and
    return the model in eval mode on the best available device."""
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    logger.info('Loading the model.')
    model = models.resnet50(pretrained=False)
    in_features = model.fc.in_features
    # Swap the stock classifier head for the custom 10-class head the
    # checkpoint was trained with.
    model.fc = nn.Sequential(
        nn.Linear(in_features, 2048),
        nn.ReLU(inplace=True),
        nn.Linear(2048, 10),
        nn.Dropout(0.4),
        nn.LogSoftmax(dim=1),
    )
    checkpoint_path = os.path.join(model_dir, 'model_0.pth')
    with open(checkpoint_path, 'rb') as f:
        model.load_state_dict(torch.load(f))
    model.to(device).eval()
    logger.info('Done loading model')
    return model
def _search(self, query):
    """Search Trakt for a TV episode matching *query*"""
    results = TVShow.search(query)
    self.filter_key = slugify(query)
    match = self.filter_results(query, results)
    # Nothing matched: hand back whatever context we already have.
    if match is None:
        return self.context
    show = TVShow(match.slug)
    LOGGER.info('Trakt Search Result: %s', str(show))
    # Fill in show-level metadata first.
    self._apply_mapping(show)
    # Episode-specific lookups need a season number; bail out without one.
    season_num = self.context.get('TV Season')
    if season_num is None:
        return self.context
    episode_num = self.context.get('TV Episode #')
    return self._get_episode(TVEpisode(match.slug, season_num, episode_num))
def Verify_Tech_support_report(self, cimc_util_obj, config):
    '''
    Logical ID: RACK-BIOS-DN-CDN-VIC-022
    Test Case: Check for CDN detail from Tech Support log
    Pass/Fail Criteria: CDN names and its details should appear correctly
    in Techsupport log
    Author: lakkris2
    '''
    vic_obj = VicLib(cimc_util_obj, config)
    vic_list = config.inventory_detail
    self.created_vnic_list = []
    if cimc_util_obj.set_host_power('on') is False:
        self.failed('Failed to power on host', goto=['cleanup'])
    for vic in vic_list:
        logger.info('vic slot number is: ' + vic.slot_number)
        slot_no = vic.slot_number
        '''creating 2 vNIC interface'''
        dev_list = ['9', '10']
        if create_vnic_interface(self, vic_obj, slot_no, dev_list) is not True:
            self.failed('Failed to create vnic interface', goto=['cleanup'])
        # Power-cycle so the new CDN names take effect on the host.
        cimc_util_obj.power_cycle_host()
        host_ip = get_host_mgmt_ip(config)
        res = cimc_util_obj.verify_host_up(host_ip, wait_time=400,
                                           wait_for_ping_fail=False)
        if res is False:
            logger.warning('Failed to ping the host after host reboot')
        else:
            logger.info("Host name IP pinging successfully")
        # Export the tech-support bundle and check CDN consistency in it.
        out = cimc_util_obj.upload_techsupport_data(protocol='tftp')
        if out is False:
            self.failed('Failed to upload tech-support data', goto=['cleanup'])
        res = cimc_util_obj.validate_cdn_techsupport(config)
        if res is False:
            self.failed('Failed to verify that CDN info consistent across CIMC and VIC exported \
files', goto=['cleanup'])
        else:
            self.passed('Successfully verified that CDN info consistent across CIMC and VIC config')
        cimc_util_obj.remove_techsupport_file()
def verify_label(self, parase, list2, logger):
    """Walk the parsed device config, skip the manager entries, and
    validate each device's Device_01 InitSetting against the scraped row
    via verify_status."""
    try:
        for DevName in list(parase):
            # Manager nodes have no device tables to validate.
            if DevName == 'Puremgr' or DevName == 'Nagiosmgr':
                continue
            device_cfg = self.getFromDict(parase, [DevName])
            device_entry = self.getFromDict(device_cfg, ["Device_01"])
            init_setting = self.getFromDict(device_entry, ["InitSetting"])[0]
            self.verify_status(init_setting, list2, DevName, logger)
    except Exception as e:
        logger.info(
            'FAILED while collecting data from config file. Failed for device '
            + str(DevName) + ' from config file')
        logger.info('Exception: %s', e)
def download_from_s3(region='us-east-1',
                     bucket="diabetic-retinopathy-data-from-radiology",
                     s3_filename='test.png',
                     local_path="/opt/ml/input/data"):
    """Download *s3_filename* from *bucket* into the *local_path* directory.

    Creates the directory when missing. A 404 from S3 is logged and
    swallowed; any other client error propagates.
    """
    if not path.exists(local_path):
        makedirs(local_path, mode=0o755, exist_ok=True)
    s3_client = boto3.client('s3', region_name=region,
                             aws_access_key_id=aws_access_key_id,
                             aws_secret_access_key=aws_secret_access_key)
    try:
        # BUG FIX: Filename must be the destination FILE path; the
        # original passed the directory itself, which cannot be opened
        # for writing by download_file.
        s3_client.download_file(bucket, Key=s3_filename,
                                Filename=path.join(local_path, s3_filename))
    except ClientError as e:
        if e.response['Error']['Code'] == "404":
            logger.info(
                f"The object s3://{bucket}/{s3_filename} in {region} does not exist."
            )
        else:
            raise
def input_fn(request_body, request_content_type='application/json'):
    """Deserialize a JSON request naming up to 10 images (keys img0..img9),
    download them from S3, preprocess them, and return a DataLoader over
    the resulting dataset.

    Raises for any content type other than application/json.
    NOTE(review): relies on module-level `bucket`, `data_dir`, `params`
    and `num_workers` configured elsewhere — confirm their values.
    """
    image_name = []
    if request_content_type == 'application/json':
        input_object = json.loads(request_body)
        region = input_object['region']
        logger.info('Downloading the input diabetic retinopathy data.')
        # Keys img0..img9 are optional; missing ones are skipped.
        for i in range(10):
            try:
                img = input_object[f'img{str(i)}']
                download_from_s3(region=region, bucket=bucket,
                                 s3_filename=img, local_path=data_dir)
                image_name.append(img)
            except KeyError as e:
                print(e)
        image_df = DataFrame(image_name, columns=['id_code'])
        image_paths = image_df['id_code'].apply(
            lambda x: image_with_name_in_dir(data_dir, x))
        # Preprocessing the images
        dataset = run_image_preprocessing(
            params=params,
            apply_softmax=True,
            need_features=params['need_features'],
            image_df=image_df,
            image_paths=image_paths,
            batch_size=params['batch_size'],
            tta='fliplr',
            workers=num_workers,
            crop_black=True)
        return DataLoader(dataset, params['batch_size'],
                          pin_memory=True, num_workers=num_workers)
    raise Exception(
        f'Requested unsupported ContentType in request_content_type {request_content_type}'
    )
def create_vnic_interface(self, vic_obj, slot_no, dev_list=None):
    """Create one vNIC (with a matching CDN name) per entry in *dev_list*
    on slot *slot_no*, recording successes in self.created_vnic_list as
    "<slot> <device-name>" strings.

    Returns True when every vNIC was created, False otherwise.
    """
    # BUG FIX: the original used a mutable default argument (dev_list=[]),
    # which is shared across calls.
    if dev_list is None:
        dev_list = []
    all_created = True
    for dev in dev_list:
        dev_name = 'eth_dev_' + dev + str(slot_no)
        cdnname = 'cdn_' + dev_name
        res = vic_obj.create_vnic(slot_no, dev_name, cdn_name=cdnname)
        if res is not True:
            logger.error('Failed to create vNIC interface for dev name: ' + dev_name)
            all_created = False
        else:
            logger.info('Successfully created vNIC with device name: ' + dev_name)
            self.created_vnic_list.append(slot_no + ' ' + dev_name)
    return all_created
def has_artwork(self, url):
    """Attempts to download artwork from the provided URL and write it to
    a uniquely-named '.albumart<uuid>.jpg' file, returning that file name
    (with spaces shell-escaped) on a 2xx HTTP response. If an error should
    occur, nothing is downloaded and False is returned.
    """
    if not url:
        return False
    LOGGER.info('Downloading Album Artwork...')
    LOGGER.debug('URL: %s', url)
    req = requests.get(url)
    if 200 <= req.status_code < 300:
        file_name = '.albumart{}.jpg'.format(str(uuid4()))
        LOGGER.info('Writing artwork to %s', file_name)
        with open(file_name, 'wb') as f:
            f.write(req.content)
        # Escape spaces so the name is safe to hand to a shell command.
        return file_name.replace(' ', '\\ ')
    message = 'Album Art Not Downloaded: {}'.format(req.status_code)
    # FIX: Logger.warn is a deprecated alias; use warning().
    LOGGER.warning(message)
    return False
def cdn_enable_test(self, cimc_util_obj, config):
    '''
    Logical ID: RACK-BIOS-DN-CDN-VIC-003
    Test Case: To Check CDN is Enabled from CIMC
    '''
    vic_list = config.inventory_detail
    logger.info('VIC list are: ' + str(vic_list))
    bios_obj = cimc_util_obj.bios_util_obj
    host_util = HostUtils()
    boot_order_obj = BootOrder(cimc_util_obj)
    status = host_util.check_host_up(cimc_util_obj, boot_order_obj, config)
    if status is False:
        logger.warning('Host OS is not pinging after setting the boot order to HDD ')
    bios_token = 'cdnEnable'
    token_new_value = 'Enabled'
    logger.info('Power on the host')
    if cimc_util_obj.set_host_power('on') is False:
        self.failed('Failed to power on host', goto=['cleanup'])
    host_ip = get_host_mgmt_ip(config)
    token_val = bios_obj.get_bios_token_value(bios_token)
    if token_val == token_new_value:
        logger.info('CDN is already enabled on CIMC')
    else:
        # Enable the token; the host must reboot for it to take effect.
        logger.info('Set the cdnEnable token Enable and Reboot the host')
        res = bios_obj.set_bios_token_value(bios_token, token_new_value)
        if res is False:
            logger.error('Failed to set bios token value')
            self.failed('Failed to set bios token value', goto=['cleanup'])
        '''Wait for host to reboot'''
        res = cimc_util_obj.verify_host_up(host_ip,
                                           wait_for_ping_fail=False,
                                           wait_time=400)
        if res is False:
            logger.warning('Failed to ping the host after host reboot')
            self.failed('Failed to ping host', goto=['cleanup'])
    # NOTE(review): `scope` is assigned but never used — dead local.
    scope = 'advanced'
    # Re-read the token to confirm the enable persisted.
    token_val = bios_obj.get_bios_token_value(bios_token)
    if token_val == token_new_value:
        self.passed(
            'Successfully verified that CDN token can be enabled from CIMC'
        )
    else:
        self.failed('Failed to verify that CDN token,\
Expected is:' + token_new_value + ' But got as: ' + token_val)
def handle(self) -> int:
    """CLI entry point: either list the available changelog-update
    automation plugins (--list) or load the plugin named by --use and
    resolve the base revision from it.

    Returns 1 on argument-validation failure, 0 after listing.
    NOTE(review): the method appears to continue beyond this chunk —
    the tail shown here may be truncated.
    """
    from nr.util.plugins import iter_entrypoints, load_entrypoint
    if not self._validate_arguments():
        return 1
    if self.option("list"):
        # Enumerate every registered automation plugin entrypoint.
        for ep in iter_entrypoints(
                ChangelogUpdateAutomationPlugin.ENTRYPOINT):
            self.line(f' • {ep.name}')
        return 0
    automation_plugin: ChangelogUpdateAutomationPlugin | None = None
    if plugin_name := self.option("use"):
        logger.info(
            'Loading changelog update automation plugin <subj>%s</subj>',
            plugin_name)
        automation_plugin = load_entrypoint(
            ChangelogUpdateAutomationPlugin,
            plugin_name)()  # type: ignore[misc]
        automation_plugin.io = self.io
        automation_plugin.initialize()
        base_revision: str = automation_plugin.get_base_ref()
def verify_export_vnic(self, cimc_util_obj, config, common_config):
    '''
    Logical ID: RACK-BIOS-DN-CDN-VIC-017
    Test Case: Export VIC Configuration and CDN Name
    Pass/Fail Criteria: CDN names and its details should appear in
    Exported file
    Author: lakkris2
    '''
    vic_obj = VicLib(cimc_util_obj, config)
    exp_obj = ExpImpUtils(cimc_util_obj, config, common_config)
    vic_list = config.inventory_detail
    self.created_vnic_list = []
    if cimc_util_obj.set_host_power('on') is False:
        self.failed('Failed to power on host', goto=['cleanup'])
    for vic in vic_list:
        logger.info('vic slot number is: ' + vic.slot_number)
        slot_no = vic.slot_number
        '''creating 2 vNIC interface'''
        dev_list = ['4', '5']
        if create_vnic_interface(self, vic_obj, slot_no, dev_list) is not True:
            self.failed('Failed to create vnic interface', goto=['cleanup'])
        # Export the slot's VIC configuration and verify the CDN details
        # survive the round-trip.
        out = exp_obj.export_vic_config(slot_no)
        if out is False:
            self.failed('Failed to export vic config', goto=['cleanup'])
        res = exp_obj.validate_vic_config(slot_no)
        if res is False:
            self.failed('Failed to verify that CDN info consistent across CIMC and VIC \
exported files', goto=['cleanup'])
        else:
            self.passed('Successfully verified that CDN info consistent across \
CIMC and VIC config')
        exp_obj.remove_vic_config()
def RVlinkclick(self, d, link, RV, logger):
    """Open *link* and report whether the page element named *RV* carries
    the expected version text. Any exception is logged, not raised."""
    try:
        d.get(link)
        element = d.find_element_by_name(RV)
        # BUG FIX: find_element_by_name returns a WebElement, and the
        # original compared that object to the *RV* string — never equal,
        # so the check always logged FAILED. Compare the element's text.
        # NOTE(review): assumes the element's visible text is the version
        # string — confirm against the page markup.
        if element.text == RV:
            logger.info("SUCCESS: Found the version in the opened link")
        else:
            logger.info("FAILED: Not found in the link")
    except Exception as e:
        logger.info('Exception: %s', e)
def RVlinkclick(self, d, link, RV, logger):
    """Navigate to *link*, look up the element named *RV*, and log whether
    it matched; exceptions are logged rather than raised."""
    try:
        d.get(link)
        found = d.find_element_by_name(RV)
        # NOTE(review): compares the located object itself against the RV
        # string, mirroring the original logic exactly.
        message = ("SUCCESS: Found the version in the opened link"
                   if found == RV
                   else "FAILED: Not found in the link")
        logger.info(message)
    except Exception as e:
        logger.info('Exception: %s', e)
def test_with_args(self):
    """Tests that delayed() works correctly with positional args."""
    delayed = self.delayed
    compute = self.compute
    # FIX: the log calls used f-string prefixes on strings with no
    # placeholders; plain literals are equivalent and clearer.
    logger.info("Running compute(delayed(sum_with_args)(1)), 1")
    self.assertEqual(compute(delayed(sum_with_args)(1)), 1)
    logger.info("Running compute(delayed(sum_with_args)(1, 20)), 21")
    self.assertEqual(compute(delayed(sum_with_args)(1, 20)), 21)
    logger.info("Running compute(delayed(sum_with_args)(1, 20, 30)), 51")
    self.assertEqual(compute(delayed(sum_with_args)(1, 20, 30)), 51)
def verify_status(self, parase, list2, Device, logger):
    """Compare the scraped row *list2* against the expected device values
    *parase* and log SUCCESS/FAILED verdicts.

    list2 layout: [label, ip, installed, base, recommended, status].
    Expected values are collected into k in the same order, plus
    k[6]=Link flag, k[7]=BV url, k[8]=RV url.
    Returns 1 when an exception occurred, otherwise None.
    """
    try:
        k = []
        global paras
        paras = parase
        self.Device = Device
        k.append(paras['label'])
        k.append(paras['ipv4_address'])
        k.append(paras['Version'])
        k.append(paras['BaseVersion'])
        k.append(paras['RecommendedVersion'])
        k.append(paras['Compliance'])
        k.append(paras['Link'])
        k.append(paras['BVUrl'])
        k.append(paras['RVUrl'])
        if list2[0] == k[0]:
            if list2[1] == k[1]:
                # All three version columns agree with the config.
                if list2[2] == k[2] and list2[3] == k[3] and list2[4] == k[4]:
                    logger.info("SUCCESS: All versions are matched with the config file")
                    if list2[5] == k[5]:
                        logger.info("SUCCESS: Compliance status checked and validated")
                        # Link == 'True' means the row exposes BV/RV links
                        # that must also be verified.
                        if (k[6] == 'True'):
                            logger.info("SUCCESS: Device Not compliant: Checking with the links")
                            print("checking urls for BV and RV")
                            self.get_textBVRV(self.d, list2, k, logger)
                        else:
                            if list2[2] == list2[3] or list2[2] == list2[4]:
                                print(list2[5])
                                logger.info('Marked FALSE as ' + str(list2[5]))
                else:
                    logger.info("SUCCESS: Versions are not matching with the config file")
                    if list2[5] == 'Non Compliant':
                        logger.info('SUCCESS: Compliance status checked and validated')
                        if (k[6] == 'True'):
                            logger.info("SUCCESS: Not compliant: checking with the links")
                            print("checking urls for BV and RV")
                            self.get_textBVRV(self.d, list2, k, logger)
                        else:
                            if list2[2] == list2[3] or list2[2] == list2[4]:
                                print(list2[5])
                                logger.info('Marked FALSE as ' + str(list2[5]))
                    else:
                        if list2[5] == 'Compliant':
                            logger.info('FAILED: WRONG Compliance Status is Displayed')
            else:
                print('FAILED: IP Address not matching')
                logger.info('FAILED: IP Address not matching')
        # else:
        #     print('FAILED: Label not matching')
        #     logger.info('FAILED: Label not matching')
    except Exception as e:
        logger.info('Exception: %s', e)
        return 1