def test_load_test_bruker_image_file(self):
    """Test that a Bruker AFM image file is read in as 8 sidpy Datasets of shape (512, 512)."""
    file_path = 'image_bruker.001'
    # Fetch the reference file from the test-data repository.
    wget.download(root_path + "/BrukerReader_Image.001?raw=true", out=file_path)
    data_translator = sr.BrukerAFMReader(file_path)
    datasets = data_translator.read(verbose=True)
    assert len(datasets) == 8, \
        "Length of dataset should be 8 but is instead {}".format(len(datasets))
    for ind in range(len(datasets)):
        # FIX: added the missing space between the two halves of the message
        # ("...sidpy dataset" + "but was..." rendered as "datasetbut was").
        assert type(datasets[ind]) == sidpy.sid.dataset.Dataset, \
            "Dataset No. {} not read in as sidpy dataset " \
            "but was instead read in as {}".format(ind, type(datasets[ind]))
        # BUG FIX: the failure message previously claimed (256,256) while the
        # assertion checks (512, 512).
        assert datasets[ind].shape == (512, 512), \
            "Dataset[{}] is of size (512,512) but was read in as {}".format(
                ind, datasets[ind].shape)
        assert type(datasets[ind]._axes[0]) == sidpy.sid.dimension.Dimension, \
            "Dataset should have dimension type of sidpy Dimension, " \
            "but is instead {}".format(type(datasets[ind]._axes))
    os.remove(file_path)
def get_images(self, download_folder):
    """Download images numbered 9 through 99 into *download_folder*.

    Prints a marker per image ('> ' on success, 'x ' on failure), pauses one
    second between requests, and reports the list of failed indices at the end.
    """
    failed = []
    for number in range(9, 100):
        try:
            wget.download(self.get_url(number), fr'{download_folder}/')
        except Exception:
            time.sleep(1)
            failed.append(number)
            print('x ', number)
        else:
            print('> ', number)
            time.sleep(1)
    print('data downloaded')
    print(f'error in {failed}')
def get_demo_logfiles():
    """Ensure the demo recording exists under ./data and return its path as str.

    Downloads the file from the project's web server when it is not already
    present locally.
    """
    from pywget import wget

    base_url = "https://www2.informatik.hu-berlin.de/~naoth/ressources/log/demo_rc20_project/"
    demo_logfile = "rc18-htwk-naoth-h1-combined.mp4"

    target_dir = Path("data")
    Path.mkdir(target_dir, exist_ok=True)

    demo_path = target_dir / demo_logfile
    if not demo_path.is_file():
        print("demo video file not found - will download it")
        wget.download(base_url + demo_logfile, target_dir)

    return str(demo_path)
def download(cls, url, download_path=None, download_name=None, confirm=False):
    """Download *url* to *download_path* (optionally renamed *download_name*).

    On Windows, Internet Download Manager is tried first and pywget is the
    fallback; on Linux the work is delegated to ``download_linux``; any other
    platform is reported as unsupported.
    """
    # BUG FIX: the default used to be ``os.getcwd()`` evaluated once at import
    # time; resolve it at call time instead (backward compatible for callers
    # that pass a path explicitly).
    if download_path is None:
        download_path = os.getcwd()
    if sys.platform == 'win32':
        try:
            from idm import IDMan
            dm = IDMan()
            dm.download(url, download_path, download_name, confirm=confirm)
        except Exception:
            # IDM unavailable or failed -> fall back to the bundled wget.
            # (Was a bare ``except:``, which also swallowed KeyboardInterrupt.)
            from pywget import wget
            if download_name:
                print(make_colors("Download Name:", 'lw', 'bl') + " " + make_colors(download_name, 'lw', 'm'))
                download_path = os.path.join(download_path, download_name)
            wget.download(url, download_path)
    elif 'linux' in sys.platform:
        return cls.download_linux(url, download_path, download_name)
    else:
        print(make_colors("Your system not supported !", 'lw', 'lr', ['blink']))
def test_load_wrong_file(self):
    """DMReader must reject a Nion ndata file by raising a TypeError."""
    downloaded = wget.download(data_path + '/NionReader_Image_STO_HAADF.ndata')
    with self.assertRaises(TypeError):
        _ = SciFiReaders.DMReader(downloaded)
    os.remove(downloaded)
def test_read_spectrum_image(self):
    """Read an EMD spectrum-image file and verify datasets, data, and metadata."""
    file_name = wget.download(data_path + '/EMDReader_SpectrumImage_Si.emd')
    emd_reader = EMDReader(file_name)
    datasets = emd_reader.read()
    emd_reader.close()
    self.assertIsInstance(datasets[0], sidpy.Dataset)
    # BUG FIX: ``assertTrue(x, y)`` treats y as the failure *message* and always
    # passes for truthy x, so the original two checks verified nothing.
    # Mapping inferred from the shapes asserted below: datasets[0] is a 5-frame
    # 16x16 stack, datasets[1] the 512x512x4096 spectrum image — TODO confirm.
    self.assertEqual(datasets[0].data_type.name, 'IMAGE_STACK')
    self.assertEqual(datasets[1].data_type.name, 'SPECTRAL_IMAGE')
    self.assertTrue(datasets[1].ndim == 3)
    self.assertTrue(len(datasets) == 2)
    print(datasets[0].original_metadata)
    original_metadata = datasets[0].original_metadata
    self.assertTrue(datasets[0].units == 'counts')
    self.assertTrue(datasets[0].shape == (5, 16, 16))
    self.assertTrue(datasets[1].shape == (512, 512, 4096))
    # Spot-check a few pixel/voxel values.
    self.assertEqual(float(datasets[0][1, 10, 10]), 23053.)
    self.assertEqual(float(datasets[0][3, 10, 10]), 23228.0)
    self.assertEqual(float(datasets[1][100, 100, 1000]), 0.0)
    self.assertEqual(float(datasets[1][50, 50, 1000]), 0.0)
    self.assertTrue(datasets[0].quantity == 'intensity')
    self.assertIsInstance(datasets[0].x, sidpy.Dimension)
    # Spot-check the instrument metadata tree.
    self.assertTrue(
        original_metadata['Core']['MetadataDefinitionVersion'] == '7.9')
    self.assertTrue(
        original_metadata['Instrument']['Manufacturer'] == 'FEI Company')
    self.assertTrue(
        original_metadata['Acquisition']['SourceType'] == 'XFEG')
    self.assertTrue(
        original_metadata['Optics']['AccelerationVoltage'] == '200000')
    os.remove(file_name)
def test_load_wrong_file(self):
    """NionReader applied to a dm3 file should yield a dataset of UNKNOWN type."""
    downloaded = wget.download(data_path + '/DMReader_Image_SI-Survey.dm3')
    nion_reader = SciFiReaders.NionReader(downloaded)
    result = nion_reader.read()
    self.assertEqual(result.data_type.name, 'UNKNOWN')
    os.remove(downloaded)
def download_linux(cls, url, download_path=os.getcwd(), saveas=None, downloader = 'aria2c'):
    '''
    Download *url* on Linux using an external command-line tool.

    downloader: aria2c, wget, uget, persepolis
    Falls back to the bundled pywget, and finally copies the URL to the
    clipboard when nothing can download it.

    NOTE: the default download_path is evaluated once at import time.
    '''
    # Vimeo links are routed to wget unconditionally.
    if cls.is_vimeo:
        downloader = 'wget'
    # Probe which external downloaders exist.  On Python 3 the combined
    # stdout/stderr of the probe command is captured; on Python 2 the stderr
    # stream of os.popen3 is read instead.
    if sys.version_info.major == 3:
        aria2c = subprocess.getoutput("aria2c")
    else:
        aria2c = os.popen3("aria2c")[2].readlines()[0]
    if sys.version_info.major == 3:
        wget = subprocess.getoutput("wget")
    else:
        wget = os.popen3("wget")[2].readlines()[0]
    if sys.version_info.major == 3:
        persepolis = subprocess.getoutput("persepolis --help")
    else:
        persepolis = os.popen3("persepolis --help")[1].readlines()[0]
    # "not found\n" in the probe output means the shell could not find the tool.
    if downloader == 'aria2c' and not re.findall("not found\n", aria2c):
        if saveas:
            saveas = '-o "{0}"'.format(saveas)
        # NOTE(review): when saveas is None the literal string 'None' is
        # interpolated into cmd — confirm this is intended.
        cmd = 'aria2c -c -d "{0}" "{1}" {2} --file-allocation=none'.format(os.path.abspath(download_path), url, saveas)
        debug(cmd = cmd)
        os.system(cmd)
    elif downloader == 'wget' and not re.findall("not found\n", wget):
        if saveas:
            saveas = '-P "{0}" -O "{1}"'.format(os.path.abspath(download_path), saveas)
        else:
            saveas = '-P "{0}"'.format(os.path.abspath(download_path))
        cmd = 'wget -c "{0}" {1}'.format(url, saveas)
        debug(cmd = cmd, debug = True)
        os.system(cmd)
    elif downloader == 'persepolis' and not re.findall("not found\n", persepolis):
        cmd = 'persepolis --link "{0}"'.format(url)
        debug(cmd = cmd)
        os.system(cmd)
    else:
        # Last resort: the pure-python wget bundled with pywget.
        try:
            from pywget import wget as d
            d.download(url, download_path, saveas)
        except:
            print(make_colors("Can't Download this file !, no Downloader supported !", 'lw', 'lr', ['blink']))
            clipboard.copy(url)
def saveNBimages(notebook=None, folder=None):
    """Collect image links from a notebook's markdown cells and download them.

    Parameters
    ----------
    notebook: str
        Directory of a .ipynb file (the '.ipynb' suffix may be omitted).
    folder: str
        Directory where images found are to be saved.
    """
    # Fall back to appending the extension when the bare name does not open.
    try:
        f = open(notebook, "r")
    except Exception:
        notebook = notebook + '.ipynb'
        f = open(notebook, "r")
    # BUG FIX: the handle was never closed; parse the JSON and release it.
    # (Also removed unused locals keylist/itemslist and a pointless seek.)
    with f:
        jsonObj = json.loads(f.read())
    # NOTE(review): the original skips the first cell and the last three —
    # presumably header/footer cells; confirm before widening the range.
    length = len(jsonObj['cells'])
    for i in range(1, length - 3):
        cell = dict(jsonObj['cells'][i])
        if cell['cell_type'] == 'markdown':
            # Find all image links in this markdown cell.
            source_ = str(cell['source']).replace(',', '')
            nr_img = source_.count('img')
            index_end = 0
            for _ in range(nr_img):
                link, index_end = findImageTag(source=source_[index_end:-1])
                try:
                    wget.download(link, folder)
                except Exception:
                    print('ALERT', notebook)
def download(self, url, download_path=None, confirm=False, use_wget=False):
    """Download *url* into *download_path*.

    Sourceforge links are delegated to ``self.sourceforge``.  Otherwise IDM is
    tried first (unless *use_wget* is set) and wget is the fallback.
    """
    # BUG FIX: the default used to be ``os.getcwd()`` evaluated once at import
    # time; resolve it at call time instead.
    if download_path is None:
        download_path = os.getcwd()
    print(
        make_colors("DOWNLOAD PATH:", 'bl') + " " +
        make_colors(download_path, 'y'))
    if 'sourceforge' in url:
        return self.sourceforge(url, download_path)
    if use_wget:
        wget.download(url, download_path)
    else:
        try:
            idm = IDMan()
            idm.download(url, download_path, confirm=confirm)
        except Exception:
            if sys.platform == 'win32':
                # BUG FIX: format_exc() only *returns* the traceback text; it
                # was previously discarded, hiding the IDM failure.
                print(traceback.format_exc())
            wget.download(url, download_path)
def download(self, url, download_path=os.getcwd(), altname=None, prompt=False):
    # Download *url* via Internet Download Manager when available; otherwise
    # fall back to the bundled pywget.  (Python 2 print-statement syntax.)
    # NOTE(review): the default download_path is evaluated once at import time.
    print make_colors('start downloading ...', 'lr')
    try:
        import idm
        dm = idm.IDMan()
        dm.download(url, download_path, altname, confirm=prompt)
    except:
        # NOTE(review): format_exc() returns a string that is discarded here,
        # so the IDM failure is never shown.
        traceback.format_exc()
        print make_colors("Internet Download Manager NOT FOUND !", 'lr', 'lw', ['blink'])
        print make_colors('Download with wget (buildin) ...', 'b', 'ly')
        # When an alternative name is given, save under that name.
        if altname:
            download_path = os.path.join(download_path, altname)
            print make_colors("SAVE AS ", 'lc') + " : " + make_colors(
                download_path, 'lw', 'lr')
        wget.download(str(url), download_path)
def check_database():
    """Ensure the local database exists and is reasonably fresh.

    On first run the zipped database is downloaded from Github and unpacked.
    Afterwards, if the last update is older than 48 hours, the pathway history
    is re-checked and the db info file refreshed.  Finally the complete list of
    human genes is loaded into memory.
    """
    path = os.path.join(os.getcwd(), 'database')
    # First run: no database directory yet -> fetch and unpack the zip.
    if not os.path.exists(path):
        print("---> This is the first run of PETAL, so download the database from Github!")
        print('---> The zip download has started!')
        wget.download(gl.url_download_database, os.getcwd())
        print("---> The zip download is complete!")
        with ZipFile(os.path.join(os.getcwd(), 'only_database.zip'), 'r') as zf:
            zf.extractall(os.getcwd())
        print("---> The database is ready for use")
        os.remove(os.path.join(os.getcwd(), 'only_database.zip'))
    print("----- CHECK UPDATED PATHWAYS -----")
    # FIX: close the info file deterministically (was an unclosed open()).
    with open(os.path.join(os.getcwd(), 'database', 'db_info.json')) as info_file:
        db_info = json.load(info_file)
    # BUG FIX: timedelta.seconds is only the sub-day remainder (0..86399), so
    # the "> 172800" test below could never be true; use total_seconds().
    delta_time = (datetime.now() - datetime.strptime(
        db_info['updated_at'], '%Y-%m-%d %H:%M:%S.%f')).total_seconds()
    # More than 48 hours (172800 seconds) since the last KEGG check?
    if delta_time > 172800:
        print('---> It\'s been more than 48 hours since the last check!')
        # Check for updated pathways since the recorded timestamp.
        check_history_pathways(db_info['updated_at'])
        # Update the db_info.json file.
        update_info_db(db_info['created_at'])
    # Load the complete list of human genes into memory.
    gl.CSV_GENE_HSA = read_list_homo_sapiens_genes()
    print("----- LOADED LIST OF HUMAN GENES -----")
def test_load_dm4_file(self):
    """A dm4 EELS spectrum must load with the expected data, axis and metadata."""
    local_file = wget.download(data_path + '/DMReader_EELS_STO.dm4')
    dm_reader = SciFiReaders.DMReader(local_file, verbose=True)
    spectrum = dm_reader.read()
    self.assertEqual(spectrum.title, 'DMReader_EELS_STO')
    self.assertEqual(spectrum.source, 'SciFiReaders.DMReader')
    self.assertEqual(spectrum[200], 135727.0)
    self.assertEqual(spectrum.energy_loss[200], 400.0)
    self.assertEqual(spectrum.original_metadata['DM']['dm_version'], 4)
    exposure = spectrum.original_metadata['ImageList']['1']['ImageTags'][
        'EELS']['Acquisition']['Exposure (s)']
    self.assertEqual(exposure, 2.0)
    self.assertEqual(spectrum.data_type.name, 'SPECTRUM')
    os.remove(local_file)
def test_load_test_bruker_force_file(self):
    """Test that a Bruker force-curve file is read in as 2 sidpy Datasets."""
    file_path = 'force_bruker.001'
    # Fetch the reference file from the test-data repository.
    wget.download(root_path + "/BrukerReader_ForceCurve_Sapphire_TAP525.001?raw=true", out=file_path)
    data_translator = sr.BrukerAFMReader(file_path)
    datasets = data_translator.read(verbose=False)
    assert len(datasets) == 2, \
        "Length of dataset should be 2 but is instead {}".format(len(datasets))
    for ind in range(len(datasets)):
        # FIX: added the missing space between the two halves of the message
        # ("...sidpy dataset" + "but was..." rendered as "datasetbut was").
        assert type(datasets[ind]) == sidpy.sid.dataset.Dataset, \
            "Dataset No. {} not read in as sidpy dataset " \
            "but was instead read in as {}".format(ind, type(datasets[ind]))
        assert datasets[ind].shape[0] == 512, \
            "Dataset[{}] is of size 512 but was read in as {}".format(
                ind, datasets[ind].shape[0])
        assert type(datasets[ind]._axes[0]) == sidpy.sid.dimension.Dimension, \
            "Dataset should have dimension type of sidpy Dimension, " \
            "but is instead {}".format(type(datasets[ind]._axes))
    os.remove(file_path)
def test_load_dm3_image(self):
    """A dm3 survey image must load with expected pixels and DigiScan metadata."""
    local_file = wget.download(data_path + '/DMReader_Image_SI-Survey.dm3')
    dm_reader = SciFiReaders.DMReader(local_file)
    image = dm_reader.read()
    self.assertEqual(image.title, 'DMReader_Image_SI-Survey')
    self.assertEqual(image.source, 'SciFiReaders.DMReader')
    self.assertEqual(image.data_type.name, 'IMAGE')
    self.assertEqual(image.shape, (512, 512))
    self.assertEqual(float(image[3, 200]), 2940122.0)
    self.assertEqual(image.original_metadata['DM']['dm_version'], 3)
    flyback = image.original_metadata['ImageList']['1']['ImageTags'][
        'DigiScan']['Flyback']
    self.assertEqual(flyback, 500.0)
    os.remove(local_file)
def test_load_dm3_spectrum_image(self):
    """A dm3 EELS spectrum image must load with expected data and metadata."""
    local_file = wget.download(data_path + '/DMReader_SpectrumImage_SI-EELS.dm3')
    dm_reader = SciFiReaders.DMReader(local_file)
    spectrum_image = dm_reader.read()
    self.assertEqual(spectrum_image.title, 'DMReader_SpectrumImage_SI-EELS')
    self.assertEqual(spectrum_image.source, 'SciFiReaders.DMReader')
    self.assertEqual(spectrum_image.data_type.name, 'SPECTRAL_IMAGE')
    self.assertEqual(spectrum_image.shape, (6, 49, 1024))
    self.assertEqual(spectrum_image[0, 3, 200], 2304.0)
    self.assertEqual(spectrum_image.energy_loss[200], 450.0)
    self.assertEqual(spectrum_image.original_metadata['DM']['dm_version'], 3)
    exposure = spectrum_image.original_metadata['ImageList']['1']['ImageTags'][
        'EELS']['Acquisition']['Exposure (s)']
    self.assertEqual(exposure, 0.2)
    os.remove(local_file)
def test_load_nion_h5_file(self):
    """A Nion h5 image stack must load with correct pixels and calibration."""
    local_file = wget.download(data_path + '/NionReader_ImageStack_STO_HAADF.h5')
    nion_reader = SciFiReaders.NionReader(local_file)
    stack = nion_reader.read()
    self.assertEqual(stack.title, '10-Recording of SuperScan (HAADF)')
    self.assertEqual(stack.source, 'NionReader')
    self.assertEqual(stack.data_type.name, 'IMAGE_STACK')
    # Spot-check one pixel in three different frames.
    for frame, expected in ((1, 0.3707197606563568),
                            (13, 0.392993688583374),
                            (17, 0.4997090995311737)):
        self.assertEqual(float(stack[frame, 200, 200]), expected)
    self.assertEqual(stack.shape, (25, 512, 512))
    expected_calibration = {'offset': -4.0, 'scale': 0.015625, 'units': 'nm'}
    self.assertEqual(stack.original_metadata['dimensional_calibrations'][1],
                     expected_calibration)
    os.remove(local_file)
def test_load_nion_ndata_file(self):
    """A Nion ndata image must load with correct pixels, calibration and metadata."""
    local_file = wget.download(data_path + '/NionReader_Image_STO_HAADF.ndata')
    nion_reader = SciFiReaders.NionReader(local_file)
    image = nion_reader.read()
    self.assertEqual(image.title, '19-SuperScan (HAADF) 9')
    self.assertEqual(image.source, 'NionReader')
    self.assertEqual(image.data_type.name, 'IMAGE')
    # Spot-check three pixels.
    for (row, col), expected in (((200, 200), 0.3762475550174713),
                                 ((100, 200), 0.35726848244667053),
                                 ((200, 100), 0.42469730973243713)):
        self.assertEqual(float(image[row, col]), expected)
    self.assertEqual(image.shape, (1024, 1024))
    expected_calibration = {'offset': -8.0, 'scale': 0.015625, 'units': 'nm'}
    self.assertEqual(image.original_metadata['dimensional_calibrations'][1],
                     expected_calibration)
    high_tension = image.original_metadata['metadata']['hardware_source'][
        'autostem']['high_tension_v']
    self.assertEqual(high_tension, 200000.0)
    os.remove(local_file)
def download(self, url):
    """Fetch *url* into the download folder and give the saved file a .jpg suffix."""
    saved = wget.download(url, self.download_folder)
    os.rename(saved, saved + '.jpg')
def test_data_available(self):
    """The EMD test file must download and open as a sidpy Reader."""
    local_file = wget.download(data_path + '/EMDReader_Spectrum_FEI.emd')
    emd_reader = EMDReader(local_file)
    self.assertIsInstance(emd_reader, sidpy.Reader)
    emd_reader.close()
def test_read_spectrum(self):
    """Read a single FEI EMD spectrum and verify data, axis and the complete
    original metadata tree."""
    # BUG FIX: the URL was missing the '/' separator before the file name
    # (every sibling test uses ``data_path + '/<file>'``).
    file_name = wget.download(data_path + '/EMDReader_Spectrum_FEI.emd')
    emd_reader = EMDReader(file_name)
    datasets = emd_reader.read()
    emd_reader.close()
    self.assertIsInstance(datasets[0], sidpy.Dataset)
    self.assertTrue(datasets[0].ndim == 1)
    self.assertTrue(len(datasets) == 1)
    print(datasets[0].original_metadata)
    self.assertTrue(datasets[0].units == 'counts')
    self.assertTrue(datasets[0].quantity == 'intensity')
    self.assertIsInstance(datasets[0].energy_scale, sidpy.Dimension)
    # Reference copy of the complete metadata tree stored in the test file.
    original_metadata = {
        'Core': {
            'MetadataDefinitionVersion': '7.9',
            'MetadataSchemaVersion': 'v1/2013/07',
            'guid': '00000000000000000000000000000000'},
        'Instrument': {
            'ControlSoftwareVersion': '1.6.0',
            'Manufacturer': 'FEI Company',
            'InstrumentId': '6308',
            'InstrumentClass': 'Talos',
            'ComputerName': 'TALOS-D6308'},
        'Acquisition': {
            'AcquisitionStartDatetime': {'DateTime': '1488794225'},
            'AcquisitionDatetime': {'DateTime': '0'},
            'BeamType': '',
            'SourceType': 'XFEG'},
        'Optics': {
            'GunLensSetting': '4',
            'ExtractorVoltage': '4098.9010989010985',
            'AccelerationVoltage': '200000',
            'SpotIndex': '7',
            'C1LensIntensity': '0.20658579468727112',
            'C2LensIntensity': '0.3045177161693573',
            'ObjectiveLensIntensity': '0.94855332374572754',
            'IntermediateLensIntensity': '-0.078506767749786377',
            'DiffractionLensIntensity': '0.40315291285514832',
            'Projector1LensIntensity': '-0.95146656036376953',
            'Projector2LensIntensity': '-0.92141127586364746',
            'MiniCondenserLensIntensity': '-0.91795682907104492',
            'ScreenCurrent': '2.4672221479801257e-010',
            'LastMeasuredScreenCurrent': '2.4672221479801257e-010',
            'FullScanFieldOfView': {
                'x': '2.7148363970517e-006',
                'y': '2.7148363970517e-006'},
            'Focus': '0',
            'StemFocus': '0',
            'Defocus': '0',
            'HighMagnificationMode': 'None',
            'Apertures': {
                'Aperture-0': {
                    'Name': 'C1', 'Number': '1',
                    'MechanismType': 'Motorized', 'Type': 'Circular',
                    'Diameter': '0.002', 'Enabled': '0',
                    'PositionOffset': {
                        'x': '0.0012696000000000001',
                        'y': '0.0013899200000000002'}},
                'Aperture-1': {
                    'Name': 'C2', 'Number': '2',
                    'MechanismType': 'Motorized', 'Type': 'Circular',
                    'Diameter': '6.9999999999999994e-005', 'Enabled': '2',
                    'PositionOffset': {
                        'x': '0.00706064',
                        'y': '0.0013604800000000001'}},
                'Aperture-2': {
                    'Name': 'OBJ', 'Number': '4',
                    'MechanismType': 'Motorized', 'Type': 'None',
                    'PositionOffset': {
                        'x': '0.00014992',
                        'y': '-0.00050016000000000004'}},
                'Aperture-3': {
                    'Name': 'SA', 'Number': '5',
                    'MechanismType': 'Motorized', 'Type': 'Circular',
                    'Diameter': '0.00080000000000000004', 'Enabled': '3',
                    'PositionOffset': {
                        'x': '0.00061903999999999995',
                        'y': '0.00437376'}}},
            'OperatingMode': '2',
            'TemOperatingSubMode': 'None',
            'ProjectorMode': '1',
            'EFTEMOn': 'false',
            'ObjectiveLensMode': 'HM',
            'IlluminationMode': 'None',
            'ProbeMode': '1',
            'CameraLength': '0.098000000000000004'},
        'EnergyFilter': {'EntranceApertureType': ''},
        'Stage': {
            'Position': {
                'x': '-9.3740169600000009e-006',
                'y': '0.00014370383231999999',
                'z': '2.8805790000000001e-005'},
            'AlphaTilt': '0.00011072368774652029',
            'BetaTilt': '0',
            'HolderType': 'Single Tilt'},
        'Scan': {
            'ScanSize': {'width': '0', 'height': '0'},
            'MainsLockOn': 'false',
            'FrameTime': '6.0416000000000007',
            'ScanRotation': '1.6580627893946103'},
        'Vacuum': {'VacuumMode': 'Ready'},
        'Detectors': {
            'Detector-0': {
                'DetectorName': 'BF', 'DetectorType': 'ScanningDetector',
                'Inserted': 'false', 'Enabled': 'true'},
            'Detector-1': {
                'DetectorName': 'DF2', 'DetectorType': 'ScanningDetector',
                'Inserted': 'false', 'Enabled': 'true'},
            'Detector-2': {
                'DetectorName': 'DF4', 'DetectorType': 'ScanningDetector',
                'Inserted': 'false', 'Enabled': 'true'},
            'Detector-3': {
                'DetectorName': 'HAADF', 'DetectorType': 'ScanningDetector',
                'Inserted': 'true', 'Enabled': 'true'},
            'Detector-4': {
                'DetectorName': 'SuperXG21',
                'DetectorType': 'AnalyticalDetector',
                'Inserted': 'true', 'Enabled': 'true',
                'ElevationAngle': '0.38397244000000003',
                'AzimuthAngle': '0.78539816339744828',
                'CollectionAngle': '0.22500000000000001',
                'Dispersion': '10',
                'PulseProcessTime': '3.0000000000000001e-006',
                'RealTime': '0.029570824999999999',
                'LiveTime': '0.0259824552188541',
                'InputCountRate': '0', 'OutputCountRate': '0',
                'AnalyticalDetectorShutterState': '0',
                'OffsetEnergy': '-1000', 'ElectronicsNoise': '31',
                'BeginEnergy': '163'},
            'Detector-5': {
                'DetectorName': 'SuperXG22',
                'DetectorType': 'AnalyticalDetector',
                'Inserted': 'true', 'Enabled': 'true',
                'ElevationAngle': '0.38397244000000003',
                'AzimuthAngle': '2.3561944901923448',
                'CollectionAngle': '0.22500000000000001',
                'Dispersion': '10',
                'PulseProcessTime': '3.0000000000000001e-006',
                'RealTime': '0.029381749999999998',
                'LiveTime': '0.026721048183602016',
                'InputCountRate': '0', 'OutputCountRate': '0',
                'AnalyticalDetectorShutterState': '0',
                'OffsetEnergy': '-1000', 'ElectronicsNoise': '31',
                'BeginEnergy': '164'},
            'Detector-6': {
                'DetectorName': 'SuperXG23',
                'DetectorType': 'AnalyticalDetector',
                'Inserted': 'true', 'Enabled': 'true',
                'ElevationAngle': '0.38397244000000003',
                'AzimuthAngle': '3.9269908169872414',
                'CollectionAngle': '0.22500000000000001',
                'Dispersion': '10',
                'PulseProcessTime': '3.0000000000000001e-006',
                'RealTime': '0.029267149999999999',
                'LiveTime': '0.026188349677848218',
                'InputCountRate': '0', 'OutputCountRate': '0',
                'AnalyticalDetectorShutterState': '0',
                'OffsetEnergy': '-1000', 'ElectronicsNoise': '31',
                'BeginEnergy': '170'},
            'Detector-7': {
                'DetectorName': 'SuperXG24',
                'DetectorType': 'AnalyticalDetector',
                'Inserted': 'true', 'Enabled': 'true',
                'ElevationAngle': '0.38397244000000003',
                'AzimuthAngle': '5.497787143782138',
                'CollectionAngle': '0.22500000000000001',
                'Dispersion': '10',
                'PulseProcessTime': '3.0000000000000001e-006',
                'RealTime': '0.029135249999999998',
                'LiveTime': '0.025585437925304481',
                'InputCountRate': '0', 'OutputCountRate': '0',
                'AnalyticalDetectorShutterState': '0',
                'OffsetEnergy': '-1000', 'ElectronicsNoise': '31',
                'BeginEnergy': '169'},
            'Detector-8': {
                'DetectorName': 'BM-Ceta',
                'DetectorType': 'ImagingDetector'}},
        'BinaryResult': {
            'AcquisitionUnit': '', 'CompositionType': '',
            'Detector': 'SuperXG2', 'Encoding': ''},
        'Sample': '',
        'GasInjectionSystems': '',
        'CustomProperties': {
            'Aperture[C1].Name': {'type': 'string', 'value': '2000'},
            'Aperture[C2].Name': {'type': 'string', 'value': '70'},
            'Aperture[OBJ].Name': {'type': 'string', 'value': 'None'},
            'Aperture[SA].Name': {'type': 'string', 'value': '800'},
            'Detectors[SuperXG21].BilatThresholdHi': {'type': 'double', 'value': '0.00314897'},
            'Detectors[SuperXG21].KMax': {'type': 'double', 'value': '180'},
            'Detectors[SuperXG21].KMin': {'type': 'double', 'value': '120'},
            'Detectors[SuperXG21].PulsePairResolutionTime': {'type': 'double', 'value': '5e-007'},
            'Detectors[SuperXG21].SpectrumBeginEnergy': {'type': 'int32', 'value': '163'},
            'Detectors[SuperXG22].BilatThresholdHi': {'type': 'double', 'value': '0.00312853'},
            'Detectors[SuperXG22].KMax': {'type': 'double', 'value': '180'},
            'Detectors[SuperXG22].KMin': {'type': 'double', 'value': '120'},
            'Detectors[SuperXG22].PulsePairResolutionTime': {'type': 'double', 'value': '5e-007'},
            'Detectors[SuperXG22].SpectrumBeginEnergy': {'type': 'int32', 'value': '164'},
            'Detectors[SuperXG23].BilatThresholdHi': {'type': 'double', 'value': '0.00324329'},
            'Detectors[SuperXG23].KMax': {'type': 'double', 'value': '180'},
            'Detectors[SuperXG23].KMin': {'type': 'double', 'value': '120'},
            'Detectors[SuperXG23].PulsePairResolutionTime': {'type': 'double', 'value': '5e-007'},
            'Detectors[SuperXG23].SpectrumBeginEnergy': {'type': 'int32', 'value': '170'},
            'Detectors[SuperXG24].BilatThresholdHi': {'type': 'double', 'value': '0.00319699'},
            'Detectors[SuperXG24].KMax': {'type': 'double', 'value': '180'},
            'Detectors[SuperXG24].KMin': {'type': 'double', 'value': '120'},
            'Detectors[SuperXG24].PulsePairResolutionTime': {'type': 'double', 'value': '5e-007'},
            'Detectors[SuperXG24].SpectrumBeginEnergy': {'type': 'int32', 'value': '169'},
            'StemMagnification': {'type': 'double', 'value': '40000'}},
        'AcquisitionSettings': {
            'encoding': 'uint16', 'bincount': '4096',
            'StreamEncoding': 'uint16', 'Size': '1048576'}}
    self.assertDictEqual(datasets[0].original_metadata, original_metadata)
    # Expected counts for spectrum channels 100..199.
    array_100_200 = np.array([
        2.90475e+05, 2.17745e+05, 1.32847e+05, 4.95570e+04, 1.41030e+04,
        2.60900e+03, 3.77000e+02, 6.70000e+01, 2.30000e+01, 1.50000e+01,
        4.00000e+00, 6.00000e+00, 7.00000e+00, 6.00000e+00, 5.00000e+00,
        6.00000e+00, 5.00000e+00, 1.00000e+01, 5.00000e+00, 2.10000e+01,
        2.40000e+01, 6.00000e+01, 1.24000e+02, 1.99000e+02, 3.87000e+02,
        4.99000e+02, 5.39000e+02, 5.09000e+02, 3.82000e+02, 2.62000e+02,
        1.19000e+02, 4.30000e+01, 2.40000e+01, 3.00000e+00, 1.00000e+00,
        3.00000e+00, 3.00000e+00, 5.00000e+00, 2.00000e+00, 4.00000e+00,
        4.00000e+00, 3.00000e+00, 4.00000e+00, 3.00000e+00, 4.00000e+00,
        4.00000e+00, 7.00000e+00, 1.30000e+01, 1.60000e+01, 2.20000e+01,
        1.80000e+01, 3.20000e+01, 1.80000e+01, 2.10000e+01, 1.90000e+01,
        9.00000e+00, 3.00000e+00, 1.00000e+00, 0.00000e+00, 1.00000e+00,
        1.00000e+00, 1.00000e+00, 2.00000e+00, 0.00000e+00, 4.00000e+00,
        1.00000e+00, 2.00000e+00, 1.00000e+00, 2.00000e+00, 1.00000e+00,
        2.00000e+00, 0.00000e+00, 1.00000e+00, 2.00000e+00, 2.00000e+00,
        2.00000e+00, 3.00000e+00, 1.00000e+00, 2.00000e+00, 0.00000e+00,
        8.00000e+00, 3.00000e+00, 0.00000e+00, 4.00000e+00, 0.00000e+00,
        3.00000e+00, 2.00000e+00, 2.00000e+00, 1.00000e+00, 2.00000e+00,
        4.00000e+00, 3.00000e+00, 3.00000e+00, 9.00000e+00, 3.00000e+00,
        7.00000e+00, 3.00000e+00, 2.00000e+00, 2.00000e+00, 1.00000e+00])
    self.assertTrue(
        np.allclose(np.array(datasets[0])[100:200], array_100_200,
                    rtol=1e-5, atol=1e-2))
    os.remove(file_name)
def test_load_nanonis_sxm(self):
    """Test reading a Nanonis .sxm scan into 20 sidpy Datasets with metadata."""
    file_path = 'NanonisSXM.sxm'
    wget.download(root_path + "NanonisReader_COOx_sample2286.sxm?raw=true", out=file_path)
    reader = sr.NanonisSXMReader(file_path)
    datasets = reader.read()
    os.remove(file_path)
    assert len(datasets) == 20, "Length of dataset should be 20 but is instead {}".format(len(datasets))
    for ind in range(20):
        assert type(datasets[ind]) == sidpy.sid.dataset.Dataset, \
            "Type of dataset expected is sidpy.Dataset, received {}".format(type(datasets[ind]))
        assert datasets[ind].shape == (256, 256), \
            "Shape of dataset should be (256,256) but instead is {}".format(datasets[ind].shape)
    metadata = datasets[0].metadata
    # Reference copy of the metadata of the first channel (Z, forward).
    original_metadata = {
        'Channel': '14', 'Name': 'Z', 'Unit': 'm', 'Direction': 'forward',
        'Calibration': '-1.260E-7', 'Offset': '0.000E+0',
        'nanonis_version': '2', 'scanit_type': 'FLOAT MSBFIRST',
        'rec_date': '09.07.2020', 'rec_time': '13:16:37',
        'rec_temp': '290.0000000000', 'acq_time': 616.1,
        'scan_pixels': np.array([256, 256]),
        'scan_file': 'C:\\Users\\Administrator\\Documents\\Users\\Kevin Pachuta\\063020\\COOx_sample2286.sxm',
        'scan_time': np.array([1.203, 1.203]),
        'scan_range': np.array([2.5e-07, 2.5e-07]),
        'scan_offset': np.array([1.182551e-06, 1.858742e-06]),
        'scan_angle': '9.000E+1', 'scan_dir': 'up', 'bias': 0.0,
        'z-controller': {'Name': ('cAFM',), 'on': ('1',),
                         'Setpoint': ('2.000E+0 V',),
                         'P-gain': ('5.167E-9 m/V',),
                         'I-gain': ('3.059E-5 m/V/s',),
                         'T-const': ('1.689E-4 s',)},
        'comment': 'New sample from Kevin CoOx nanosheets',
        'nanonismain>session path': 'C:\\Users\\Administrator\\Documents\\Users\\Kevin Pachuta\\063020',
        'nanonismain>sw version': 'Generic 4',
        'nanonismain>ui release': '8181',
        'nanonismain>rt release': '7685',
        'nanonismain>rt frequency (hz)': '10E+3',
        'nanonismain>signals oversampling': '10',
        'nanonismain>animations period (s)': '20E-3',
        'nanonismain>indicators period (s)': '300E-3',
        'nanonismain>measurements period (s)': '500E-3',
        'bias>bias (v)': '0E+0',
        'bias>calibration (v/v)': '1E+0',
        'bias>offset (v)': '0E+0',
        'current>current (a)': '-185.299E-15',
        'current>calibration (a/v)': '999.99900E-12',
        'current>offset (a)': '-353.221E-15',
        'current>gain': 'High',
        'piezo calibration>active calib.': 'Default',
        'piezo calibration>calib. x (m/v)': '15E-9',
        'piezo calibration>calib. y (m/v)': '15E-9',
        'piezo calibration>calib. z (m/v)': '-9E-9',
        'piezo calibration>hv gain x': '14',
        'piezo calibration>hv gain y': '14',
        'piezo calibration>hv gain z': '14',
        'piezo calibration>tilt x (deg)': '0',
        'piezo calibration>tilt y (deg)': '0',
        'piezo calibration>curvature radius x (m)': 'Inf',
        'piezo calibration>curvature radius y (m)': 'Inf',
        'piezo calibration>2nd order corr x (v/m^2)': '0E+0',
        'piezo calibration>2nd order corr y (v/m^2)': '0E+0',
        'piezo calibration>drift x (m/s)': '0E+0',
        'piezo calibration>drift y (m/s)': '0E+0',
        'piezo calibration>drift z (m/s)': '0E+0',
        'piezo calibration>drift correction status (on/off)': 'FALSE',
        'z-controller>z (m)': '109.389E-9',
        'z-controller>controller name': 'cAFM',
        'z-controller>controller status': 'ON',
        'z-controller>setpoint': '2E+0',
        'z-controller>setpoint unit': 'V',
        'z-controller>p gain': '5.16746E-9',
        'z-controller>i gain': '30.5931E-6',
        'z-controller>time const (s)': '168.909E-6',
        'z-controller>tiplift (m)': '0E+0',
        'z-controller>switch off delay (s)': '0E+0',
        'scan>scanfield': '1.18255E-6;1.85874E-6;250E-9;250E-9;90E+0',
        'scan>series name': 'COOx_sample2',
        'scan>channels': 'Current (A);Vert. Deflection (V);Horiz. Deflection (V);Amplitude2 (V);Phase 2 (V);Bias (V);Z (m);Phase (deg);Amplitude (m);Frequency Shift (Hz)',
        'scan>pixels/line': '256',
        'scan>lines': '256',
        'scan>speed forw. (m/s)': '207.779E-9',
        'scan>speed backw. (m/s)': '207.779E-9'}
    data_descriptors = ['Z (m)', 'Z (m)', 'Vert._Deflection (V)', 'Vert._Deflection (V)',
                        'Horiz._Deflection (V)', 'Horiz._Deflection (V)', 'Amplitude2 (V)',
                        'Amplitude2 (V)', 'Phase_2 (V)', 'Phase_2 (V)', 'Bias (V)', 'Bias (V)',
                        'Current (A)', 'Current (A)', 'Phase (deg)', 'Phase (deg)',
                        'Amplitude (m)', 'Amplitude (m)', 'Frequency_Shift (Hz)',
                        'Frequency_Shift (Hz)']
    for ind in range(20):
        data_descriptor = datasets[ind].data_descriptor
        assert data_descriptor == data_descriptors[ind], \
            "Expected data descriptor {} but received {}".format(data_descriptors[ind], data_descriptor)
    for key in original_metadata:
        if isinstance(original_metadata[key], np.ndarray):
            # BUG FIX: the original compared ``a.all() == b.all()``, i.e. two
            # scalar booleans, which passes even for unequal arrays; compare
            # the arrays element-wise instead.
            assert np.array_equal(original_metadata[key], metadata[key]), \
                "Metadata incorrect for key {}, should be {} " \
                "but was read as {}".format(key, original_metadata[key], metadata[key])
        else:
            assert original_metadata[key] == metadata[key], \
                "Metadata incorrect for key {}, should be {} " \
                "but was read as {}".format(key, original_metadata[key], metadata[key])
def test_load_test_dat_file(self):
    """Test that a Nanonis bias-spectroscopy .dat file is read in correctly."""
    file_path = 'Bias-Spectroscopy.dat'
    wget.download(root_path + "NanonisReader_BiasSpectroscopy.dat?raw=true", out=file_path)
    data_translator = sr.NanonisDatReader(file_path)
    datasets = data_translator.read(verbose=False)
    os.remove(file_path)
    assert len(datasets) == 24, "Length of dataset should be 24 but is instead {}".format(len(datasets))
    metadata = datasets[0].metadata
    # Reference copy of the expected file header metadata.
    original_metadata = {'Experiment': 'bias spectroscopy',
                         'Date': '07.07.2020 15:01:50',
                         'User': '',
                         'X (m)': 1.10123e-06,
                         'Y (m)': 1.89724e-06,
                         'Z (m)': 9.92194e-08,
                         'Z offset (m)': 0.0,
                         'Settling time (s)': 0.0002,
                         'Integration time (s)': 0.0006,
                         'Z-Ctrl hold': 'TRUE',
                         'Final Z (m)': 'N/A',
                         'Filter type': 'Gaussian',
                         'Order': 2.0,
                         'Cutoff frq': ''}
    data_descriptors = ['Current (A)', 'Vert. Deflection (V)', 'X (m)', 'Y (m)', 'Z (m)',
                        'Excitation (V)', 'Current [bwd] (A)', 'Vert. Deflection [bwd] (V)',
                        'X [bwd] (m)', 'Y [bwd] (m)', 'Z [bwd] (m)', 'Excitation [bwd] (V)',
                        'Current (A)', 'Vert. Deflection (V)', 'X (m)', 'Y (m)', 'Z (m)',
                        'Excitation (V)', 'Current (A)', 'Vert. Deflection (V)', 'X (m)',
                        'Y (m)', 'Z (m)', 'Excitation (V)']
    dim0_values = [datasets[ind].dim_0.values for ind in range(len(datasets))]
    for key in original_metadata:
        assert original_metadata[key] == metadata[key], \
            "Metadata incorrect for key {}, should be {} " \
            "but was read as {}".format(key, original_metadata[key], metadata[key])
    for ind in range(len(datasets)):
        # FIX: added the missing space between the two halves of the message.
        assert type(datasets[ind]) == sidpy.sid.dataset.Dataset, \
            "Dataset No. {} not read in as sidpy dataset " \
            "but was instead read in as {}".format(ind, type(datasets[ind]))
        assert datasets[ind].labels == ['Voltage (V)'], \
            "Dataset {} label should be a ['Voltage (V)'] but " \
            "is instead {}".format(ind, datasets[ind].labels)
        assert datasets[ind].data_descriptor == data_descriptors[ind], \
            "data descriptor for dataset [{}] is {} but should be {}".format(
                ind, datasets[ind].data_descriptor, data_descriptors[ind])
        assert datasets[ind].shape[0] == 256, \
            "Dataset[{}] is of size 256 but was read in as {}".format(ind, datasets[ind].shape[0])
        assert type(datasets[ind]._axes[0]) == sidpy.sid.dimension.Dimension, \
            "Dataset should have dimension type of sidpy Dimension, " \
            "but is instead {}".format(type(datasets[ind]._axes))
        # BUG FIX: the original compared ``a.all() == b.all()`` (two scalar
        # booleans); compare the arrays element-wise instead.
        # NOTE(review): dim0_values was captured from these same datasets, so
        # this only guards against mutation during iteration — confirm intent.
        assert np.array_equal(datasets[ind].dim_0.values, dim0_values[ind]), \
            "Dimension 0 for dataset {} did not match!".format(ind)