def prepare_test(datasetDir, padSize=0, shuffle=True, scaleFactor=None,
                 logPath='.', isTest=True):
    """
    Loads 3D medical image data and prepares it for testing/inference.

    Arguments:
    - datasetDir: The directory that contains all dataset images
    - padSize: The number of voxels to pad
        `Default`: zero (no padding)
    - shuffle: Accepted for interface compatibility; not used here
        `Default`: True
    - scaleFactor: Defines the scale of the data (0.5 -> 1/2 size)
        `Default`: None (no scaling)
    - logPath: Directory where reports and log output are written
        `Default`: '.' (current directory)
    - isTest: Passed through to load_images to select test-time loading
        `Default`: True
    """
    # Collect every NIfTI volume in the dataset directory.
    img_addrs = glob.glob(os.path.join(datasetDir, '*.nii.gz'))

    train_list_img = img_addrs
    dTrain = load_images(train_list_img, padSize, scaleFactor, isTest=isTest)
    save_list(train_list_img, logPath + '/reports/train_list_images.txt')
    myPrint('------------< Dataset Info >------------', path=logPath)
    myPrint('...Train images: {0}'.format(len(dTrain)), path=logPath)

    return dTrain, img_addrs
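# Usage sketch for prepare_test, not from the original repo: the paths are
# hypothetical, and it assumes logPath already contains a reports/
# subdirectory, since save_list writes there.
#
#   dTest, test_paths = prepare_test('./Dataset/test', padSize=2,
#                                    scaleFactor=0.5, logPath='./logs')
#   print(len(dTest), 'test volumes loaded')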
def createBucket(self, bucket_name):
    if not self.client.bucket_exists(bucket_name):
        if conf.region is not None:
            # Create the bucket in the configured region (the branching
            # only makes sense if the region is actually passed along).
            self.client.make_bucket(bucket_name, location=conf.region)
        else:
            self.client.make_bucket(bucket_name)
    else:
        myPrint("Bucket with name " + bucket_name + " already exists", 11)
def bucketExists(self, bucket_name):
    try:
        return self.client.bucket_exists(bucket_name)
    except InvalidResponseError as e:
        myPrint(e)
        return False
def getUin(self, vid):
    myPrint("Get uin from vid api called")
    url = '%s/idrepository/v1/identity/idvid/%s' % (self.server, vid)
    cookies = {'Authorization': self.token}
    r = requests.get(url, cookies=cookies, verify=self.ssl_verify)
    resp = self.parseResponse(r)
    if conf.debug:
        myPrint("Response: " + dictToJson(resp))
    return resp
def run(self):
    packets = []
    ignored = []
    bucket_names = getJsonFile(bucketListPath)
    for bucket_name in bucket_names:
        # Packet buckets are named with a 29-digit registration ID
        # (non-zero leading digit); everything else is ignored.
        if regMatch(r"^[1-9]\d{28}$", bucket_name):
            packets.append(bucket_name)
        else:
            ignored.append(bucket_name)
    writeJsonFile(packetListPath, packets)
    writeJsonFile(ignoredBucketListPath, ignored)
    myPrint("Total " + str(len(packets)) + " packets found")
    myPrint("Total " + str(len(ignored)) + " ignored buckets found")
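# A self-contained illustration of the packet-name filter above: valid packet
# names are 29-digit registration IDs with a non-zero leading digit. The
# sample values are made up; regMatch is assumed to behave like re.match.
import re

def is_packet_name(name):
    """True for a 29-digit ID that does not start with 0."""
    return re.match(r"^[1-9]\d{28}$", name) is not None

assert is_packet_name("10001100010002420210223073024")        # 29 digits
assert not is_packet_name("my-test-bucket")                   # not numeric
assert not is_packet_name("00001100010002420210223073024")    # leading zero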
def run(self):
    self.m.createBucket(conf.new_bucket_name)
    packet_names = getJsonFile(packetListPath)
    myPrint("Total " + str(len(packet_names)) + " packets found", 12)
    if conf.records is not None:
        packet_names = packet_names[0:conf.records]
    packet_names_chunks = chunkIt(packet_names, conf.threads)
    for i, packet_names_chunk in enumerate(packet_names_chunks):
        myPrint("Chunk " + str(i + 1) + ": total packets to be migrated are "
                + str(len(packet_names_chunk)))
    # Map the chunks to workers once, outside the loop; the original code
    # called pool.map inside the loop (running the full migration once per
    # chunk) and never incremented the chunk counter.
    pool = Pool(conf.threads)
    pool.map(runner, packet_names_chunks)
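# chunkIt is not shown in this repo; a minimal sketch of the behavior the
# caller above relies on (splitting a list into n contiguous, near-equal
# chunks), for illustration only:
def chunk_it(seq, n):
    """Split seq into n contiguous chunks of near-equal length."""
    avg = len(seq) / float(n)
    chunks, last = [], 0.0
    while last < len(seq):
        chunks.append(seq[int(last):int(last + avg)])
        last += avg
    return chunks

# chunk_it(list(range(10)), 3) -> [[0, 1, 2], [3, 4, 5], [6, 7, 8, 9]]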
def deleteBucket(self):
    bucket_name = "my-test-bucket"
    myPrint("Fetching objects level 1 list")
    myPrint("Total objects level 1 " +
            str(len(self.listObjects(bucket_name, False))))
    myPrint("Fetching object recursive list")
    object_names = self.listObjects(bucket_name, True)
    removed_objects = []
    myPrint("Total objects " + str(len(object_names)))
    for obj_name in object_names:
        removed_objects.append(DeleteObject(obj_name))
    # remove_objects returns a lazy iterator; it must be consumed for the
    # deletes to actually be issued, so iterate it even just to log errors.
    errors = self.client.remove_objects(bucket_name, removed_objects)
    for error in errors:
        myPrint(error, 11)
    self.client.remove_bucket(bucket_name)
def __init__(self, warp, args, ioer, emlist):
    self.warp = warp
    self.args = args
    self.ioer = ioer
    self.emlist = emlist
    self.half = args.half
    if self.half:
        self.dtype = torch.half
    else:
        self.dtype = torch.float
    # Unwrap the underlying net when running under DataParallel.
    if isinstance(self.warp, DataParallel):
        self.net = self.warp.module.net
    else:
        self.net = self.warp.net
    self.lrfun = LRLoader.load(args.train['lr_func'])
    self.lr_arg = args.train['lr_arg']
    self.epoch = args.train['epoch']
    self.optimizer = self.__get_opimizer()
    self.save_dir = os.path.join(args.output['result_dir'],
                                 args.output['save_dir'])
    self.printf = myPrint(os.path.join(self.save_dir, "log.txt"))
    self.margin = np.array(args.prepare['margin'])
    self.cropsize = np.array(args.prepare['crop_size'])
    self.small_size = args.rpn['small_size']
    if 'pos_weight_thresh' in args.prepare:
        self.pos_weight_thresh = args.prepare['pos_weight_thresh']
    else:
        self.pos_weight_thresh = 0
def checkHash(self, packet_name):
    myPrint("Migrating " + packet_name, 3)
    bucketObjects = self.m.listObjects(packet_name, True)
    newBucketObjects = self.m.listObjects(conf.new_bucket_name, True,
                                          "/" + packet_name)
    # Pair each source object with its migrated copy by final path segment,
    # then compare content hashes; any mismatch aborts the check.
    for obj in bucketObjects:
        for new_obj in newBucketObjects:
            if getLastPath(obj) == getLastPath(new_obj):
                o1 = self.m.getObject(packet_name, obj)
                o2 = self.m.getObject(conf.new_bucket_name, new_obj)
                h1 = getHash(o1)
                myPrint("Hash of " + obj + ": " + h1)
                h2 = getHash(o2)
                myPrint("Hash of " + new_obj + ": " + h2)
                if h1 == h2:
                    myPrint("Hashes match")
                else:
                    myPrint("Hashes do not match")
                    raise RuntimeError("Hashes do not match")
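# getHash is defined elsewhere; a plausible stand-in that hashes an object's
# raw bytes with SHA-256 (an assumption, shown only to make the comparison
# above concrete):
import hashlib

def get_hash(data: bytes) -> str:
    """Hex SHA-256 digest of an object's raw bytes."""
    return hashlib.sha256(data).hexdigest()

assert get_hash(b"same bytes") == get_hash(b"same bytes")
assert get_hash(b"same bytes") != get_hash(b"other bytes")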
def migrate(self, packet_name):
    myPrint("Migrating " + packet_name, 3)
    objects = self.m.listObjects(packet_name, recursive=True)
    for obj in objects:
        new_obj = packet_name + "/" + obj
        myPrint("Copying from " + packet_name + " -> " + obj)
        myPrint("Copying to " + conf.new_bucket_name + " -> " + new_obj)
        self.m.copyObject(conf.new_bucket_name, new_obj, packet_name, obj)
def prepare_dataset(datasetDir, split=0.9, padSize=0, shuffle=True,
                    scaleFactor=None, dataFraction='full', logPath='.'):
    """
    Loads 3D medical image data and prepares it for training.

    Arguments:
    - datasetDir: The directory that contains all dataset images
    - split: The ratio (0-1) of images for the training data
    - padSize: The number of voxels to pad
        `Default`: zero (no padding)
    - shuffle: Defines if the list of files should be shuffled or not
        `Default`: True (shuffle the images list)
    - scaleFactor: Defines the scale of the data (0.5 -> 1/2 size)
        `Default`: None (no scaling)
    - dataFraction: Use 'half' to keep only half of each split
        `Default`: 'full' (use everything)
    - logPath: Directory where reports and log output are written
        `Default`: '.' (current directory)
    """
    datasets = os.listdir(datasetDir)
    img_addrs = []
    msk_addrs = []
    for dataset in datasets:
        imgs = glob.glob(
            os.path.join(datasetDir + '/' + dataset, '*-patient_*.nii.gz'))
        msks = glob.glob(
            os.path.join(datasetDir + '/' + dataset, '*-liver_*.nii.gz'))
        img_addrs.extend(imgs)
        msk_addrs.extend(msks)

    # Build the train/valid image and mask lists in one pass.
    train_list_img, valid_list_img, train_list_msk, valid_list_msk = split_list(
        img_addrs, msk_addrs, split=split, shuffleList=shuffle)

    if dataFraction == 'half':
        train_list_img = train_list_img[0:int(len(train_list_img) / 2)]
        valid_list_img = valid_list_img[0:int(len(valid_list_img) / 2)]
        train_list_msk = train_list_msk[0:int(len(train_list_msk) / 2)]
        valid_list_msk = valid_list_msk[0:int(len(valid_list_msk) / 2)]

    dTrain = load_images(train_list_img, padSize, scaleFactor)
    mTrain = load_images(train_list_msk, padSize, scaleFactor)
    save_list(train_list_img, logPath + '/reports/train_list_images.txt')
    save_list(train_list_msk, logPath + '/reports/train_list_masks.txt')
    myPrint('------------< Dataset Info >------------', path=logPath)
    myPrint('...Train images: {0}'.format(len(dTrain)), path=logPath)

    dValid = load_images(valid_list_img, padSize, scaleFactor)
    mValid = load_images(valid_list_msk, padSize, scaleFactor)
    save_list(valid_list_img, logPath + '/reports/valid_list_images.txt')
    save_list(valid_list_msk, logPath + '/reports/valid_list_masks.txt')
    myPrint('...Validation images: {0}'.format(len(dValid)), path=logPath)

    return dTrain, mTrain, dValid, mValid
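# Usage sketch, not from the original repo (hypothetical directory layout:
# one subfolder per dataset, each holding *-patient_*.nii.gz volumes and
# matching *-liver_*.nii.gz masks):
#
#   dTrain, mTrain, dValid, mValid = prepare_dataset(
#       './Dataset', split=0.9, padSize=2, dataFraction='half',
#       logPath='./logs')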
def test_getObject(self):
    m = MinioWrapper()
    objects = [
        'RESIDENT/RES_UPDATE/10001100010002420210223073024_evidence',
        'RESIDENT/RES_UPDATE/10001100010002420210223073024_id',
        'RESIDENT/RES_UPDATE/10001100010002420210223073024_optional'
    ]
    myPrint(m.bucketExists("10001100010002420210223073024"))
    myPrint(m.listObjects("10001100010002420210223073024", recursive=True))
    myPrint(m.listObjects("10001100010002420210223073024", recursive=False))
    myPrint(
        m.copyObject(
            "my-test-bucket",
            'RESIDENT/RES_PDATE/10001100010002420210223073024_evidence',
            "10001100010002420210223073024",
            "RESIDENT/RES_UPDATE/10001100010002420210223073024_evidence"))
def __init__(self, warp, args, ioer, emlist):
    self.warp = warp
    self.args = args
    self.ioer = ioer
    self.emlist = emlist
    self.half = args["half"]
    self.dtype = torch.float
    self.splitcomb = SplitComb(args)
    if isinstance(self.warp, DataParallel):
        self.net = self.warp.module.net
    else:
        self.net = self.warp.net
    self.lrfun = LRLoader.load(args["train"]["lr_func"])
    self.lr_arg = args["train"]["lr_arg"]
    self.epoch = args["train"]["epoch"]
    self.optimizer, self.scheduler = self.__get_opimizer(
        args["train"]["start_epoch"] - 1)
    self.save_dir = os.path.join(args["output"]["result_dir"],
                                 args["output"]["save_dir"])
    self.writer = sitk.ImageFileWriter()
    testdir = args["output"]["test_dir"]
    if testdir is None:
        self.testdir = os.path.join(self.save_dir, "testout")
    else:
        self.testdir = os.path.join(self.save_dir, testdir)
    if not os.path.exists(self.testdir):
        os.mkdir(self.testdir)
    if 'choose_top1_connect_region' in args['prepare']:
        self.choose_top1 = args['prepare']['choose_top1_connect_region']
    else:
        self.choose_top1 = False
    if 'choose_topk_vessel_connect_region' in args['prepare']:
        self.choose_topk = args['prepare'][
            'choose_topk_vessel_connect_region']
    else:
        self.choose_topk = False
    self.printf = myPrint(os.path.join(self.save_dir, "log.txt"))
def credentialRequest(self, request):
    myPrint("credentialRequest api called")
    url = '%s/v1/credentialrequest/requestgenerator' % self.server
    cookies = {'Authorization': self.token}
    ts = getTimestamp()
    j = {
        "id": "mosip.credentialrequest",
        "request": request,
        "requesttime": ts,
        "version": "1.0"
    }
    if conf.debug:
        myPrint("Request: " + dictToJson(j))
    r = requests.post(url, cookies=cookies, json=j, verify=self.ssl_verify)
    resp = self.parseResponse(r)
    if conf.debug:
        myPrint("Response: " + dictToJson(resp))
    return resp
def prepare_dataset(datasetDir, split=0.8, padSize=0, shuffle=True,
                    scaleFactor=None, logPath='.'):
    """
    Loads 3D medical image data and prepares it for training.

    Arguments:
    - datasetDir: The directory that contains all dataset images
    - split: The ratio (0-1) of images for the training data
    - padSize: The number of voxels to pad
        `Default`: zero (no padding)
    - shuffle: Defines if the list of files should be shuffled or not
        `Default`: True (shuffle the images list)
    - scaleFactor: Defines the scale of the data (0.5 -> 1/2 size)
        `Default`: None (no scaling)
    - logPath: Directory where reports and log output are written
        `Default`: '.' (current directory)
    """
    # e.g. img_paths = './Dataset/*.nii.gz'
    img_paths = os.path.join(datasetDir, '*.nii.gz')
    img_addrs = glob.glob(img_paths)

    # Build the train/valid images lists.
    training_list, validation_list = split_list(img_addrs, split=split,
                                                shuffleList=shuffle)

    dTrain = load_images(training_list, padSize, scaleFactor)
    save_list(training_list, logPath + '/reports/training_list.txt')
    myPrint('------------< Dataset Info >------------', path=logPath)
    myPrint('...Train images: {0}'.format(len(dTrain)), path=logPath)

    dValid = load_images(validation_list, padSize, scaleFactor)
    save_list(validation_list, logPath + '/reports/validation_list.txt')
    myPrint('...Validation images: {0}'.format(len(dValid)), path=logPath)

    return dTrain, dValid, training_list, validation_list
def authGetToken(self, appid, username, pwd):
    myPrint("authenticate api called")
    url = '%s/v1/authmanager/authenticate/clientidsecretkey' % self.server
    ts = getTimestamp()
    j = {
        "id": "mosip.io.clientId.pwd",
        "metadata": {},
        "version": "1.0",
        "requesttime": ts,
        "request": {
            "appId": appid,
            "clientId": username,
            "secretKey": pwd
        }
    }
    if conf.debug:
        myPrint("Request: " + dictToJson(j))
    r = requests.post(url, json=j, verify=self.ssl_verify)
    resp = self.parseResponse(r)
    if conf.debug:
        myPrint("Response: " + dictToJson(resp))
    token = readToken(r)
    return token
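# Usage sketch with made-up credentials: the returned token is what other
# methods here (e.g. getUin, credentialRequest) send back as the
# Authorization cookie. The constructor arguments mirror the MosipSession
# calls seen elsewhere in this repo; how the class stores the token is an
# assumption.
#
#   ms = MosipSession(conf.server, conf.regproc_client_id,
#                     conf.regproc_secret_key, conf.regproc_app_id)
#   token = ms.authGetToken(conf.regproc_app_id, conf.regproc_client_id,
#                           conf.regproc_secret_key)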
from buzz import buzz
from nikeBG import nikeBG
from utils import myPrint

in_ = input('name: ')
myPrint(buzz(in_))
myPrint(nikeBG(in_))
def link_clicked(self, url):
    if self.waiting:
        return
    utils.myPrint('openLink : ', url)
    QtGui.QDesktopServices.openUrl(url)
def main():
    args, parser = args_parse()
    initLogger(logPath)
    start_time = getTimeInSec()
    myPrint(conf.minio_endpoint)
    try:
        prev_time = start_time
        if args.action == 'get_buckets' or args.action == 'all':
            myPrint("Action: get_buckets", 1)
            GetBuckets().run()
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action get_buckets: " + prstr, 11)
        if args.action == 'find_packets' or args.action == 'all':
            myPrint("Action: find_packets", 1)
            FindPackets().run()
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action find_packets: " + prstr, 11)
        if args.action == 'migrate' or args.action == 'all':
            myPrint("Action: migrate", 1)
            Migration().run()
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action migrate: " + prstr, 11)
        if args.action == 'get_records' or args.action == 'all':
            myPrint("Action: get_records", 1)
            m = MinioWrapper()
            objs = m.listObjects(conf.new_bucket_name, False)
            new_objs = []
            for ob in objs:
                new_objs.append(ob.replace("/", ""))
            writeJsonFile(migratedPackets, new_objs)
            myPrint("Total objects level 1 " + str(len(new_objs)))
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action get_records: " + prstr, 11)
    except Exception:
        # Avoid a bare except so SystemExit/KeyboardInterrupt propagate.
        prev_time, prstr = timeDiff(start_time)
        myPrint("Total time taken by the script: " + prstr, 11)
        formatted_lines = traceback.format_exc()
        myPrint(formatted_lines, 13)
        sys.exit(1)
    prev_time, prstr = timeDiff(start_time)
    myPrint("Total time taken by the script: " + prstr, 11)
    sys.exit(0)
def main():
    args, parser = args_parse()
    initLogger(logPath)
    start_time = getTimeInSec()
    myPrint(conf.minio_endpoint)
    try:
        prev_time = start_time
        if args.action == 'check_conn' or args.action == 'all':
            myPrint("Action: check minio connection", 1)
            m = MinioWrapper()
            myPrint(m.bucketExists("my-test-bucket"))
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action check_conn: " + prstr, 11)
        if args.action == 'remove_bucket' or args.action == 'all':
            myPrint("Action: remove_bucket test", 1)
            m = MinioWrapper()
            myPrint(m.deleteBucket())
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action remove_bucket: " + prstr, 11)
        if args.action == 'check_hash' or args.action == 'all':
            myPrint("Action: check_hash test", 1)
            packet_names = getJsonFile(hashCheckPacketsPacket)
            for packet in packet_names:
                myPrint("Packet name: " + packet, 2)
                Migration().checkHash(packet)
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action check_hash: " + prstr, 11)
        if args.action == 'check_records' or args.action == 'all':
            myPrint("Action: check_records", 1)
            total_buckets = getJsonFile(bucketListPath)
            total_packets = getJsonFile(packetListPath)
            total_ignored = getJsonFile(ignoredBucketListPath)
            total_migrated = getJsonFile(migratedPackets)
            myPrint("total_buckets: " + str(len(total_buckets)))
            myPrint("total_packets: " + str(len(total_packets)))
            myPrint("total_ignored: " + str(len(total_ignored)))
            myPrint("total_migrated: " + str(len(total_migrated)))
            # Packets that were discovered but never migrated.
            myPrint(list(set(total_packets) - set(total_migrated)))
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action check_records: " + prstr, 11)
    except Exception:
        prev_time, prstr = timeDiff(start_time)
        myPrint("Total time taken by the script: " + prstr, 11)
        formatted_lines = traceback.format_exc()
        myPrint(formatted_lines, 13)
        sys.exit(1)
    prev_time, prstr = timeDiff(start_time)
    myPrint("Total time taken by the script: " + prstr, 11)
    sys.exit(0)
def clean(self):
    myPrint("Cleaning stat folder ", 3)
    for item in os.listdir(statPath):
        if item.endswith(".log"):
            os.remove(os.path.join(statPath, item))
def test_util(self):
    print("Okay")
    myPrint(ridToCenterTimestamp("10002100040000220201011155747"))
def test_listBuckets(self):
    print("Okay")
    m = MinioWrapper()
    myPrint(m.listBuckets())
def __init__(self):
    myPrint(conf.minio_endpoint)
    myPrint(conf.access_key)
    myPrint(conf.region)
    self.client = self.createConnection()
def test_bucketExists(self):
    m = MinioWrapper()
    myPrint(m.bucketExists("10001100010002420210223073024"))
def run(self):
    bucket_names = self.m.listBucketNames()
    writeJsonFile(bucketListPath, bucket_names)
    myPrint("Total " + str(len(bucket_names)) + " buckets found")
def main():
    args, parser = args_parse()
    initLogger(logPath)
    start_time = getTimeInSec()
    db = DatabaseSession(conf.db_host, conf.db_port, conf.db_user,
                         conf.db_pass)
    try:
        prev_time = start_time
        if args.action == 'get_vids' or args.action == 'all':
            myPrint("Action: get_vids", 1)
            vids = []
            vid_dicts = db.getVids()
            for vid_dict in vid_dicts:
                vids.append(vid_dict['vid'])
            writeJsonFile(vidListPath, vids)
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action get_vids: " + prstr, 11)
        if args.action == 'fetch_info' or args.action == 'all':
            output = []
            myPrint("Action: fetch_info", 1)
            ms = MosipSession(conf.server, conf.regproc_client_id,
                              conf.regproc_secret_key, conf.regproc_app_id)
            vids = getJsonFile(vidListPath)
            for vid in vids:
                myPrint("Operating on VID " + vid, 3)
                res = ms.getUin(vid)
                uin = res['identity']['UIN']
                if conf.debug:
                    myPrint("UIN: " + uin)
                # The UIN modulo selects which salt shard to use.
                modulo = int(uin) % conf.idrepo_modulo
                myPrint("Modulo: " + str(modulo))
                salt_row = db.getHash(modulo)
                if salt_row is not None:
                    salt = salt_row['salt']
                    if conf.debug:
                        myPrint("Salt: " + salt)
                    uin_hash = hashlib.sha256(
                        bytes(uin + salt, 'utf-8')).hexdigest()
                    mod_uin_hash = str(modulo) + "_" + uin_hash.upper()
                    rid_row = db.getRid(mod_uin_hash)
                    if rid_row is not None:
                        rid = rid_row['rid']
                        myPrint("RID found")
                        if conf.debug:
                            myPrint("RID: " + rid)
                        center_id, timestamp = ridToCenterTimestamp(rid)
                        output.append({
                            'vid': vid,
                            'uin': uin,
                            'mod_uin_hash': mod_uin_hash,
                            'salt': salt,
                            'rid': rid,
                            'center_id': center_id,
                            'timestamp': timestamp
                        })
                    else:
                        raise RuntimeError(
                            "RID not found for mod_uin_hash: " +
                            mod_uin_hash)
                else:
                    raise RuntimeError("Salt not found for modulo: " +
                                       str(modulo))
            writeJsonFile(credentialPreparedDataPath, output)
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action fetch_info: " + prstr, 11)
        if args.action == 'reprint' or args.action == 'all':
            myPrint("Action: reprint", 1)
            output = []
            vids = getJsonFile(credentialPreparedDataPath)
            ms = MosipSession(conf.server, conf.ida_client_id,
                              conf.ida_secret_key, conf.ida_app_id)
            for vidInfo in vids:
                myPrint("VID: " + vidInfo['vid'], 3)
                data = {
                    "id": vidInfo['vid'],
                    "credentialType": conf.credential_type,
                    "issuer": conf.partner_id,
                    "recepiant": "",  # field name spelled as the API expects
                    "user": "******",
                    "encrypt": False,
                    "encryptionKey": "",
                    "sharableAttributes": [],
                    "additionalData": {
                        'centerId': vidInfo['center_id'],
                        'creationDate': vidInfo['timestamp'],
                        'registrationId': vidInfo['rid']
                    }
                }
                json_data = json.dumps(data, separators=(',', ':'))
                myPrint(json_data)
                # Skip VIDs that already have a pending credential request.
                if db.checkRequestInCredentialTransaction(json_data) is None:
                    resp = ms.credentialRequest(data)
                    output.append(resp)
                else:
                    myPrint("Skipping credential request", 11)
            writeJsonFile(vidRequestId, output)
            myPrint('Input VIDs: ' + str(len(vids)), 12)
            myPrint('Output RequestIds: ' + str(len(output)), 12)
            prev_time, prstr = timeDiff(prev_time)
            myPrint("Time taken by Action reprint: " + prstr, 11)
        db.closeAll()
    except Exception:
        db.closeAll()
        prev_time, prstr = timeDiff(start_time)
        myPrint("Total time taken by the script: " + prstr, 11)
        formatted_lines = traceback.format_exc()
        myPrint(formatted_lines, 13)
        sys.exit(1)
    prev_time, prstr = timeDiff(start_time)
    myPrint("Total time taken by the script: " + prstr, 11)
    sys.exit(0)
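# The fetch_info action above hinges on one derived lookup key: the UIN
# modulo a configured divisor picks a salt shard, and the key is that modulo
# joined to the upper-cased SHA-256 of UIN+salt. A self-contained sketch
# with made-up values:
import hashlib

def mod_uin_hash(uin: str, salt: str, divisor: int) -> str:
    """Derive the '<modulo>_<SHA256(uin+salt)>' lookup key used above."""
    modulo = int(uin) % divisor
    digest = hashlib.sha256(bytes(uin + salt, 'utf-8')).hexdigest()
    return str(modulo) + "_" + digest.upper()

# mod_uin_hash('4920837560127', 'somesalt', 1000) -> '127_<64 hex chars>'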