def requestAvatarId(self, credentials): try: pydas.login(email=credentials.username, password=credentials.password, application='Midasftp Server', url=self.url) except pydas.exceptions.PydasException as detail: print "Caught PydasException: ", detail return defer.fail(error.LoginFailed("Invalid email or password")) return defer.succeed((credentials.username, pydas, self.url))
def uploadItem(self, itemName, outputFolderId, out_file=None, item_description=None): # read everything in the outdir and upload it as a single item # create a new item # need a folder id (email, apiKey, url) = self.pydasParams pydas.login(email=email, api_key=apiKey, url=url) if item_description is not None: item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId, description=item_description) else: item = pydas.session.communicator.create_item(pydas.session.token, itemName, outputFolderId) item_id = item['item_id'] if out_file is not None: # only upload this one file upload_token = pydas.session.communicator.generate_upload_token(pydas.session.token, item_id, out_file) filepath=os.path.join(self.outdir, out_file) pydas.session.communicator.perform_upload(upload_token, out_file, itemid=item_id, filepath=filepath) else: for filename in os.listdir(self.outdir): upload_token = pydas.session.communicator.generate_upload_token(pydas.session.token, item_id, filename) filepath=os.path.join(self.outdir, filename) pydas.session.communicator.perform_upload(upload_token, filename, itemid=item_id, filepath=filepath) # set the output item as an output for the job method = 'midas.pyslicer.add.job.output.item' parameters = {} parameters['token'] = pydas.session.token parameters['job_id'] = self.jobId parameters['item_id'] = item_id print parameters pydas.session.communicator.request(method, parameters) return item_id
def sanity_check(sync_setting): """ Sanity check for input parameters """ if sync_setting.mode not in ('check', 'upload', 'download'): print ("Caught a sanity check error: mode %s is not supported! " \ "Only 3 modes are supported: check, upload or download." % \ sync_setting.mode) return False if not os.path.isdir(sync_setting.local_root_dir): print ("Caught a sanity check error: data directory %s does not exist!" \ % sync_setting.local_root_dir) return False elif sync_setting.mode == 'download' and \ not os.access(sync_setting.local_root_dir, os.W_OK): print ("Caught a sanity check error: in download mode, " \ "write permission is needed for data directory %s !" \ % sync_setting.local_root_dir) return False try: pydas.login(email=sync_setting.midas_user_email, api_key=sync_setting.midas_apikey, url=sync_setting.midas_url) pydas.session.communicator.folder_get(pydas.session.token, sync_setting.midas_root_folder_id) if sync_setting.mode == 'upload': return _upload_permision_check(sync_setting.local_root_dir, sync_setting.midas_root_folder_id) except pydas.exceptions.PydasException as detail: print "Caught PydasException: ", detail return False return True
def connect_to_midas(email=None, api_key=None, midas_url='https://midas3.kitware.com/midas/'):
    """Log in to the Midas server and return (session, communicator).

    Without an api_key, pydas falls back to its interactive prompt after the
    notice is printed.
    """
    if api_key:
        pydas.login(url=midas_url, email=email, api_key=api_key)
    else:
        print('Please enter your login information for ' + midas_url)
        pydas.login(url=midas_url, email=email)
    session = pydas.session
    return session, session.communicator
def uploadOutputImpl(self):
    """Create an output folder for this job and upload the transformed
    volume and the transform into it as two separate items."""
    #print "segmodeluploadoutputimpl"
    (email, apiKey, url) = self.pydasParams
    pydas.login(email=email, api_key=apiKey, url=url)
    folder = pydas.session.communicator.create_folder(
        pydas.session.token, 'output_' + self.jobId, self.outputFolderId)
    folder_id = folder['folder_id']
    # The returned item ids were previously bound to an unused local;
    # uploadItem already registers each item as a job output itself.
    self.uploadItem(self.outputVolumeName, folder_id,
                    self.transformed_volume, item_description='output volume')
    self.uploadItem(self.outputTransformName, folder_id,
                    self.transform, item_description='output transform')
def connect_to_midas(email=None, api_key=None, midas_url='http://midas3.kitware.com/midas/'):
    """Log in to the Midas server and return (session, communicator).

    When no api_key is supplied, a notice is printed and pydas prompts
    interactively for credentials.
    """
    if api_key:
        pydas.login(url=midas_url, email=email, api_key=api_key)
    else:
        print('Please enter your login information for ' + midas_url)
        pydas.login(url=midas_url, email=email)
    session = pydas.session
    return session, session.communicator
def connect_to_midas(email=None, api_key=None):
    """Log in to the Kitware Midas server and return (session, communicator).

    Exits the process with status 1 if the login fails.
    """
    midas_url = 'https://midas3.kitware.com/midas/'
    #pydas.login(url=midas_url, email=email, api_key=api_key)
    try:
        pydas.login(url=midas_url, email=email, api_key=api_key)
    except Exception:
        # Narrowed from a bare ``except:`` so that SystemExit and
        # KeyboardInterrupt are no longer swallowed; login failures still
        # produce the same message and exit code.
        print('Error occurred while logging in to ' + midas_url)
        sys.exit(1)
    session = pydas.session
    communicator = session.communicator
    return session, communicator
def reportMidasStatus(self, status, condition=None): # TODO add these methods to pydas # TODO add condition to api call (email, apiKey, url) = self.pydasParams pydas.login(email=email, api_key=apiKey, url=url) method = 'midas.pyslicer.update.job' parameters = {} parameters['token'] = pydas.session.token parameters['job_id'] = self.jobId parameters['status'] = status if condition is not None: parameters['condition'] = condition print parameters pydas.session.communicator.request(method, parameters)
def downloadItem(self, itemId):
    """Download a Midas item into self.datadir, expanding any zip files it
    contains, and return the local path named after the item."""
    (email, apiKey, url) = self.pydasParams
    pydas.login(email=email, api_key=apiKey, url=url)
    pydas.api._download_item(itemId, self.datadir)
    # unzip any zipped files
    for filename in os.listdir(self.datadir):
        if filename.endswith('.zip'):
            filepath = os.path.join(self.datadir, filename)
            # Use a context manager so the archive is closed even if
            # extraction raises, and stop shadowing the zip() builtin.
            with zipfile.ZipFile(filepath) as archive:
                archive.extractall(self.datadir)
    # return the path to the name of the item
    item = pydas.session.communicator.item_get(pydas.session.token, itemId)
    return os.path.join(self.datadir, item['name'])
def register_events(self): # get all the events, register them with the midas server self.define_events() events = self.events_map.values() method = 'midas.pyslicer.add.jobstatuses' parameters = {} json_events = json.dumps([str(event) for event in events]) print json_events (email, apiKey, url) = self.pydasParams pydas.login(email=email, api_key=apiKey, url=url) parameters['token'] = pydas.session.token parameters['events'] = json_events event_id_to_jobstatus_id = pydas.session.communicator.request(method, parameters) for (event_id, jobstatus_id) in event_id_to_jobstatus_id.items(): event = self.events_map[event_id] event.jobstatus_id = jobstatus_id
def login(): global token, mc, gc # TODO - not global token = pydas.login(email=MIDAS_LOGIN, api_key=MIDAS_API_KEY, url=MIDAS_URL) mc = pydas.session.communicator gc = girder_client.GirderClient(apiUrl=GIRDER_URL) gc.authenticate(username=GIRDER_LOGIN, apiKey=GIRDER_API_KEY)
def __init__(self,email,token=None,url=None):
    # Local filesystem location of the Midas assetstore; bitstreams are
    # presumably read directly from disk rather than over HTTP —
    # TODO confirm against the rest of this class.
    self.__assetroot = "/usr/local/src/Midas3/data/assetstore/"
    self.email=email
    # NOTE(review): ``token`` is passed to pydas as ``api_key`` — it appears
    # to be an API key rather than a session token; verify with callers.
    self.token = pydas.login(email,url=url,api_key=token)
    self.url = url
    self.createDriver()
    user = self.driver.get_user_by_email(self.email)
    # Per-user scratch directory under /tmp, keyed by the user's uuid.
    self.writeRoot = os.path.join("/","tmp",user['uuid'])
    if( not os.path.exists(self.writeRoot) ):
        os.mkdir(self.writeRoot)
def sanity_check(midas_setting, excel_setting): """ Sanity check for input parameters """ if not os.path.isfile(excel_setting.excel_file): print ("Caught a sanity check error: Metadata source file %s does not exist!" \ % excel_setting.excel_file) return False elif os.path.splitext(excel_setting.excel_file)[1] not in ['.xls', '.xlsx']: print ("Caught a sanity check error: Metadata source file %s is not an excel file!" \ % excel_setting.excel_file) return False try: pydas.login(email=midas_setting.midas_user_email, api_key=midas_setting.midas_apikey, url=midas_setting.midas_url) pydas.session.communicator.folder_get(pydas.session.token, midas_setting.midas_root_folder_id) return _midas_permision_check(midas_setting.midas_root_folder_id) except pydas.exceptions.PydasException as detail: print "Caught PydasException: ", detail return False return True
def uploadToMidas(processingDir, midasEmail, midasApiKey, midasUrl, midasDestination):
    """
    rename DICOM files and put files with same SeriesInstanceUID into the
    same directory; upload files to Midas using Pydas, one item per
    directory.  Each series directory is removed after a successful upload.
    """
    pydas.login(email=midasEmail, api_key=midasApiKey, url=midasUrl)
    # After each item upload, ask the server to extract its DICOM metadata.
    extract_dicom_callback = lambda communicator, token, item_id: \
        communicator.extract_dicommetadata(token, item_id)
    pydas.add_item_upload_callback(extract_dicom_callback)
    series_dirs = os.listdir(processingDir)
    for series_dir in series_dirs:
        series_dir_abspath = os.path.join(processingDir, series_dir)
        # fixed typo in the log message: "DOCOM" -> "DICOM"
        logger.info(
            "use Pydas to upload DICOM files who have SeriesInstanceUID : %s"
            % series_dir)
        pydas.upload(series_dir_abspath, destination=midasDestination,
                     leaf_folders_as_items=True)
        shutil.rmtree(series_dir_abspath)
    return True
def main():
    # Pydas-based downloader for AngioTK datasets hosted on a Midas server.
    # NOTE(review): near-duplicate of the other AngioTK main() in this file.
    serverURL = "http://vivabrain.u-strasbg.fr/midas"
    print "Pydas AngioTK data downloader"
    print "Usage: " + sys.argv[0] + " <optinal:data_url_in_AngioTK_Community> ..."
    print "Example: " + sys.argv[0] + ""
    print " -> will download all the public and private datasets"
    print "Example: " + sys.argv[0] + " Public"
    print " -> will download all the public datasets"
    print ""
    # Build an array of data to download
    # defaults to public and private data
    baseURL = "/communities/AngioTK"
    dataURL = ["Public", "Private"]
    # If we have arguments, we replace the default directories to be downloaded
    if (len(sys.argv) > 1):
        dataURL = []
        for i in range(1, len(sys.argv)):
            dataURL.append(sys.argv[i])
    print "The following data will be downloaded:"
    for i in range(len(dataURL)):
        print "- " + baseURL + "/" + dataURL[i]
    print ""
    print "Using pydas " + pydas.__version__
    print "Using server URL: " + serverURL
    core_driver = pydas.drivers.CoreDriver(serverURL)
    print "Server version: " + core_driver.get_server_version()
    # Interactive login: no credentials supplied, so pydas prompts.
    token = pydas.login(url=serverURL)
    print "API token = \"" + token + "\""
    # Eventually download data
    for i in range(len(dataURL)):
        remoteURL = baseURL + "/" + dataURL[i]
        localPath = "."  #os.path.dirname("." + baseURL + "/" + dataURL[i])
        if (not os.path.exists(localPath)):
            os.makedirs(localPath)
        #else:
        #shutil.rmtree(localPath)
        #os.makedirs(localPath)
        pydas.download(remoteURL, local_path=localPath)
def main():
    # Pydas-based downloader for AngioTK datasets hosted on a Midas server.
    # NOTE(review): near-duplicate of the other AngioTK main() in this file.
    serverURL = "http://vivabrain.u-strasbg.fr/midas"
    print "Pydas AngioTK data downloader"
    print "Usage: " + sys.argv[0] + " <optinal:data_url_in_AngioTK_Community> ..."
    print "Example: " + sys.argv[0] + ""
    print " -> will download all the public and private datasets"
    print "Example: " + sys.argv[0] + " Public"
    print " -> will download all the public datasets"
    print ""
    # Build an array of data to download
    # defaults to public and private data
    baseURL = "/communities/AngioTK"
    dataURL = [ "Public", "Private" ]
    # If we have arguments, we replace the default directories to be downloaded
    if(len(sys.argv) > 1):
        dataURL = []
        for i in range(1, len(sys.argv)):
            dataURL.append(sys.argv[i])
    print "The following data will be downloaded:"
    for i in range(len(dataURL)):
        print "- " + baseURL + "/" + dataURL[i]
    print ""
    print "Using pydas " + pydas.__version__
    print "Using server URL: " + serverURL
    core_driver = pydas.drivers.CoreDriver(serverURL)
    print "Server version: " + core_driver.get_server_version()
    # Interactive login: no credentials supplied, so pydas prompts.
    token = pydas.login(url=serverURL)
    print "API token = \"" + token + "\""
    # Eventually download data
    for i in range(len(dataURL)):
        remoteURL = baseURL + "/" + dataURL[i]
        localPath = "."  #os.path.dirname("." + baseURL + "/" + dataURL[i])
        if(not os.path.exists(localPath)):
            os.makedirs(localPath)
        #else:
        #shutil.rmtree(localPath)
        #os.makedirs(localPath)
        pydas.download(remoteURL, local_path=localPath)
""" import pydas import os import sys import json if __name__ == "__main__": configFile = sys.argv[1] downloadTo = sys.argv[2] findFolder = sys.argv[3] fid = open(configFile).read() config = json.loads(fid) id = '10255' pydas.login(email=config['email'], api_key=config['api_key'], url=config['url']) if not os.path.exists(downloadTo): os.makedirs(downloadTo) _, id = pydas.api._search_folder_for_item_or_folder(findFolder, id) if id > 0: dataDir = os.path.join(downloadTo, findFolder) pydas.api._download_folder_recursive(id, downloadTo)
import pydas

# Connection settings for the target Midas instance.
MIDAS_URL = 'http://127.0.0.1'
MIDAS_LOGIN = '******'
MIDAS_API_KEY = 'API_KEY'

token = pydas.login(email=MIDAS_LOGIN, api_key=MIDAS_API_KEY, url=MIDAS_URL)
mc = pydas.session.communicator


def handle_item(item, bc):
    # Print the breadcrumb path ending at this item, '/'-joined.
    bc = bc + (item['name'],)
    print '/'.join(bc)


def handle_folder(folder, bc):
    # Recurse depth-first through subfolders, then report each item with
    # the accumulated breadcrumb tuple.
    bc = bc + (folder['name'],)
    children = mc.folder_children(token, folder['folder_id'])
    folders = children['folders']
    items = children['items']
    for child in folders:
        handle_folder(child, bc)
    for item in items:
        handle_item(item, bc)


def handle_community(community):
    # Communities are rooted under a synthetic 'collection' breadcrumb.
    bc = ('collection', community['name'])
    children = mc.get_community_children(community['community_id'], token)
    folders = children['folders']
    for folder in folders:
        handle_folder(folder, bc)


def handle_user(user):
except ImportError:
    raise ImportError(
        """ Have you installed docopt? If not, you will need to either system-wide or within a virtualenv: pip install docopt or pip install --user docopt """
    )


# Parse command-line arguments from the module docstring.
argv = docopt(__doc__, version="0.1")

midasURL = "http://slicer.kitware.com/midas3"
# Password login when no API key was supplied, API-key login otherwise.
if argv["--apikey"] is None:
    sessionToken = pydas.login(
        url=midasURL, email=argv["--email"], password=argv["--password"]
    )
else:
    sessionToken = pydas.login(
        url=midasURL, email=argv["--email"], api_key=argv["--apikey"]
    )
communicator = pydas.session.communicator
# Find the CoreDriver among the communicator's drivers; ``driver`` keeps the
# match after the break.
for driver in communicator.drivers:
    if isinstance(driver, pydas.drivers.CoreDriver):
        break
community = driver.get_community_by_name(name="BRAINSTools")
# Locate the community's "Public" top-level folder; ``folder`` keeps the
# match after the break.
for folder in driver.folder_children(
    token=sessionToken, folder_id=community["folder_id"]
)["folders"]:
    if folder["name"] == "Public":
        break
try:
    from docopt import docopt
except ImportError:
    raise ImportError(""" Have you installed docopt? If not, you will need to either system-wide or within a virtualenv: pip install docopt or pip install --user docopt """)

# Parse command-line arguments from the module docstring.
argv = docopt(__doc__, version='0.1')

midasURL = "http://slicer.kitware.com/midas3"
# Password login when no API key was supplied, API-key login otherwise.
if argv['--apikey'] is None:
    sessionToken = pydas.login(url=midasURL, email=argv['--email'],
                               password=argv['--password'])
else:
    sessionToken = pydas.login(url=midasURL, email=argv['--email'],
                               api_key=argv['--apikey'])
communicator = pydas.session.communicator
# Find the CoreDriver among the communicator's drivers; ``driver`` keeps
# the match after the break.
for driver in communicator.drivers:
    if isinstance(driver, pydas.drivers.CoreDriver):
        break
community = driver.get_community_by_name(name="BRAINSTools")
# Locate the community's "Public" top-level folder.
for folder in driver.folder_children(token=sessionToken,
                                     folder_id=community['folder_id'])['folders']:
    if folder['name'] == u'Public':
        break
publicFolderID = folder['folder_id']

# Walk the directory given on the command line and process every file.
dirpath = os.path.abspath(argv['DIR'])
assert os.path.isdir(dirpath), "Not a directory"
for root, dirs, files in os.walk(dirpath):
    for filename in files:
try:
    from docopt import docopt
except ImportError:
    raise ImportError(""" Have you installed docopt? If not, you will need to either system-wide or within a virtualenv: pip install docopt or pip install --user docopt """)

# Parse command-line arguments from the module docstring.
argv = docopt(__doc__, version="0.1")

midasURL = "http://slicer.kitware.com/midas3"
# Password login when no API key was supplied, API-key login otherwise.
if argv["--apikey"] is None:
    sessionToken = pydas.login(url=midasURL, email=argv["--email"],
                               password=argv["--password"])
else:
    sessionToken = pydas.login(url=midasURL, email=argv["--email"],
                               api_key=argv["--apikey"])
communicator = pydas.session.communicator
# Find the CoreDriver among the communicator's drivers; ``driver`` keeps
# the match after the break.
for driver in communicator.drivers:
    if isinstance(driver, pydas.drivers.CoreDriver):
        break
community = driver.get_community_by_name(name="BRAINSTools")
# Locate the community's "Public" top-level folder and remember its id.
for folder in driver.folder_children(
        token=sessionToken, folder_id=community["folder_id"])["folders"]:
    if folder["name"] == "Public":
        break
publicFolderID = folder["folder_id"]
try:
    from docopt import docopt
except ImportError:
    raise ImportError(""" Have you installed docopt? If not, you will need to either system-wide or within a virtualenv: pip install docopt or pip install --user docopt """)

# Parse command-line arguments from the module docstring.
argv = docopt(__doc__, version='0.1')

midasURL = "http://slicer.kitware.com/midas3"
# Password login when no API key was supplied, API-key login otherwise.
if argv['--apikey'] is None:
    sessionToken = pydas.login(url=midasURL, email=argv['--email'],
                               password=argv['--password'])
else:
    sessionToken = pydas.login(url=midasURL, email=argv['--email'],
                               api_key=argv['--apikey'])
communicator = pydas.session.communicator
# Find the CoreDriver among the communicator's drivers; ``driver`` keeps
# the match after the break.
for driver in communicator.drivers:
    if isinstance(driver, pydas.drivers.CoreDriver):
        break
community = driver.get_community_by_name(name="BRAINSTools")
# Locate the community's "Public" top-level folder and remember its id.
for folder in driver.folder_children(
        token=sessionToken, folder_id=community['folder_id'])['folders']:
    if folder['name'] == 'Public':
        break
publicFolderID = folder['folder_id']
def main(argv=None):
    # Download the "Designed Database of MR Brain Images of Healthy
    # Individuals" from a MIDAS server, one patient folder at a time.
    if argv is None:
        argv = sys.argv
    parser = OptionParser()
    parser.add_option("", "--outDir", help="Directory where to store downloaded files", default=".")
    parser.add_option("", "--config", help="MIDAS configuration file")
    parser.add_option("", "--logto", help="Logging file")
    parser.add_option("", "--logat", help="Logging level (see code)", default="warning")
    (options, args) = parser.parse_args()
    # Configure logging
    logging.basicConfig(
        level=LOGGING_LEVELS.get(options.logat, logging.NOTSET),
        filename=options.logto,
        format='%(asctime)s [%(funcName)s] %(levelname)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    logger = logging.getLogger()
    # Read MIDAS loging credentials
    if (options.config is None):
        print "Config file missing!"
        return -1
    config_fid = open(options.config).read()
    config = json.loads(config_fid)
    # Folder ID for the top-level data folder in MIDAS
    # Here: Designed Database of MR Brain Images of Healthy Individuals
    base_folder_id = '8051'
    pydas.login(email=config['email'], api_key=config['api_key'], url=config['url'])
    # Get attributes for top-level folder
    cur_folder = pydas.session.communicator.folder_get(pydas.session.token, base_folder_id)
    logger.debug("Output directory = %s" % options.outDir)
    download_dir = options.outDir
    if not os.path.exists(download_dir):
        # NOTE(review): only warns — execution continues and the per-patient
        # makedirs below creates subdirectories anyway; confirm whether an
        # early return was intended here.
        print "Directory %s not existent!" % download_dir
    # Currently, max(patientID) = 109
    patient_id_range = range(1, 110)
    for patient_id in patient_id_range:
        folder_search_str = "Normal-%.3d" % patient_id
        (dummy, found_patient_folder_id) = pydas.api._search_folder_for_item_or_folder(
            folder_search_str, base_folder_id)
        if (found_patient_folder_id > 0):
            data_dir = os.path.join(download_dir, "Normal-%.3d" % patient_id)
            cur_children = pydas.session.communicator.folder_children(
                pydas.session.token, found_patient_folder_id)
            # Check if we have auxillary data available in the
            # current patient folder
            has_aux = False
            for sub_folder in cur_children['folders']:
                if sub_folder['name'] == 'AuxillaryData':
                    has_aux = True
            # In case we found auxillary data, download ...
            if has_aux:
                if (not os.path.exists(data_dir)):
                    os.makedirs(data_dir)
                logger.debug("Downloading patient data to %s" % data_dir)
                for sub_folder in cur_children['folders']:
                    item_id = -1
                    # We need the MRA data
                    if sub_folder['name'] == 'MRA':
                        target_file = "Normal%.3d-MRA.mha" % patient_id
                        item_id = item_search(target_file, sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning("%s not found!" % target_file)
                    # ... ,the T1-Flash data
                    elif sub_folder['name'] == 'T1-Flash':
                        target_file = "Normal%.3d-T1-Flash.mha" % patient_id
                        item_id = item_search(target_file, sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning("%s not found!" % target_file)
                    # and the 1) extracted vascular network data (.tre) as well
                    # as the skull-stripped T1-Flash MR images
                    elif sub_folder['name'] == 'AuxillaryData':
                        item_id = item_search("VascularNetwork.tre",
                                              sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning(
                                "Could not find VascularNetwork.tre")
                        item_id = item_search("SkullStripped-T1-Flash.mha",
                                              sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning(
                                "Could not find SkullStripped-T1-Flash.mha")
            else:
                logger.warning(
                    "No AuxillaryData directory, skipping patient %d!"
                    % patient_id)
def main(argv=None):
    # Download the "Designed Database of MR Brain Images of Healthy
    # Individuals" from a MIDAS server, one patient folder at a time.
    if argv is None:
        argv = sys.argv
    parser = OptionParser()
    parser.add_option("", "--outDir", help="Directory where to store downloaded files", default=".")
    parser.add_option("", "--config", help="MIDAS configuration file")
    parser.add_option("", "--logto", help="Logging file")
    parser.add_option("", "--logat", help="Logging level (see code)", default="warning")
    (options, args) = parser.parse_args()
    # Configure logging
    logging.basicConfig(level=LOGGING_LEVELS.get(options.logat, logging.NOTSET),
                        filename=options.logto,
                        format='%(asctime)s [%(funcName)s] %(levelname)s: %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    logger = logging.getLogger()
    # Read MIDAS loging credentials
    if (options.config is None):
        print "Config file missing!"
        return -1
    config_fid = open(options.config).read()
    config = json.loads(config_fid)
    # Folder ID for the top-level data folder in MIDAS
    # Here: Designed Database of MR Brain Images of Healthy Individuals
    base_folder_id = '8051'
    pydas.login(
        email=config['email'], api_key=config['api_key'], url=config['url'])
    # Get attributes for top-level folder
    cur_folder = pydas.session.communicator.folder_get(
        pydas.session.token, base_folder_id)
    logger.debug("Output directory = %s" % options.outDir)
    download_dir = options.outDir;
    if not os.path.exists(download_dir):
        # NOTE(review): only warns — execution continues and the per-patient
        # makedirs below creates subdirectories anyway; confirm whether an
        # early return was intended here.
        print "Directory %s not existent!" % download_dir
    # Currently, max(patientID) = 109
    patient_id_range = range(1,110)
    for patient_id in patient_id_range:
        folder_search_str = "Normal-%.3d" % patient_id
        (dummy, found_patient_folder_id) = pydas.api._search_folder_for_item_or_folder(
            folder_search_str, base_folder_id)
        if (found_patient_folder_id > 0):
            data_dir = os.path.join(download_dir, "Normal-%.3d" % patient_id)
            cur_children = pydas.session.communicator.folder_children(
                pydas.session.token, found_patient_folder_id)
            # Check if we have auxillary data available in the
            # current patient folder
            has_aux = False
            for sub_folder in cur_children['folders']:
                if sub_folder['name'] == 'AuxillaryData':
                    has_aux = True
            # In case we found auxillary data, download ...
            if has_aux:
                if (not os.path.exists(data_dir)):
                    os.makedirs(data_dir)
                logger.debug("Downloading patient data to %s" % data_dir)
                for sub_folder in cur_children['folders']:
                    item_id = -1
                    # We need the MRA data
                    if sub_folder['name'] == 'MRA':
                        target_file = "Normal%.3d-MRA.mha" % patient_id
                        item_id = item_search(target_file, sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning("%s not found!" % target_file)
                    # ... ,the T1-Flash data
                    elif sub_folder['name'] == 'T1-Flash':
                        target_file = "Normal%.3d-T1-Flash.mha" % patient_id
                        item_id = item_search(target_file, sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning("%s not found!" % target_file)
                    # and the 1) extracted vascular network data (.tre) as well
                    # as the skull-stripped T1-Flash MR images
                    elif sub_folder['name'] == 'AuxillaryData':
                        item_id = item_search("VascularNetwork.tre",
                                              sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning("Could not find VascularNetwork.tre")
                        item_id = item_search("SkullStripped-T1-Flash.mha",
                                              sub_folder['folder_id'])
                        if (item_id > 0):
                            pydas.api._download_item(item_id, data_dir)
                        else:
                            logger.warning("Could not find SkullStripped-T1-Flash.mha")
            else:
                logger.warning("No AuxillaryData directory, skipping patient %d!"
                               % patient_id)