# NOTE(review): this chunk begins mid-script -- 'c' (Girder client),
# 'folderId', 'uploadACR', and 'arborWebAppsPath' are defined outside this
# view, and 'uploadPGS' is presumably initialized to False earlier; confirm
# against the full file.
# Search for an existing 'Phylogenetic signal' item so we only upload once.
searchPGS = c.get('resource/search', {
    'q': 'Phylogenetic signal',
    'types': '["item"]'
})
if len(searchPGS['item']) == 0:
    uploadPGS = True

# Read our analyses into Python dictionaries and upload them to girder.
if uploadACR:
    ACR = {}
    # Load the aceArbor analysis definition shipped with the web apps.
    with open("%s/ancestral-state/aceArbor.json" % arborWebAppsPath, "r") as acrFile:
        acrStr = acrFile.read()
    ACR['analysis'] = json.loads(acrStr)
    # Create a Girder item and attach the analysis JSON as item metadata.
    item = c.createItem(folderId, 'aceArbor', 'Ancestral state reconstruction')
    c.addMetadataToItem(item['_id'], ACR)
    print "aceArbor successfully uploaded"
else:
    print "aceArbor already exists"

if uploadPGS:
    PGS = {}
    # Same upload pattern for the 'Phylogenetic signal' analysis.
    with open(
            "%s/phylogenetic-signal/Phylogenetic_signal.json" % arborWebAppsPath,
            "r") as pgsFile:
        pgsStr = pgsFile.read()
    PGS['analysis'] = json.loads(pgsStr)
    item = c.createItem(folderId, 'Phylogenetic signal', 'Phylogenetic signal')
    c.addMetadataToItem(item['_id'], PGS)
    print "Phylogenetic signal successfully uploaded"
    # NOTE(review): the matching 'else' branch for uploadPGS appears to be
    # cut off at the end of this chunk (compare the sibling script that
    # prints "Phylogenetic signal already exists").
# NOTE(review): this chunk begins mid-statement -- the lines below are the
# tail of a c.post('folder', ...) call (creating the 'Data' folder) whose
# opening, and the enclosing 'if' on the collection search, lie outside
# this view.  The 'continue' further down shows the whole fragment runs
# inside a loop over analysis files.
        'name': 'Data',
        'description': 'Data Folder',
        'public': 'true'
    })
    # Create the companion 'Analyses' folder in the new collection.
    c.post('folder', parameters={
        'parentType': 'collection',
        'parentId': collection['_id'],
        'name': 'Analyses',
        'description': 'Analysis folder',
        'public': 'true'
    })
else:
    # The collection already exists; reuse the first search hit.
    collection = collection_search['collection'][0]

# Get the 'Analyses' folder for this collection.
analysis_folder = c.load_or_create_folder('Analyses', collection['_id'],
                                          'collection')
folder_id = analysis_folder['_id']

# Read this analysis into a Python dictionary and upload it to Girder.
analysis = {}
with open (fullpath, "r") as analysis_file:
    analysis_str = analysis_file.read()
try:
    analysis['analysis'] = json.loads(analysis_str)
except ValueError:
    # Skip files that do not contain valid JSON and move on to the next
    # analysis in the enclosing loop.
    print "Could not read valid JSON from %s" % analysis_filename
    continue
# Attach the parsed analysis as metadata on a freshly created item.
item = c.createItem(folder_id, analysis_name, analysis_name)
c.addMetadataToItem(item['_id'], analysis)
print "%s successfully uploaded to %s" % (analysis_filename, collection_name)
# NOTE(review): this chunk begins mid-structure -- 'uploadACR = True' is
# presumably the body of an 'if' (an earlier resource/search found no
# existing aceArbor item) that lies outside this view; 'c', 'folderId',
# 'args', and the initial values of uploadACR/uploadPGS are also defined
# elsewhere.
uploadACR = True
# Search for an existing 'Phylogenetic signal' item so we only upload once.
searchPGS = c.get('resource/search', {
    'q': 'Phylogenetic signal',
    'types': '["item"]'
})
if len(searchPGS['item']) == 0:
    uploadPGS = True

# Read our analyses into Python dictionaries and upload them to girder.
if uploadACR:
    ACR = {}
    # Load the aceArbor analysis definition from the user-supplied path.
    with open ("%s/ancestral-state/aceArbor.json" % args.path, "r") as acrFile:
        acrStr = acrFile.read()
    ACR['analysis'] = json.loads(acrStr)
    # Create a Girder item and attach the analysis JSON as item metadata.
    item = c.createItem(folderId, 'aceArbor', 'Ancestral state reconstruction')
    c.addMetadataToItem(item['_id'], ACR)
    print "aceArbor successfully uploaded"
else:
    print "aceArbor already exists"

if uploadPGS:
    PGS = {}
    # Same upload pattern for the 'Phylogenetic signal' analysis.
    with open ("%s/phylogenetic-signal/Phylogenetic_signal.json" % args.path,
               "r") as pgsFile:
        pgsStr = pgsFile.read()
    PGS['analysis'] = json.loads(pgsStr)
    item = c.createItem(folderId, 'Phylogenetic signal', 'Phylogenetic signal')
    c.addMetadataToItem(item['_id'], PGS)
    print "Phylogenetic signal successfully uploaded"
else:
    print "Phylogenetic signal already exists"
# Derive the output name from the NetCDF input name (.nc -> .json).
output_file_name = input_file_name.replace('.nc', '.json')

# Initialize up front so the finally-block cleanup cannot raise a
# NameError (masking the real exception) if an early step such as
# tempfile.mkstemp() fails before either variable is assigned.  The
# existing 'if filepath and ...' guards clearly expect None here.
filepath = None
output_dir = None
try:
    # Now download the dataset to a private temp file; close the raw fd
    # immediately since downloadFile writes by path.
    (fd, filepath) = tempfile.mkstemp()
    os.close(fd)
    client.downloadFile(fileId, filepath)

    # Create temp file and convert to GeoJs contour JSON format.
    output_dir = tempfile.mkdtemp()
    output_filepath = os.path.join(output_dir, output_file_name)
    with open(output_filepath, 'w') as fp:
        fp.write(json_util.dumps(convert(filepath, variable, timestep)))

    # Create an item for this file, then upload the converted result.
    output_item = client.createItem(dataset_folder_id, output_file_name,
                                    output_file_name)
    client.uploadFileToItem(output_item['_id'], output_filepath)
    output_item_id = output_item['_id']

    # Finally promote item to dataset.
    client.post('minerva_dataset/%s/dataset' % output_item_id)
finally:
    # Best-effort cleanup of whatever temp artifacts were created.
    if filepath and os.path.exists(filepath):
        os.remove(filepath)
    if output_dir and os.path.exists(output_dir):
        shutil.rmtree(output_dir)