def view_analysis(dataset_name, sample_name, recon_name, analysis_name):
    """Render the detail page for a single analysis of a reconstruction."""
    navbar = [
        (dataset_name, '/dataset/%s' % dataset_name),
        (sample_name, '/dataset/%s/sample/%s' % (dataset_name, sample_name)),
        (recon_name, '/dataset/%s/sample/%s/recon/%s'
         % (dataset_name, sample_name, recon_name)),
        (analysis_name, None),
    ]
    errors = []

    # Locate the analysis directory; if it cannot be parsed, bounce the
    # user back to the parent reconstruction page with a flash message.
    try:
        analysis = APTAnalysisDirectory.load_dataset_by_name(
            dataset_name, sample_name, recon_name, analysis_name)
    except DatasetParseException:
        flash('No such analysis!')
        return redirect('/dataset/%s/sample/%s/recon/%s'
                        % (dataset_name, sample_name, recon_name))

    # Whether the parent dataset has already been published (read-only flag)
    is_published = APTDataDirectory.load_dataset_by_name(
        dataset_name).is_published()

    analysis_metadata = analysis.load_metadata()

    return render_template('analysis.html',
                           dataset_name=dataset_name,
                           sample_name=sample_name,
                           recon_name=recon_name,
                           analysis_name=analysis_name,
                           analysis=analysis,
                           errors=errors,
                           analysis_metadata=analysis_metadata,
                           navbar=navbar,
                           is_published=is_published)
def display_dataset(dataset_name):
    """Display metadata about a certain dataset"""
    navbar = [(dataset_name, '/dataset/%s' % dataset_name)]

    # If the dataset fails to parse, render the page with only the errors
    try:
        dataset = APTDataDirectory.load_dataset_by_name(dataset_name)
    except DatasetParseException as exc:
        return render_template('dataset.html',
                               name=dataset_name,
                               dataset=None,
                               errors=exc.errors,
                               navbar=navbar)

    # Collect samples (plus any per-sample parse errors) and the metadata
    samples, errors = dataset.list_samples()
    metadata = dataset.get_metadata()

    return render_template('dataset.html',
                           name=dataset_name,
                           dataset=dataset,
                           samples=samples,
                           errors=errors,
                           metadata=metadata,
                           navbar=navbar)
def edit_dataset(dataset_name):
    """Edit dataset metadata"""
    title = 'Edit Dataset'
    description = 'Edit the general metadata of a dataset'
    navbar = [(dataset_name, '/dataset/%s' % dataset_name), ('Edit', '#')]

    # Verify the dataset exists before offering the edit form
    try:
        dataset = APTDataDirectory.load_dataset_by_name(dataset_name)
    except (ValueError, AttributeError, DatasetParseException):
        return redirect("/dataset/" + dataset_name)

    if request.method != 'POST':
        # GET: pre-populate the form with the current metadata
        form = DatasetForm(**dataset.get_metadata().metadata)
        return render_template('dataset_create.html', title=title,
                               description=description, form=form,
                               navbar=navbar)

    # POST: apply the update if the submission validates,
    # otherwise re-display the form with its validation messages
    form = DatasetForm(request.form)
    if not form.validate():
        return render_template('dataset_create.html', title=title,
                               description=description, form=form,
                               navbar=navbar)
    dataset.update_metadata(form)
    return redirect('/dataset/' + dataset_name)
def list_datasets():
    """List all datasets currently stored at default data path"""
    dir_info = APTDataDirectory.get_all_datasets(app.config['WORKING_PATH'])
    # Map directory name -> whether it parsed into a real dataset object
    # (invalid entries hold something other than an APTDataDirectory).
    # Fixed: the old loop variable `dir` shadowed the builtin; use a
    # dict comprehension instead of dict([...]) while we're at it.
    dir_valid = {name: isinstance(info, APTDataDirectory)
                 for name, info in dir_info.items()}
    return render_template("dataset_list.html",
                           dir_info=dir_info,
                           dir_valid=dir_valid,
                           navbar=[('List Datasets', '#')])
def decorated_function(*args, **kwargs):
    """Guard wrapper: run the wrapped view only when the dataset both
    exists and has not yet been published."""
    dataset_name = kwargs['dataset_name']

    # Missing/unparseable dataset: send the user back to its landing page
    try:
        data = APTDataDirectory.load_dataset_by_name(dataset_name)
    except DatasetParseException:
        return redirect("/dataset/%s" % dataset_name)

    # Published datasets are frozen; warn and bounce
    if data.is_published():
        flash('Dataset has already been published!', 'warning')
        return redirect("/dataset/%s" % dataset_name)

    # All checks passed — invoke the wrapped view
    return fn(*args, **kwargs)
def create():
    """Create a new dataset"""
    title = 'Create New Dataset'
    description = 'Create a new dataset on the NUCAPT server. A dataset describes a single set of similar experiments.'
    form = DatasetForm(request.form)

    # Valid POST: initialize the dataset directory and jump to its page
    if request.method == 'POST' and form.validate():
        new_dataset = APTDataDirectory.initialize_dataset(form)
        return redirect('/dataset/%s' % new_dataset.name)

    # GET (or invalid submission): show the creation form
    return render_template('dataset_create.html', title=title,
                           description=description, form=form,
                           navbar=[('Create Dataset', '#')])
def view_sample(dataset_name, sample_name):
    """View metadata about sample"""
    navbar = [(dataset_name, '/dataset/%s' % dataset_name),
              (sample_name, '#')]

    # Load in the sample by name
    try:
        sample = APTSampleDirectory.load_dataset_by_name(
            dataset_name, sample_name)
    except DatasetParseException as exc:
        # Fixed: this branch previously passed the unbound local `sample`,
        # raising NameError and masking the parse errors. Pass None instead.
        return render_template('sample.html',
                               dataset_name=dataset_name,
                               sample=None,
                               errors=exc.errors,
                               navbar=navbar)

    # Determine whether the parent dataset has been published
    is_published = APTDataDirectory.load_dataset_by_name(
        dataset_name).is_published()

    # Load in the sample information; a parse failure yields a page
    # with whatever metadata loaded plus the error list
    sample_metadata = None
    collection_metadata = None
    errors = []
    try:
        sample_metadata = sample.load_sample_information()
        collection_metadata = sample.load_collection_metadata()
        recon_data, recon_metadata, recon_errors = \
            sample.list_reconstructions()
        errors.extend(recon_errors)
    except DatasetParseException as err:
        errors.extend(err.errors)
        recon_data = []
        recon_metadata = []

    return render_template('sample.html',
                           dataset_name=dataset_name,
                           sample=sample,
                           sample_name=sample_name,
                           sample_metadata=sample_metadata,
                           collection_metadata=collection_metadata,
                           errors=errors,
                           recon_data=list(zip(recon_data, recon_metadata)),
                           navbar=navbar,
                           is_published=is_published)
def view_reconstruction(dataset_name, sample_name, recon_name):
    """Render the detail page for a single reconstruction."""
    navbar = [(dataset_name, '/dataset/%s' % dataset_name),
              (sample_name, '/dataset/%s/sample/%s'
               % (dataset_name, sample_name)),
              (recon_name, '#')]
    errors = []

    # Load in the recon and its metadata.
    # Fixed: these locals were previously unbound when loading failed,
    # causing a NameError below instead of an error page.
    recon = None
    recon_metadata = None
    try:
        recon = APTReconstruction.load_dataset_by_name(dataset_name,
                                                       sample_name,
                                                       recon_name)
        recon_metadata = recon.load_metadata()
    except DatasetParseException as exc:
        errors = exc.errors

    # Determine whether the dataset has been published
    is_published = APTDataDirectory.load_dataset_by_name(
        dataset_name).is_published()

    # Get the POS and RRNG file paths, if the recon loaded at all
    # (removed a no-op `except: raise` clause here)
    pos_path = None
    rrng_path = None
    if recon is not None:
        try:
            pos_path = recon.get_pos_file()
            rrng_path = recon.get_rrng_file()
        except DatasetParseException as exc:
            errors.extend(exc.errors)

    return render_template('reconstruction.html',
                           dataset_name=dataset_name,
                           sample_name=sample_name,
                           recon_name=recon_name,
                           recon=recon,
                           recon_metadata=recon_metadata,
                           errors=errors,
                           pos_path=pos_path,
                           rrng_path=rrng_path,
                           navbar=navbar,
                           is_published=is_published)
def create_reconstruction(dataset_name, sample_name):
    """Handle the form for adding a new reconstruction to a sample.

    GET renders the form, pre-populated from the most recent existing
    reconstruction (of this sample, or failing that, a sibling sample).
    POST validates the metadata and uploaded POS/RRNG files, creates the
    reconstruction directory, and saves the files into it.
    """
    navbar = [(dataset_name, '/dataset/%s' % dataset_name),
              (sample_name, '/dataset/%s/sample/%s'
               % (dataset_name, sample_name)),
              ('Add Reconstruction', '#')]

    # Make sure this sample exists
    try:
        sample = APTSampleDirectory.load_dataset_by_name(
            dataset_name, sample_name)
    except DatasetParseException:
        return redirect("/dataset/%s/sample/%s" % (dataset_name, sample_name))

    # Create the form
    if request.method == 'POST':
        form = AddAPTReconstructionForm(request.form)
    else:
        # Load the existing reconstructions
        recons, _, _ = sample.list_reconstructions()

        # Default name: one past the count of existing reconstructions
        new_metadata = dict(name='Reconstruction%d' % (len(recons) + 1))
        if len(recons) == 0:
            # No recons on this sample yet: try to borrow metadata from the
            # newest sibling sample that has one. Fixed: the loop variable
            # previously shadowed the `sample` loaded above.
            samples, _ = APTDataDirectory.load_dataset_by_name(
                dataset_name).list_samples()
            for other_sample in sorted(samples, key=lambda x: x.name)[::-1]:
                my_recons, _, _ = other_sample.list_reconstructions()
                if len(my_recons) > 0:
                    recons = my_recons
                    break

        # If a reconstruction was found, pre-populate the form with the
        # metadata of the most recent one
        if len(recons) > 0:
            old_metadata = sorted(recons,
                                  key=lambda x: x.name)[-1].load_metadata()
            new_metadata.update(old_metadata.metadata)

        # Create the form
        form = AddAPTReconstructionForm(**new_metadata)

    # Make sure it validates
    if request.method == 'POST' and form.validate():
        try:
            errors = []

            # Check the uploaded file extensions
            pos_file = request.files['pos_file']
            if not pos_file.filename.lower().endswith('.pos'):
                errors.append('POS File must have the extension ".pos"')
            rrng_file = request.files['rrng_file']
            if not rrng_file.filename.lower().endswith('.rrng'):
                errors.append('RRNG File must have extension ".rrng"')

            # Find if there is a tip image
            tip_image_path = None
            if 'tip_image' in request.files:
                tip_image = request.files['tip_image']
                tip_image_path = 'tip_image.%s' % (
                    tip_image.filename.split(".")[-1])

            # If errors, raise
            if len(errors) > 0:
                raise DatasetParseException(errors)

            # Check the metadata and create the reconstruction directory
            recon_name = APTReconstruction.create_reconstruction(
                form, dataset_name, sample_name, tip_image_path)
        except DatasetParseException as err:
            return render_template('reconstruction_create.html',
                                   form=form,
                                   dataset_name=dataset_name,
                                   sample_name=sample_name,
                                   errors=errors + err.errors,
                                   navbar=navbar)

        # If valid, upload the data
        recon = APTReconstruction.load_dataset_by_name(dataset_name,
                                                       sample_name,
                                                       recon_name)
        pos_file.save(
            os.path.join(recon.path, secure_filename(pos_file.filename)))
        rrng_file.save(
            os.path.join(recon.path, secure_filename(rrng_file.filename)))
        if 'tip_image' in request.files:
            tip_image = request.files['tip_image']
            tip_image.save(
                os.path.join(
                    recon.path,
                    'tip_image.%s' % (tip_image.filename.split(".")[-1])))
        return redirect("/dataset/%s/sample/%s/recon/%s"
                        % (dataset_name, sample_name, recon_name))

    return render_template('reconstruction_create.html',
                           form=form,
                           dataset_name=dataset_name,
                           sample_name=sample_name,
                           navbar=navbar)
def create_sample(dataset_name):
    """Create a new sample for a dataset"""
    # Fixed: the navbar link was 'dataset/%s' (relative); every other view
    # uses an absolute '/dataset/%s' path.
    navbar = [(dataset_name, '/dataset/%s' % dataset_name),
              ('Create Sample', '#')]

    # Load in the dataset
    try:
        dataset = APTDataDirectory.load_dataset_by_name(dataset_name)
    except DatasetParseException:
        return redirect('/dataset/' + dataset_name)

    # Initialize form data
    if request.method == 'POST':
        form = APTSampleForm(request.form)
    else:
        samples, errors = dataset.list_samples()

        # Make a new name
        new_metadata = {'sample_name': 'Sample%d' % (len(samples) + 1)}
        if len(samples) > 0:
            # Copy data from the most recent existing sample
            last_sample = sorted(samples, key=lambda x: x.name)[-1]

            # Loop over each subfield
            for n, m in zip(
                    ['sample_form', 'collection_form', 'preparation_form'],
                    [last_sample.load_sample_information(),
                     last_sample.load_collection_metadata(),
                     last_sample.load_preparation_metadata()]):
                new_metadata[n] = m.metadata

        # Initialize the form
        form = APTSampleForm(**new_metadata)

    if request.method == 'POST' and form.validate():
        # Attempt to validate the metadata
        try:
            sample_name = APTSampleDirectory.create_sample(dataset_name, form)
        except DatasetParseException as err:
            return render_template('sample_create.html',
                                   form=form,
                                   name=dataset_name,
                                   errors=err.errors,
                                   navbar=navbar)

        # Create the sample
        sample = APTSampleDirectory.load_dataset_by_name(
            dataset_name, sample_name)

        # If present, upload file
        rhit_file = request.files.get('rhit_file', None)
        if 'rhit_file' not in request.files or rhit_file.filename == "":
            pass  # Do nothing
        elif rhit_file.filename.lower().endswith('.rhit'):
            rhit_file.save(
                os.path.join(sample.path,
                             secure_filename(rhit_file.filename)))
        else:
            # Bad extension: clear the half-created sample directory
            shutil.rmtree(sample.path)
            return render_template('sample_create.html',
                                   form=form,
                                   name=dataset_name,
                                   errors=['File must have extension RHIT'],
                                   navbar=navbar)
        return redirect("/dataset/%s/sample/%s"
                        % (dataset_name, sample_name))

    # If GET request (or invalid POST), show the sample-creation form
    return render_template('sample_create.html',
                           form=form,
                           name=dataset_name,
                           navbar=navbar)
def publish_dataset(dataset_name):
    """Publish a dataset to the Materials Data Facility"""
    navbar = [(dataset_name, '/dataset/%s' % dataset_name),
              ('Publish', '#')]

    # Check that this is a good dataset
    try:
        data = APTDataDirectory.load_dataset_by_name(dataset_name)
    except (ValueError, AttributeError, DatasetParseException):
        return redirect("/dataset/" + dataset_name)

    # Check if the dataset has already been published
    if request.method == 'POST':
        # Get the user data
        form = PublicationForm(request.form)
        if not form.validate():
            raise Exception('Form failed to validate')

        # For debugging, do not submit anything to Publish
        if app.config.get('DEBUG_SKIP_PUB', False):
            data.mark_as_published('DEBUG')
            return redirect('/dataset/' + dataset_name)

        # Create the PublicationClient, authorized with the Globus Publish
        # refresh token stored in the user's session
        globus_publish_client = DataPublicationClient(
            authorizer=RefreshTokenAuthorizer(
                session["tokens"]["publish.api.globus.org"]["refresh_token"],
                load_portal_client()))

        # Create the transfer client (same pattern, transfer-scoped token)
        mdf_transfer_client = TransferClient(authorizer=RefreshTokenAuthorizer(
            session["tokens"]["transfer.api.globus.org"]["refresh_token"],
            load_portal_client()))

        # Create the publication entry; the response tells us which shared
        # endpoint/path to transfer the raw data into
        try:
            md_result = globus_publish_client.push_metadata(
                app.config.get("PUBLISH_COLLECTION"),
                form.convert_to_globus_publication())
            pub_endpoint = md_result['globus.shared_endpoint.name']
            pub_path = os.path.join(md_result['globus.shared_endpoint.path'],
                                    "data") + "/"
            submission_id = md_result["id"]
        except Exception as e:
            # TODO: Update status - not Published due to bad metadata
            raise e

        # Transfer data
        try:
            # '/' of the Globus endpoint for the working data is the working data path
            data_path = '/%s/' % (os.path.relpath(data.path,
                                                  app.config['WORKING_PATH']))
            toolbox.quick_transfer(mdf_transfer_client,
                                   app.config["WORKING_DATA_ENDPOINT"],
                                   pub_endpoint, [(data_path, pub_path)],
                                   timeout=0)
        except Exception as e:
            # TODO: Update status - not Published due to failed Transfer
            raise e

        # Send submission in for review
        try:
            globus_publish_client.complete_submission(submission_id)
        except Exception as e:
            # TODO: Raise exception - not Published due to Publish error
            raise e

        # Mark dataset as complete.
        data.mark_as_published(submission_id)

        # Redirect to Globus Publish webpage
        return redirect("/dataset/" + dataset_name)
    else:
        # GET: pre-populate the publication form from the dataset metadata
        # plus the logged-in user's name/email from the session
        default_values = data.get_metadata().metadata
        default_values['contact_person'] = session.get('name')
        default_values['contact_email'] = session.get('email')
        form = PublicationForm(**default_values)
        return render_template("dataset_publish.html",
                               data=data,
                               form=form,
                               navbar=navbar)