def decode_url(url, metadata=None, render=True):
    """Download and decode the Nifti image at *url*.

    Parameters
    ----------
    url : str
        Location of a .nii or .nii.gz image; 'http://' is prepended when
        no scheme is present.
    metadata : dict or None
        Optional extra info; 'name' and 'nv_id' keys are read if present.
    render : bool
        If True, return the rendered results page; otherwise return the
        decoding's uuid.
    """
    # Fix: a mutable default argument ({}) is shared across calls; use a
    # None sentinel instead.
    if metadata is None:
        metadata = {}
    # Basic URL validation
    if not url.startswith('http://'):
        url = 'http://' + url
    # Only nifti images (.nii / .nii.gz) are accepted.
    ext = re.search(r'\.nii(\.gz)?$', url)
    if ext is None:
        return error_page("Invalid image extension; currently the decoder only"
                          " accepts images in nifti format.")
    # Check that an image exists at the URL before downloading anything.
    head = requests.head(url)
    if head.status_code not in [200, 301, 302]:
        return error_page("No image was found at the provided URL.")
    headers = head.headers
    # Reject oversized downloads up front (interactive requests only).
    if 'content-length' in headers and \
            int(headers['content-length']) > 4000000 and render:
        return error_page("The requested Nifti image is too large. Files must "
                          "be under 4 MB in size.")
    dec = _get_decoding(url=url)
    # Delete old record when caching of decodings is disabled.
    if not settings.CACHE_DECODINGS and dec is not None:
        db.session.delete(dec)
        db.session.commit()
        dec = None
    if dec is None:
        unique_id = uuid.uuid4().hex
        filename = join(settings.DECODED_IMAGE_DIR, unique_id + ext.group(0))
        resp = requests.get(url)
        with open(filename, 'wb') as outfile:
            outfile.write(resp.content)
        # Make sure celery worker has permission to overwrite.
        # Fix: 0o666 replaces the legacy 0666 literal (same value; the old
        # form is a SyntaxError on Python 3, 0o666 works on Python 2.6+).
        os.chmod(filename, 0o666)
        # Named args to pass to Decoding initializer
        modified = headers.get('last-modified', None)
        if modified is not None:
            modified = datetime(*parsedate(modified)[:6])
        kwargs = {
            'uuid': unique_id,
            'url': url,
            'name': metadata.get('name', basename(url)),
            'image_modified_at': modified,
            'filename': filename,
            'neurovault_id': metadata.get('nv_id', None)
        }
        dec = _run_decoder(**kwargs)
    if render:
        return show(dec, dec.uuid)
    else:
        return dec.uuid
def decode_url(url, metadata=None, render=True):
    """Download and decode the Nifti image at *url*.

    Parameters
    ----------
    url : str
        Location of a .nii or .nii.gz image; 'http://' is prepended when
        no http/https scheme is present.
    metadata : dict or None
        Optional extra info; 'name' and 'nv_id' keys are read if present.
    render : bool
        If True, return the rendered results page; otherwise return the
        decoding's uuid.
    """
    # Fix: a mutable default argument ({}) is shared across calls; use a
    # None sentinel instead.
    if metadata is None:
        metadata = {}
    # Basic URL validation (raw string; ':' and '/' need no escaping).
    if not re.search(r'^https?://', url):
        url = 'http://' + url
    # Only nifti images (.nii / .nii.gz) are accepted.
    ext = re.search(r'\.nii(\.gz)?$', url)
    if ext is None:
        return error_page("Invalid image extension; currently the decoder only"
                          " accepts images in nifti format.")
    # Check that an image exists at the URL before downloading anything.
    head = requests.head(url)
    if head.status_code not in [200, 301, 302]:
        return error_page("No image was found at the provided URL.")
    headers = head.headers
    # Reject oversized downloads up front (interactive requests only).
    if 'content-length' in headers and \
            int(headers['content-length']) > 4000000 and render:
        return error_page("The requested Nifti image is too large. Files must "
                          "be under 4 MB in size.")
    dec = _get_decoding(url=url)
    # Delete old record when caching of decodings is disabled.
    if not settings.CACHE_DECODINGS and dec is not None:
        db.session.delete(dec)
        db.session.commit()
        dec = None
    if dec is None:
        unique_id = uuid.uuid4().hex
        filename = join(settings.DECODED_IMAGE_DIR, unique_id + ext.group(0))
        resp = requests.get(url)
        with open(filename, 'wb') as outfile:
            outfile.write(resp.content)
        # Make sure celery worker has permission to overwrite.
        # Fix: 0o666 replaces the legacy 0666 literal (same value; the old
        # form is a SyntaxError on Python 3, 0o666 works on Python 2.6+).
        os.chmod(filename, 0o666)
        # Named args to pass to Decoding initializer
        modified = headers.get('last-modified', None)
        if modified is not None:
            modified = datetime(*parsedate(modified)[:6])
        kwargs = {
            'uuid': unique_id,
            'url': url,
            'name': metadata.get('name', basename(url)),
            'image_modified_at': modified,
            'filename': filename,
            'neurovault_id': metadata.get('nv_id', None)
        }
        dec = _run_decoder(**kwargs)
    if render:
        return show(dec, dec.uuid)
    else:
        return dec.uuid
def get_decoding_data(image, get_json=True):
    """Return decoding results for the image with id *image*.

    Parameters
    ----------
    image : int
        Primary key of the Image to decode.
    get_json : bool
        If True, return a Flask JSON response; otherwise return the raw
        list of [feature, value] pairs.
    """
    if Image.query.get(image) is None:
        return error_page("Invalid image requested for decoding. Please check"
                          " to make sure there is a valid image with id=%d."
                          % image)
    dec = decode_analysis_image(image)
    df = os.path.join(settings.DECODING_RESULTS_DIR, dec.uuid + '.txt')
    if not os.path.exists(df):
        return error_page("An unspecified error occurred during decoding.")
    # Fix: use a context manager so the results file handle is closed
    # promptly (the original open(df).read() leaked it).
    with open(df) as results_file:
        lines = results_file.read().splitlines()
    rows = [line.split('\t') for line in lines]
    # Empty values are treated as 0; round to 3 decimal places.
    data = [[feat, round(float(val or '0'), 3)] for (feat, val) in rows]
    return jsonify(data=data) if get_json else data
def index():
    """Render the decoder landing page, a decoding result, or an error."""
    # The API helper reports its outcome through a status code.
    status, dec = _get_decoding_object()
    if status == 99:
        # Nothing to decode yet: show the plain index page.
        return render_template('decode/index.html')
    if status == 200:
        return show(dec, dec.uuid)
    # Any other status: dec carries the error message.
    return error_page(dec)
def run_custom_analysis(uid):
    """ Given a uuid, kick off the analysis run and redirect the user to the
    results page once the analysis is complete. """
    custom = CustomAnalysis.query.filter_by(uuid=uid).first()
    # 404 when the analysis doesn't exist or has no studies to analyze.
    if not custom or not custom.studies:
        abort(404)
    # Only generate images if the analysis has never been run, if changes have
    # been made since the last run, or if images are missing.
    if not custom.last_run_at or (custom.last_run_at < custom.updated_at) or \
            not custom.images:
        ids = [s.pmid for s in custom.studies]
        # Block until the celery meta-analysis task finishes; a truthy
        # result means the task reported success.
        if tasks.run_metaanalysis.delay(ids, custom.uuid).wait():
            # Update analysis record
            # Paths of the images the task is expected to have written
            # (pFgA -> reverse inference, pAgF -> forward inference).
            rev_inf = '%s_pFgA_z_FDR_0.01.nii.gz' % custom.uuid
            rev_inf = join(settings.IMAGE_DIR, 'custom', rev_inf)
            fwd_inf = '%s_pAgF_z_FDR_0.01.nii.gz' % custom.uuid
            fwd_inf = join(settings.IMAGE_DIR, 'custom', fwd_inf)
            if exists(rev_inf):
                images = [
                    CustomAnalysisImage(
                        name='%s (forward inference)' % custom.name,
                        image_file=fwd_inf,
                        label='%s (forward inference)' % custom.name,
                        stat='z-score',
                        display=1,
                        download=1
                    ),
                    CustomAnalysisImage(
                        name='%s (reverse inference)' % custom.name,
                        image_file=rev_inf,
                        label='%s (reverse inference)' % custom.name,
                        stat='z-score',
                        display=1,
                        download=1
                    )
                ]
                custom.images = images
                # Record the run time so the staleness check above passes
                # on subsequent requests.
                custom.last_run_at = dt.datetime.utcnow()
                db.session.add(custom)
                db.session.commit()
                return redirect(url_for('analyses.show_custom_analysis',
                                        uid=uid))
            # Task reported success but the expected image is missing.
            return error_page("An unspecified error occurred while trying "
                              "to run the custom meta-analysis. Please try "
                              "again.")
    # Results already up to date (or the task reported failure); send the
    # user to the results page either way.
    return redirect(url_for('analyses.show_custom_analysis', uid=uid))
def run_custom_analysis(uid):
    """ Given a uuid, kick off the analysis run and redirect the user to the
    results page once the analysis is complete. """
    custom = CustomAnalysis.query.filter_by(uuid=uid).first()
    # 404 when the analysis doesn't exist or has no studies to analyze.
    if not custom or not custom.studies:
        abort(404)
    # Only generate images if the analysis has never been run, if changes have
    # been made since the last run, or if images are missing.
    if not custom.last_run_at or (custom.last_run_at < custom.updated_at) or \
            not custom.images:
        ids = [s.pmid for s in custom.studies]
        # Block until the celery meta-analysis task finishes; a truthy
        # result means the task reported success.
        if tasks.run_metaanalysis.delay(ids, custom.uuid).wait():
            # Update analysis record
            # Paths of the images the task is expected to have written
            # (specificity == reverse inference, consistency == forward).
            rev_inf = '%s_specificity_z_FDR_0.01.nii.gz' % custom.uuid
            rev_inf = join(settings.IMAGE_DIR, 'custom', rev_inf)
            fwd_inf = '%s_consistency_FDR_0.01.nii.gz' % custom.uuid
            fwd_inf = join(settings.IMAGE_DIR, 'custom', fwd_inf)
            if exists(rev_inf):
                images = [
                    CustomAnalysisImage(name='%s (consistency)' % custom.name,
                                        image_file=fwd_inf,
                                        label='%s (consistency)' % custom.name,
                                        stat='z-score',
                                        display=1,
                                        download=1),
                    CustomAnalysisImage(name='%s (specificity)' % custom.name,
                                        image_file=rev_inf,
                                        label='%s (specificity)' % custom.name,
                                        stat='z-score',
                                        display=1,
                                        download=1)
                ]
                custom.images = images
                # Record the run time so the staleness check above passes
                # on subsequent requests.
                custom.last_run_at = dt.datetime.utcnow()
                db.session.add(custom)
                db.session.commit()
                return redirect(
                    url_for('analyses.show_custom_analysis', uid=uid))
            # Task reported success but the expected image is missing.
            return error_page("An unspecified error occurred while trying "
                              "to run the custom meta-analysis. Please try "
                              "again.")
    # Results already up to date (or the task reported failure); send the
    # user to the results page either way.
    return redirect(url_for('analyses.show_custom_analysis', uid=uid))
def run_custom_analysis(uid):
    """ Given a uuid, kick off the analysis run and redirect the user to the
    results page once the analysis is complete. """
    record = CustomAnalysis.query.filter_by(uuid=uid).first()
    # Unknown analysis, or one with no studies, is a 404.
    if not record or not record.studies:
        abort(404)
    # Delegate the actual run to the API layer; None signals failure.
    outcome = api_run_custom(uid)
    if outcome is not None:
        return redirect(url_for('analyses.show_custom_analysis', uid=uid))
    return error_page("An unspecified error occurred while trying "
                      "to run the custom meta-analysis. Please try "
                      "again.")
def show(symbol):
    """Render the viewer page for the gene with the given symbol."""
    gene = Gene.query.filter_by(symbol=symbol).first()
    if gene is None:
        return error_page("We have no data for the gene '%s'" % symbol)
    primary = gene.images[0]
    # Run decoder if it hasn't been run before
    decoding = decode_analysis_image(primary.id)
    download_url = url_for('api_images.download', val=primary.id)
    # Viewer spec consumed by the template's image viewer.
    viewer_spec = {
        'id': primary.id,
        'name': symbol,
        'url': download_url,
        'colorPalette': 'intense red-blue',
        'download': download_url,
        'sign': 'both'
    }
    return render_template('genes/show.html', gene=gene,
                           images=json.dumps([viewer_spec]),
                           image_id=decoding.uuid)