def forwards(self, orm): for analysis in orm.Analysis.objects.all(): api_request = RequestDetailJSON(query={ 'analysis_id': analysis.analysis_id}) try: result = api_request.call().next() except StopIteration: print 'Bad analysis: %s, skipped.' % analysis.analysis_id continue analysis.aliquot_id = result.get('aliquot_id') analysis.analyte_code = result.get('analyte_code') analysis.center_name = result.get('center_name') analysis.checksum = result.get('checksum') analysis.disease_abbr = result.get('disease_abbr') analysis.filename = result.get('filename') analysis.legacy_sample_id = result.get('legacy_sample_id') analysis.library_strategy = result.get('library_strategy') analysis.published_date = result.get('published_date') analysis.participant_id = result.get('participant_id') analysis.platform = result.get('platform') analysis.refassem_short_name = result.get('refassem_short_name') analysis.sample_accession = result.get('sample_accession') analysis.sample_id = result.get('sample_id') analysis.sample_type = result.get('sample_type') analysis.study = result.get('study') analysis.tss_id = result.get('tss_id') analysis.upload_date = result.get('upload_date') analysis.save()
def summary_tsv_generator(request, compress=False):
    """
    Return summary tsv for all cart items.

    Streams the tsv (optionally gzipped) chunk by chunk: ids are taken
    from the cart in batches, details are fetched from the API, and one
    compressed chunk is yielded per batch.

    :param request: django Request object
    :param compress: set to True to enable compression
    """
    cart = Cart(request.session)
    zipper = Gzipper(filename='summary.tsv', compress=compress)
    columns = settings.TABLE_COLUMNS
    buf = StringIO()
    writer = CSVUnicodeWriter(
        buf,
        quoting=csv.QUOTE_MINIMAL,
        dialect='excel-tab',
        lineterminator='\n')
    # Header row: column titles lower-cased, spaces -> underscores.
    writer.writerow([title.lower().replace(' ', '_') for title in columns])
    expected_total = cart.all_count
    items = cart.cart.items.all().iterator()
    written = 0
    while True:
        # Gather up to MAX_ITEMS_IN_QUERY ids for one API request.
        batch = []
        for _ in xrange(settings.MAX_ITEMS_IN_QUERY):
            try:
                batch.append(next(items).analysis.analysis_id)
            except StopIteration:
                break
        if not batch:
            break
        api_request = RequestDetailJSON(query={'analysis_id': batch})
        for result in api_request.call():
            fields = field_values(result, humanize_files_size=False)
            writer.writerow(
                [unicode(fields.get(name, '')) for name in columns])
            written += 1
        # Drain the csv buffer into the gzipper and emit one chunk.
        buf.seek(0)
        chunk = buf.read()
        buf.seek(0)
        buf.truncate()
        zipper.write(chunk)
        yield zipper.read()
    if written != expected_total:
        # Row count mismatch: the API returned fewer/more results than
        # the cart holds -- flag the error to the user and in the file.
        cart_logger.error('Error while composing summary tsv.')
        add_message(
            request=request,
            level='error',
            content='An error occured while composing summary tsv.')
        request.session.save()
        zipper.write(u'\nError!')
    yield zipper.close()
def manifest_xml_generator(request, compress=False):
    """
    Return manifest xml for cart items with state==live.

    Streams the manifest (optionally gzipped) chunk by chunk: live item
    ids are batched, details are fetched from the API, and one
    compressed chunk is yielded per batch.

    :param request: django Request object
    :param compress: set to True to enable compression
    """
    cart = Cart(request.session)
    zipper = Gzipper(filename='manifest.xml', compress=compress)
    count_live = cart.live_count
    # FIX: header date used '%Y-%d-%m' (year-DAY-month), which emits
    # ambiguous/invalid dates for days > 12; use '%Y-%m-%d' ordering.
    zipper.write(render_to_string('xml/analysis_xml_header.xml', {
        'date': timezone.now().strftime('%Y-%m-%d %H:%M:%S'),
        'len': count_live}))
    iterator = cart.cart.items.filter(analysis__state='live').iterator()
    counter = 0
    downloadable_size = 0
    result_template = get_template('xml/manifest_xml_result.xml')
    while True:
        # Gather up to MAX_ITEMS_IN_QUERY ids for one API request.
        ids = []
        for i in xrange(settings.MAX_ITEMS_IN_QUERY):
            try:
                ids.append(next(iterator).analysis.analysis_id)
            except StopIteration:
                break
        if not ids:
            break
        api_request = RequestDetailJSON(query={'analysis_id': ids})
        for result in api_request.call():
            for f in result['files']:
                downloadable_size += f['filesize']
            counter += 1
            zipper.write(result_template.render(Context({
                'counter': counter,
                'result': result,
                'server_url': settings.CGHUB_DOWNLOAD_SERVER})))
        yield zipper.read()
    if counter != count_live:
        # Result count mismatch with the cart's live count: flag error.
        cart_logger.error('Error while composing manifest xml.')
        add_message(
            request=request,
            level='error',
            content='An error occured while composing manifest xml.')
        request.session.save()
    else:
        # Summary footer; size is total bytes converted to gigabytes,
        # rounded to 2 decimal places (1073741824 = 1024**3).
        zipper.write(render_to_string('xml/analysis_xml_summary.xml', {
            'counter': counter,
            'size': str(round(downloadable_size / 1073741824. * 100) / 100)}))
    yield zipper.close()
def post(self, request, analysis_id):
    """
    Add the analysis identified by ``analysis_id`` to the user's cart,
    then redirect to the cart page.

    :raises URLError: if the details API has no result for the id.
    """
    details_request = RequestDetailJSON(query={'analysis_id': analysis_id})
    try:
        result = details_request.call().next()
    except StopIteration:
        raise URLError('No results for analysis_id == %s' % analysis_id)
    try:
        # All three cart operations commit atomically or not at all.
        with transaction.commit_on_success():
            cart = Cart(request.session)
            cart.add(result)
            cart.update_stats()
    except DatabaseError:
        # Best effort: tell the user once, still redirect to the cart.
        add_message(
            request, 'error', settings.DATABASE_ERROR_NOTIFICATION,
            once=True)
    return HttpResponseRedirect(reverse('cart_page'))
def update_analysis(data):
    """
    Updates analysis with data.analysis_id.

    Creates the Analysis record if it does not exist yet. When ``data``
    carries only the analysis_id, the remaining attributes are fetched
    from the details API first.

    :param data: Result object (may contain only analysis_id or all
        attributes).
    :return: the saved Analysis instance, or None on failure.
    """
    try:
        analysis = Analysis.objects.get(analysis_id=data['analysis_id'])
    except Analysis.DoesNotExist:
        analysis = Analysis(analysis_id=data['analysis_id'])
    # 'platform' present means the caller already passed the full
    # attribute set; otherwise fetch it from the API.
    if 'platform' in data:
        result = data
    else:
        api_request = RequestDetailJSON(
            query={'analysis_id': data['analysis_id']})
        try:
            result = api_request.call().next()
        except StopIteration:
            # FIX: an empty API result used to raise StopIteration out
            # of this function; treat it as a failure (log + None),
            # consistent with the save() error handling below.
            cart_logger.error(
                'Error while creating new Analysis: no results for '
                'analysis_id == %s' % data['analysis_id'])
            return None
    for attr in CART_SORT_ATTRIBUTES:
        setattr(analysis, attr, result.get(attr))
    try:
        analysis.save()
    except Exception as e:
        cart_logger.error('Error while creating new Analysis: %s' % str(e))
        return None
    return analysis