def summary_tsv_generator(request, compress=False):
    """
    Stream a summary TSV for all cart items, optionally gzip-compressed.

    Cart analysis ids are consumed in batches of
    ``settings.MAX_ITEMS_IN_QUERY`` so no single detail-API request grows
    unboundedly. Each batch is rendered to TSV rows and yielded as a
    (possibly compressed) chunk. If the number of rows written does not
    match the cart size, an error is logged, a flash message is queued on
    the session, and an ``Error!`` marker is appended to the output.

    :param request: django Request object
    :param compress: set to True to enable compression
    """
    cart = Cart(request.session)
    zipper = Gzipper(filename='summary.tsv', compress=compress)
    COLUMNS = settings.TABLE_COLUMNS
    stringio = StringIO()
    csvwriter = CSVUnicodeWriter(
        stringio, quoting=csv.QUOTE_MINIMAL,
        dialect='excel-tab', lineterminator='\n')
    # Header row: column titles lower-cased, spaces replaced by underscores.
    csvwriter.writerow(
        [field.lower().replace(' ', '_') for field in COLUMNS])
    count_all = cart.all_count
    iterator = cart.cart.items.all().iterator()
    count = 0
    while True:
        # Collect the next batch of analysis ids from the cart iterator.
        ids = []
        for i in xrange(settings.MAX_ITEMS_IN_QUERY):
            try:
                ids.append(next(iterator).analysis.analysis_id)
            except StopIteration:
                break
        if not ids:
            break
        api_request = RequestDetailJSON(query={'analysis_id': ids})
        for result in api_request.call():
            fields = field_values(result, humanize_files_size=False)
            # Missing fields become empty cells so columns stay aligned.
            row = [unicode(fields.get(field_name, ''))
                   for field_name in COLUMNS]
            csvwriter.writerow(row)
            count += 1
        # Flush the buffered rows into the zipper, then reset the buffer
        # for the next batch (truncate at position 0 empties it).
        stringio.seek(0)
        line = stringio.read()
        stringio.seek(0)
        stringio.truncate()
        zipper.write(line)
        yield zipper.read()
    if count != count_all:
        # The detail API returned fewer results than the cart holds.
        cart_logger.error('Error while composing summary tsv.')
        add_message(
            request=request,
            level='error',
            content='An error occurred while composing summary tsv.')
        request.session.save()
        zipper.write(u'\nError!')
    yield zipper.close()
def manifest_xml_generator(request, compress=False):
    """
    Stream a manifest XML for cart items with ``state == 'live'``,
    optionally gzip-compressed.

    Yields the XML header first, then per-result chunks rendered from the
    ``xml/manifest_xml_result.xml`` template in batches of
    ``settings.MAX_ITEMS_IN_QUERY``, and finally either a summary footer
    (on success) or an error flash message queued on the session (when the
    rendered count does not match the live-item count).

    :param request: django Request object
    :param compress: set to True to enable compression
    """
    cart = Cart(request.session)
    zipper = Gzipper(filename='manifest.xml', compress=compress)
    count_live = cart.live_count
    # NOTE(review): '%Y-%d-%m' puts the day before the month — confirm
    # downstream consumers expect this; '%Y-%m-%d' is the conventional order.
    zipper.write(render_to_string('xml/analysis_xml_header.xml', {
        'date': datetime.datetime.strftime(
            timezone.now(), '%Y-%d-%m %H:%M:%S'),
        'len': count_live}))
    iterator = cart.cart.items.filter(analysis__state='live').iterator()
    counter = 0
    downloadable_size = 0
    result_template = get_template('xml/manifest_xml_result.xml')
    while True:
        # Collect the next batch of live analysis ids.
        ids = []
        for i in xrange(settings.MAX_ITEMS_IN_QUERY):
            try:
                ids.append(next(iterator).analysis.analysis_id)
            except StopIteration:
                break
        if not ids:
            break
        api_request = RequestDetailJSON(query={'analysis_id': ids})
        for result in api_request.call():
            # Accumulate total payload size across all files of the result.
            for f in result['files']:
                downloadable_size += f['filesize']
            counter += 1
            zipper.write(result_template.render(Context({
                'counter': counter,
                'result': result,
                'server_url': settings.CGHUB_DOWNLOAD_SERVER})))
        yield zipper.read()
    if counter != count_live:
        # The detail API returned fewer results than expected.
        cart_logger.error('Error while composing manifest xml.')
        add_message(
            request=request,
            level='error',
            content='An error occurred while composing manifest xml.')
        request.session.save()
    else:
        # Footer with item count and total size in GB, rounded to 2 decimals.
        zipper.write(render_to_string('xml/analysis_xml_summary.xml', {
            'counter': counter,
            'size': str(round(
                downloadable_size / 1073741824. * 100) / 100)}))
    yield zipper.close()
def metadata_xml_generator(request, compress=False):
    """
    Stream a metadata XML for all cart items, optionally gzip-compressed.

    Yields the XML header, then one chunk per cart item rendered from the
    ``xml/metadata_xml_result.xml`` template, and finally a summary footer.
    If fetching an item's analysis XML fails, an error is logged, a flash
    message is queued on the session, and the generator stops early (the
    bare ``return`` simply ends iteration — no footer is written).

    :param request: django Request object
    :param compress: set to True to enable compression
    """
    cart = Cart(request.session)
    zipper = Gzipper(filename='metadata.xml', compress=compress)
    items = cart.cart.items.all()
    # NOTE(review): '%Y-%d-%m' puts the day before the month — confirm
    # downstream consumers expect this; '%Y-%m-%d' is the conventional order.
    zipper.write(render_to_string('xml/analysis_xml_header.xml', {
        'date': datetime.datetime.strftime(
            timezone.now(), '%Y-%d-%m %H:%M:%S'),
        'len': items.count()}))
    counter = 0
    downloadable_size = 0
    result_template = get_template('xml/metadata_xml_result.xml')
    for item in items:
        analysis = item.analysis
        try:
            xml, files_size = get_analysis_xml(
                analysis_id=analysis.analysis_id,
                last_modified=analysis.last_modified)
        except AnalysisException as e:
            cart_logger.error(
                'Error while composing metadata xml. %s' % str(e))
            add_message(
                request=request,
                level='error',
                content='An error occurred while composing '
                        'metadata/manifest xml file.')
            request.session.save()
            return
        counter += 1
        downloadable_size += analysis.files_size
        zipper.write(result_template.render(Context({
            'counter': counter,
            'xml': xml.strip()})))
        yield zipper.read()
    # Footer with item count and total size in GB, rounded to 2 decimals.
    zipper.write(render_to_string('xml/analysis_xml_summary.xml', {
        'counter': counter,
        'size': str(round(downloadable_size / 1073741824. * 100) / 100)}))
    yield zipper.close()
def urls_tsv_generator(request, compress=False):
    """
    Stream download urls (one per line) for all cart items, optionally
    gzip-compressed.

    Despite the "tsv" name the output is a single-column text file named
    ``urls.txt``. Analysis ids are consumed in batches of
    ``settings.MAX_ITEMS_IN_QUERY``; each batch of urls is yielded as a
    (possibly compressed) chunk. If the number of urls written does not
    match the cart size, an error is logged, a flash message is queued on
    the session, and an ``Error!`` marker is appended to the output.

    :param request: django Request object
    :param compress: set to True to enable compression
    """
    cart = Cart(request.session)
    zipper = Gzipper(filename='urls.txt', compress=compress)
    iterator = cart.cart.items.all().iterator()
    count_all = cart.all_count
    count = 0
    while True:
        # Collect the next batch of analysis ids from the cart iterator.
        urls_list = []
        for i in xrange(settings.MAX_ITEMS_IN_QUERY):
            try:
                urls_list.append(next(iterator).analysis.analysis_id)
            except StopIteration:
                break
        if not urls_list:
            break
        for i in urls_list:
            zipper.write('%s/cghub/data/analysis/download/%s\n' % (
                settings.CGHUB_DOWNLOAD_SERVER, i))
            count += 1
        yield zipper.read()
    if count != count_all:
        # Fewer urls written than items in the cart.
        cart_logger.error('Error while composing urls tsv.')
        add_message(
            request=request,
            level='error',
            content='An error occurred while composing urls tsv.')
        request.session.save()
        zipper.write(u'\nError!')
    yield zipper.close()