Example #1
from os import path

import numpy as np
from django.http import JsonResponse

# Helpers such as get_intra_chr_loops_from_looplist and rel_loci_2_obj are
# assumed to be imported from the surrounding project.


def loci(request):
    chrom = request.GET.get('chrom', False)
    loop_list = request.GET.get('loop-list', False)

    # Get relative loci
    loci_rel, chroms = get_intra_chr_loops_from_looplist(
        path.join('data', loop_list), chrom)

    # Stack into rows of [chrom1, start1, end1, chrom2, start2, end2]
    loci_rel_chroms = np.column_stack(
        (chroms[:, 0], loci_rel[:, 0:2], chroms[:, 1], loci_rel[:, 2:4]))

    # Create results
    results = {'loci': rel_loci_2_obj(loci_rel_chroms)}

    return JsonResponse(results)
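
For context, here is a minimal sketch of how a view like `loci` might be wired up and queried. The URL route, module layout, and parameter values below are assumptions for illustration, not part of the example above.

# urls.py -- hypothetical routing for the loci view (route and module names are assumed)
from django.urls import path

from fragments import views  # assumed module layout

urlpatterns = [
    path('fragments/loci/', views.loci, name='fragments-loci'),
]

# A client could then request, for example:
#   GET /fragments/loci/?chrom=chr1&loop-list=loops.txt
# and receive a JSON body of the form {"loci": [...]}.
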
Example #2
import hashlib
import pickle
from os import path

import numpy as np
from django.http import JsonResponse

# Project-specific names (Tileset, rdb, logger, SUPPORTED_MEASURES and the
# get_*/calc_measure_* helpers) are assumed to be imported from the
# surrounding application.


def fragments_by_chr(request):
    chrom = request.GET.get('chrom', False)
    cooler_file = request.GET.get('cooler', False)
    loop_list = request.GET.get('loop-list', False)

    if cooler_file:
        if cooler_file.endswith('.cool'):
            # A plain .cool filename is resolved relative to the data directory
            cooler_file = path.join('data', cooler_file)
        else:
            # Otherwise treat the value as a tileset UUID and look it up
            try:
                cooler_file = Tileset.objects.get(
                    uuid=cooler_file).datafile.path
            except (Tileset.DoesNotExist, AttributeError):
                return JsonResponse(
                    {'error': 'Cooler file not in database'}, status=500)
    else:
        return JsonResponse(
            {'error': 'Cooler file not specified'}, status=500)

    try:
        measures = request.GET.getlist('measures', [])
    except ValueError:
        measures = []

    try:
        zoomout_level = int(request.GET.get('zoomout-level', -1))
    except ValueError:
        zoomout_level = -1

    try:
        limit = int(request.GET.get('limit', -1))
    except ValueError:
        limit = -1

    try:
        precision = int(request.GET.get('precision', False))
    except ValueError:
        precision = False

    try:
        no_cache = bool(request.GET.get('no-cache', False))
    except ValueError:
        no_cache = False

    try:
        for_config = bool(request.GET.get('for-config', False))
    except ValueError:
        for_config = False

    # Derive a cache key that is unique for this combination of query params
    uuid = hashlib.md5('-'.join([
        cooler_file, chrom, loop_list,
        str(limit),
        str(precision),
        str(zoomout_level)
    ]).encode('utf-8')).hexdigest()

    # Return a cached result if one exists
    if not no_cache:
        try:
            results = rdb.get('frag_by_chrom_%s' % uuid)

            if results:
                return JsonResponse(pickle.loads(results))
        except Exception:
            # Cache lookups are best effort; fall through and recompute
            pass

    # Get relative loci
    try:
        (loci_rel, chroms) = get_intra_chr_loops_from_looplist(
            path.join('data', loop_list), chrom)
    except Exception as e:
        return JsonResponse(
            {
                'error': 'Could not retrieve loci.',
                'error_message': str(e)
            },
            status=500)

    # Convert to chromosome-relative loci list
    loci_rel_chroms = np.column_stack(
        (chroms[:, 0], loci_rel[:, 0:2], chroms[:, 1], loci_rel[:, 2:4]))

    if limit > 0:
        loci_rel_chroms = loci_rel_chroms[:limit]

    # Get fragments
    try:
        matrices = get_frag_by_loc(cooler_file,
                                   loci_rel_chroms,
                                   zoomout_level=zoomout_level)
    except Exception as e:
        return JsonResponse(
            {
                'error': 'Could not retrieve fragments.',
                'error_message': str(e)
            },
            status=500)

    if precision > 0:
        matrices = np.around(matrices, decimals=precision)

    fragments = []

    loci_struct = rel_loci_2_obj(loci_rel_chroms)

    # Check supported measures
    measures_applied = []
    for measure in measures:
        if measure in SUPPORTED_MEASURES:
            measures_applied.append(measure)

    for i, matrix in enumerate(matrices):
        measures_values = []

        # Compute only the supported measures so the values line up with the
        # `measures` list reported in the results
        for measure in measures_applied:
            if measure == 'distance-to-diagonal':
                measures_values.append(calc_measure_dtd(
                    matrix, loci_struct[i]))

            elif measure == 'size':
                measures_values.append(
                    calc_measure_size(matrix, loci_struct[i]))

            elif measure == 'noise':
                measures_values.append(calc_measure_noise(matrix))

            elif measure == 'sharpness':
                measures_values.append(calc_measure_sharpness(matrix))

        frag_obj = {
            # 'matrix': matrix.tolist()
        }

        frag_obj.update(loci_struct[i])
        frag_obj.update({'measures': measures_values})
        fragments.append(frag_obj)

    # Create results
    results = {
        'count': matrices.shape[0],
        'dims': matrices.shape[1],
        'fragments': fragments,
        'measures': measures_applied,
        'relativeLoci': True,
        'zoomoutLevel': zoomout_level
    }

    if for_config:
        results['fragmentsHeader'] = [
            'chrom1', 'start1', 'end1', 'strand1', 'chrom2', 'start2', 'end2',
            'strand2'
        ] + measures_applied

        fragments_arr = []
        for fragment in fragments:
            tmp = [
                fragment['chrom1'],
                fragment['start1'],
                fragment['end1'],
                fragment['strand1'],
                fragment['chrom2'],
                fragment['start2'],
                fragment['end2'],
                fragment['strand2'],
            ] + fragment['measures']

            fragments_arr.append(tmp)

        results['fragments'] = fragments_arr

    # Cache results for 30 minutes
    try:
        rdb.set('frag_by_chrom_%s' % uuid, pickle.dumps(results), 60 * 30)
    except Exception as ex:
        # Caching is not critical, so log the error and keep going
        logger.warning(ex)

    return JsonResponse(results)
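
As a quick illustration of the query parameters this view reads, here is a hedged client sketch. The host and URL route are assumptions, and the cooler file, loop list, and measure names are placeholders rather than values taken from the example.

# Hypothetical client call; the host and route are assumed, not taken from
# the view above.
import requests

resp = requests.get(
    'http://localhost:8000/fragments/fragments_by_chr/',
    params={
        'cooler': 'example.cool',       # resolved under the server's data/ dir
        'chrom': 'chr1',
        'loop-list': 'loops.txt',
        'measures': ['size', 'noise'],  # repeated params, read via getlist()
        'precision': 2,
        'limit': 10,
    },
)
data = resp.json()
print(data['count'], data['dims'])      # number of fragments and matrix size
for frag in data['fragments']:
    print(frag['chrom1'], frag['start1'], frag['end1'], frag['measures'])
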