Example #1
    def get_coolers(self, table, res=1000000):

        names = table['lib_name'].values
        cool_dict = defaultdict(list)

        for name in names:
            if name not in self.metadata['lib_name'].values:
                print(f'Name: {name} not found in metadata. Skipping')
                continue

            cool_dict['lib_name'].append(name)
            found = False
            for cpath in self.cooler_paths:
                fname = f'{name}.hg38.mapq_30.1000.mcool'
                if fname in os.listdir(cpath):
                    found = True
                    cool = Cooler(
                        os.path.join(cpath, f'{fname}::/resolutions/{res}'))
                    cool_dict[f'cooler_{res}'].append(cool)
                    # stop after the first match so the column gets exactly one entry per library
                    break

            if not found:
                print(
                    f'Cooler not found matching {name}. Appending np.nan to appropriate row'
                )
                cool_dict[f'cooler_{res}'].append(np.nan)

        df = pd.DataFrame(cool_dict)
        df = table.copy(deep=True).merge(df, on='lib_name', how='outer')
        return df
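A hedged usage sketch for the method above, assuming it belongs to a wrapper class (called CoolerCollection here purely for illustration) that carries the metadata DataFrame and cooler_paths list it references; the tables and the path are placeholders:

import pandas as pd

# hypothetical driver code; CoolerCollection, the tables and the path are illustrative
metadata_df = pd.DataFrame({'lib_name': ['libA', 'libB']})
samples_df = pd.DataFrame({'lib_name': ['libA', 'libB', 'libC']})
collection = CoolerCollection(metadata=metadata_df, cooler_paths=['/data/coolers/'])

annotated = collection.get_coolers(samples_df, res=100000)
# libraries whose .mcool file was not found carry np.nan in the 'cooler_100000' column
print(annotated[['lib_name', 'cooler_100000']].head())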
Example #2
def matrix_balance(cool_uri,
                   nproc=1,
                   chunksize=int(1e7),
                   mad_max=5,
                   min_nnz=10,
                   min_count=0,
                   ignore_diags=1,
                   tol=1e-5,
                   max_iters=1000):
    '''
    Perform separate matrix balancing for regions with different copy numbers
    and output the bias vector in the "sweight" column.
    '''
    cool_path, group_path = util.parse_cooler_uri(cool_uri)
    # Overwrite the existing sweight column
    with h5py.File(cool_path, 'r+') as h5:
        grp = h5[group_path]
        if 'sweight' in grp['bins']:
            del grp['bins']['sweight']

    clr = Cooler(cool_uri)

    try:
        if nproc > 1:
            pool = balance.Pool(nproc)
            map_ = pool.imap_unordered
        else:
            map_ = map

        bias, stats = iterative_correction(clr,
                                           chunksize=chunksize,
                                           tol=tol,
                                           min_nnz=min_nnz,
                                           min_count=min_count,
                                           mad_max=mad_max,
                                           max_iters=max_iters,
                                           ignore_diags=ignore_diags,
                                           rescale_marginals=True,
                                           use_lock=False,
                                           map=map_)
    finally:
        if nproc > 1:
            pool.close()

    if not stats['converged']:
        logger.error('Iteration limit reached without convergence')
        logger.error('Storing final result. Check log to assess convergence.')

    with h5py.File(cool_path, 'r+') as h5:
        grp = h5[group_path]
        # add the bias column to the file
        h5opts = dict(compression='gzip', compression_opts=6)
        grp['bins'].create_dataset('sweight', data=bias, **h5opts)
        grp['bins']['sweight'].attrs.update(stats)
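A hedged usage sketch for matrix_balance; the .mcool URI and the parameter values below are placeholders:

# hypothetical invocation; the file and resolution are placeholders
uri = 'sample.hg38.mapq_30.1000.mcool::/resolutions/1000000'
matrix_balance(uri, nproc=4, mad_max=5, min_nnz=10, ignore_diags=2)

# the scale factors written above now sit in the 'sweight' bin column
clr = Cooler(uri)
print(clr.bins()[:5])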
Example #3
def correlate(x_mcool, y_mcool, output_prefix, resolution):
    from pore_c.analyses.matrix import correlate
    from cooler import Cooler

    file_paths = catalogs.MatrixCorrelationCatalog.generate_paths(
        output_prefix)

    x_cool = Cooler(str(x_mcool) + f"::/resolutions/{resolution}")
    y_cool = Cooler(str(y_mcool) + f"::/resolutions/{resolution}")

    x_chrom_names = set(x_cool.chromnames)
    y_chrom_names = set(y_cool.chromnames)

    if x_chrom_names != y_chrom_names:
        x_not_y = x_chrom_names - y_chrom_names
        y_not_x = y_chrom_names - x_chrom_names
        if x_not_y and y_not_x:
            raise ValueError(
                f"Chromosomes are not sub/supersets x:{x_not_y}, y:{y_not_x}")
        elif x_not_y:
            logger.warning(
                f"Extra chromosomes in x, will not be included in calculations: {x_not_y}"
            )
        else:
            logger.warning(
                f"Extra chromosomes in y, will not be included in calculations: {y_not_x}"
            )

    metadata = correlate(x_cool,
                         y_cool,
                         xy_path=file_paths["xy"],
                         coefficients_path=file_paths["coefficients"],
                         resolution=resolution)
    metadata["resolution"] = resolution
    metadata["x"]["path"] = str(x_mcool)
    metadata["y"]["path"] = str(y_mcool)

    matrix_cat = catalogs.MatrixCorrelationCatalog.create(
        file_paths, metadata, {})
    logger.info(str(matrix_cat))
Example #4
    def __init__(self, fan_pin, peltier_pin, step_pin, thermometer_pin):
        self.cooler = Cooler(Pin(fan_pin, Pin.OUT), Pin(peltier_pin, Pin.OUT))
        self.cooler.fanOn()
        self.cooler.coolerHigh()

        self.pid = Pid(17, 1, 0.02, 0, 0.995)  # (temperature, P, I, D, memory factor)
        self.thermometer = Thermometer(thermometer_pin)
        self.pump = PWMPump(Pin(step_pin, Pin.OUT))
        # ramp the pump frequency up gradually; start at 1500 Hz (0 Hz is not a usable PWM frequency)
        for i in range(1, 10):
            self.pump.pwm.freq(i * 1500)
            utime.sleep(0.1)
        self.pump.pwm.freq(15000)
Example #5
def parse_cooler(
    cooler_file: str, 
    regions: Dict[str, np.ndarray]
) -> Tuple[Cooler, List[np.ndarray]]:
    # Load cooler
    c = Cooler(cooler_file)

    # Fetch relevant bin_ids from the cooler file
    b_ids = fetch_bins_from_cooler(c, regions)
    # Identify unique bin_ids and isolate disjoint regions
    slices = get_unique_bins(b_ids)

    return c, slices
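parse_cooler relies on two helpers that are not shown here, fetch_bins_from_cooler and get_unique_bins. A minimal sketch of what they might do, assuming regions maps chromosome names to arrays of (start, end) intervals; this is an assumption for illustration, not the original implementation:

import numpy as np
from cooler import Cooler


def fetch_bins_from_cooler(clr: Cooler, regions):
    # collect the bin ids covered by every requested interval
    bin_ids = []
    for chrom, intervals in regions.items():
        for start, end in intervals:
            lo, hi = clr.extent((chrom, int(start), int(end)))
            bin_ids.append(np.arange(lo, hi))
    return bin_ids


def get_unique_bins(b_ids):
    # deduplicate and split into runs of consecutive bin ids (disjoint slices)
    all_ids = np.unique(np.concatenate(b_ids))
    breaks = np.where(np.diff(all_ids) > 1)[0] + 1
    return np.split(all_ids, breaks)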
Example #6
    def __init__(self, source_uri, bins, chunksize, batchsize, map=map):
        from cooler.api import Cooler
        self._map = map
        self.source_uri = source_uri
        self.chunksize = chunksize
        self.batchsize = batchsize

        clr = Cooler(source_uri)
        self._size = clr.info['nnz']
        self.old_binsize = clr.binsize
        self.old_chrom_offset = clr._load_dset('indexes/chrom_offset')
        self.old_bin1_offset = clr._load_dset('indexes/bin1_offset')
        self.gs = GenomeSegmentation(clr.chromsizes, bins)
        self.new_binsize = get_binsize(bins)
        assert self.new_binsize % self.old_binsize == 0
        self.factor = self.new_binsize // self.old_binsize
Example #7
    def _aggregate(self, span):
        from cooler.api import Cooler
        lo, hi = span

        clr = Cooler(self.source_uri)
        # convert_enum=False returns chroms as raw ints
        table = clr.pixels(join=True, convert_enum=False)
        chunk = table[lo:hi]
        # logger.info('{} {}'.format(lo, hi))
        print('{} {}'.format(lo, hi))

        # use the "start" point as anchor for re-binning
        # XXX - alternatives: midpoint anchor, proportional re-binning
        binsize = self.gs.binsize
        chrom_binoffset = self.gs.chrom_binoffset
        chrom_abspos = self.gs.chrom_abspos
        start_abspos = self.gs.start_abspos

        chrom_id1 = chunk['chrom1'].values
        chrom_id2 = chunk['chrom2'].values
        start1 = chunk['start1'].values
        start2 = chunk['start2'].values
        if binsize is None:
            abs_start1 = chrom_abspos[chrom_id1] + start1
            abs_start2 = chrom_abspos[chrom_id2] + start2
            chunk['bin1_id'] = np.searchsorted(
                start_abspos, abs_start1, side='right') - 1
            chunk['bin2_id'] = np.searchsorted(
                start_abspos, abs_start2, side='right') - 1
        else:
            rel_bin1 = np.floor(start1 / binsize).astype(int)
            rel_bin2 = np.floor(start2 / binsize).astype(int)
            chunk['bin1_id'] = chrom_binoffset[chrom_id1] + rel_bin1
            chunk['bin2_id'] = chrom_binoffset[chrom_id2] + rel_bin2

        grouped = chunk.groupby(['bin1_id', 'bin2_id'], sort=False)
        return grouped['count'].sum().reset_index()
Example #8
from flask import Flask, redirect, url_for, render_template, request

from cooler import Cooler

# globals
app = Flask(__name__)
cooler = Cooler(21, 20, 16)


@app.route('/', methods=('GET', 'POST'))
def index():
    if request.method == 'GET':
        return render_template('index.html', **cooler.__dict__)

    if request.method == 'POST':
        cooler.control_mode = request.form['mode']

        if request.form['mode'] == 'automatic':
            # TODO: check rationality of thresholds
            try:
                cooler.min_threshold = float(request.form['min_threshold'])
            except (KeyError, ValueError):
                pass
            try:
                cooler.max_threshold = float(request.form['max_threshold'])
            except (KeyError, ValueError):
                pass
            cooler.set_speed(False)
            return redirect('/update')

        if request.form['mode'] == 'manual':
            # assumption: in manual mode the speed submitted via the form is applied directly
            try:
                cooler.set_speed(float(request.form['speed']))
            except (KeyError, ValueError):
                pass
            return redirect('/update')
Example #9
#!/usr/bin/env python
# encoding: utf-8

from cooler import Cooler

if __name__ == '__main__':
    near_cool = Cooler()
    near_cool.run()
Example #10
def export_to_cooler(
    contact_table,
    output_prefix,
    cooler_resolution,
    fragment_table,
    chromsizes,
    query,
    query_columns=None,
    by_haplotype=False,
):

    results = []
    if query_columns:
        columns = query_columns[:]
    else:
        columns = []
    columns.extend(["align1_fragment_id", "align2_fragment_id"])
    if by_haplotype:
        columns.extend(["align1_haplotype", "align2_haplotype"])
    contact_df = dd.read_parquet(contact_table,
                                 engine=PQ_ENGINE,
                                 version=PQ_VERSION,
                                 columns=columns,
                                 index=False)
    if query:
        contact_df = contact_df.query(query)

    chrom_dict = pd.read_csv(chromsizes,
                             sep="\t",
                             header=None,
                             names=["chrom", "size"],
                             index_col=["chrom"],
                             squeeze=True)
    # create even-width bins using cooler
    bins_df = binnify(chrom_dict, cooler_resolution)
    bins_df.index.name = "bin_id"
    # convert to ranges for overlap
    bins = pr.PyRanges(bins_df.reset_index().rename(columns={
        "start": "Start",
        "end": "End",
        "chrom": "Chromosome"
    }))

    fragment_df = dd.read_parquet(fragment_table,
                                  engine=PQ_ENGINE,
                                  version=PQ_VERSION).compute()
    midpoint_df = pr.PyRanges(
        fragment_df.reset_index()[[
            "chrom", "start", "end", "fragment_id"
        ]].assign(start=lambda x: ((x.start + x.end) * 0.5).round(0).astype(
            int)).eval("end = start + 1").rename(columns={
                "chrom": "Chromosome",
                "start": "Start",
                "end": "End"
            }))
    # use a pyranges join to assign fragments to bins
    fragment_to_bin = midpoint_df.join(
        bins, how="left").df[["fragment_id", "bin_id"]]
    fragment_to_bin = fragment_to_bin.set_index(
        "fragment_id").sort_index()  # .astype(np.uint32)
    nulls = fragment_to_bin["bin_id"] == -1
    if nulls.any():
        logger.warning(
            "Some fragments did not overlap bins, removing from analysis:\n{}".
            format(fragment_to_bin[nulls].join(fragment_df)))
        fragment_to_bin = fragment_to_bin[~nulls]

    # use a join to assign each end of a contact to a bin
    binned_contacts = (contact_df.merge(
        fragment_to_bin,
        how="inner",
        right_index=True,
        left_on="align1_fragment_id").merge(
            fragment_to_bin,
            how="inner",
            right_index=True,
            left_on="align2_fragment_id",
            suffixes=[None, "_2"]).rename(columns={
                "bin_id": "bin1_id",
                "bin_id_2": "bin2_id"
            }))

    if not by_haplotype:
        cooler_path = output_prefix + ".cool"
        # group size == number of contacts per bin_pair
        pixels = binned_contacts.groupby(
            ["bin1_id",
             "bin2_id"]).size().rename("count").astype(np.int32).reset_index()
        create_cooler(cooler_path,
                      bins_df,
                      pixels,
                      ordered=True,
                      symmetric_upper=True,
                      ensure_sorted=True)
        c = Cooler(cooler_path)
        logger.info(f"Created cooler: {c.info}")
        results.append(cooler_path)
    else:
        tmp_parquet = output_prefix + ".tmp.pq"
        pixels = (
            # create a key to group by haplotype pair; the order of haplotypes doesn't matter
            binned_contacts.assign(
                hap_key=lambda x: x[["align1_haplotype", "align2_haplotype"]
                                    ].apply(lambda y: "{}_{}".format(*sorted(
                                        y)).replace("-1", "nohap"),
                                            axis=1,
                                            meta="object")
            ).groupby(["hap_key", "bin1_id",
                       "bin2_id"]).size().rename("count").astype(
                           np.int32
                       ).reset_index().astype({"hap_key": "category"}))

        # save to a temporary parquet file, this might not be necessary
        # but want to avoid the whole contact matrix hitting memory
        pixels.to_parquet(
            tmp_parquet,
            write_metadata_file=True,
            partition_on=["hap_key"],
            write_index=False,
            engine=PQ_ENGINE,
            version=PQ_VERSION,
        )

        pixels = dd.read_parquet(tmp_parquet,
                                 engine=PQ_ENGINE,
                                 version=PQ_VERSION,
                                 columns=["hap_key"],
                                 index=False)
        hap_keys = pixels["hap_key"].unique().compute()
        # create a cooler for each haplotype pair
        for hap_key in hap_keys:
            cooler_path = f"{output_prefix}.{hap_key}.cool"
            pixels = dd.read_parquet(
                tmp_parquet,
                filters=[("hap_key", "==", hap_key)],
                index=False,
                engine=PQ_ENGINE,
                version=PQ_VERSION,
                columns=["bin1_id", "bin2_id", "count"],
            )
            create_cooler(cooler_path,
                          bins_df,
                          pixels,
                          ordered=True,
                          symmetric_upper=True,
                          ensure_sorted=True)
            c = Cooler(cooler_path)
            logger.info(f"Created cooler: {c.info}")
            results.append(cooler_path)

        shutil.rmtree(tmp_parquet)

    return results
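A hedged call sketch for export_to_cooler; the paths are placeholders, the query is left empty, and PQ_ENGINE, PQ_VERSION, logger and the imports used above (dd, pr, binnify, create_cooler, Cooler) are assumed to be defined in the surrounding module:

# hypothetical invocation; every path below is a placeholder
paths = export_to_cooler(
    contact_table='run1.contacts.parquet',
    output_prefix='run1.matrix',
    cooler_resolution=1000000,
    fragment_table='run1.fragments.parquet',
    chromsizes='hg38.chrom.sizes',
    query=None,
    query_columns=None,
    by_haplotype=False,
)
for p in paths:
    print('wrote', p)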
Example #11
def main():
    """Balance of plant of a boiling water nuclear reactor.

    Attributes
    ----------
    end_time: float
        End of the flow time in SI unit.
    time_step: float
        Size of the time step between port communications in SI unit.
    use_mpi: bool
        If set to `True` use MPI otherwise use Python multiprocessing.

    """

    # Preamble

    end_time = 30.0 * unit.minute
    time_step = 30.0  # seconds
    show_time = (True, 5 * unit.minute)

    use_mpi = False  # True for MPI; False for Python multiprocessing
    plot_results = True  # True for enabling plotting section below
    params = get_params()  # parameters for BoP BWR

    #*****************************************************************************
    # Define Cortix system

    # System top level
    plant = Cortix(use_mpi=use_mpi, splash=True)

    # Network
    plant_net = plant.network = Network()

    params['start-time'] = 0.0
    params['end-time'] = end_time
    params['shutdown-time'] = 999.0 * unit.hour
    params['shutdown-mode'] = False
    #*****************************************************************************
    # Create reactor module
    reactor = BWR(params)

    reactor.name = 'BWR'
    reactor.save = True
    reactor.time_step = time_step
    reactor.end_time = end_time
    reactor.show_time = show_time
    reactor.RCIS = True

    # Add reactor module to network
    plant_net.module(reactor)

    #*****************************************************************************
    # Create turbine high pressure module
    params['turbine_inlet_pressure'] = 2
    params['turbine_outlet_pressure'] = 0.5
    params['high_pressure_turbine'] = True

    #params_turbine = reactor.params
    #params_turbine.inlet_pressure = 2
    #params.turbine_outlet_pressure = 0.5

    turbine_hp = Turbine(params)

    turbine_hp.name = 'High Pressure Turbine'
    turbine_hp.save = True
    turbine_hp.time_step = time_step
    turbine_hp.end_time = end_time

    # Add turbine high pressure module to network
    plant_net.module(turbine_hp)

    #*****************************************************************************
    # Create turbine low pressure module
    params['turbine_inlet_pressure'] = 0.5
    params['turbine_outlet_pressure'] = 0.005
    params['high_pressure_turbine'] = False
    params['steam flowrate'] = params['steam flowrate'] / 2

    turbine_lp1 = Turbine(params)

    turbine_lp1.name = 'Low Pressure Turbine 1'
    turbine_lp1.save = True
    turbine_lp1.time_step = time_step
    turbine_lp1.end_time = end_time

    plant_net.module(turbine_lp1)

    #*****************************************************************************
    # Create turbine low pressure module
    params['turbine_inlet_pressure'] = 0.5
    params['turbine_outlet_pressure'] = 0.005
    params['high_pressure_turbine'] = False

    turbine_lp2 = Turbine(params)

    turbine_lp2.name = 'Low Pressure Turbine 2'
    turbine_lp2.save = True
    turbine_lp2.time_step = time_step
    turbine_lp2.end_time = end_time

    plant_net.module(turbine_lp2)

    #*****************************************************************************
    # Create condenser module
    params['steam flowrate'] = params['steam flowrate'] * 2

    condenser = Condenser()

    condenser.name = 'Condenser'
    condenser.save = True
    condenser.time_step = time_step
    condenser.end_time = end_time

    plant_net.module(condenser)

    #*****************************************************************************
    params['RCIS-shutdown-time'] = 5 * unit.minute
    rcis = Cooler(params)
    rcis.name = 'RCIS'
    rcis.save = True
    rcis.time_step = time_step
    rcis.end_time = end_time

    plant_net.module(rcis)

    #*****************************************************************************
    # Create the BoP network connectivity
    plant_net.connect([reactor, 'coolant-outflow'], [turbine_hp, 'inflow'])
    plant_net.connect([turbine_hp, 'outflow-1'], [turbine_lp1, 'inflow'])
    plant_net.connect([turbine_hp, 'outflow-2'], [turbine_lp2, 'inflow'])
    plant_net.connect([turbine_lp1, 'outflow-1'], [condenser, 'inflow-1'])
    plant_net.connect([turbine_lp2, 'outflow-1'], [condenser, 'inflow-2'])
    plant_net.connect([condenser, 'outflow'], [reactor, 'coolant-inflow'])
    plant_net.connect([reactor, 'RCIS-outflow'], [rcis, 'coolant-inflow'])
    plant_net.connect([rcis, 'coolant-outflow'], [reactor, 'RCIS-inflow'])
    #plant_net.connect([rcis, 'signal-in'], [reactor, 'signal-out'])

    plant_net.draw(engine='dot', node_shape='folder')
    #*****************************************************************************
    # Run network dynamics simulation
    plant.run()

    #*****************************************************************************
    # Plot results

    if plot_results and (plant.use_multiprocessing or plant.rank == 0):

        # Reactor plots
        reactor = plant_net.modules[0]

        (quant, time_unit
         ) = reactor.neutron_phase.get_quantity_history('neutron-dens')
        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')

        plt.grid()
        plt.savefig('startup-neutron-dens.png', dpi=300)

        (quant, time_unit
         ) = reactor.neutron_phase.get_quantity_history('delayed-neutrons-cc')
        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')

        plt.grid()
        plt.savefig('startup-delayed-neutrons-cc.png', dpi=300)

        (quant, time_unit
         ) = reactor.coolant_outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')

        plt.grid()
        plt.savefig('startup-coolant-outflow-temp.png', dpi=300)

        (quant,
         time_unit) = reactor.reactor_phase.get_quantity_history('fuel-temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')
        plt.grid()
        plt.savefig('startup-fuel-temp.png', dpi=300)

        # Turbine high pressure plots
        turbine_hp = plant_net.modules[1]

        (quant,
         time_unit) = turbine_hp.outflow_phase.get_quantity_history('power')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='High Pressure Turbine Power')
        plt.grid()
        plt.savefig('startup-turbine-hp-power.png', dpi=300)

        (quant,
         time_unit) = turbine_hp.outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='High Pressure Turbine Outflow Temperature')
        plt.grid()
        plt.savefig('startup-turbine-hp-outflow-temp.png', dpi=300)

        # Turbine low pressure graphs
        turbine_lp1 = plant_net.modules[2]

        (quant,
         time_unit) = turbine_lp1.outflow_phase.get_quantity_history('power')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='Lower Pressure Turbine 1 Power')
        plt.grid()
        plt.savefig('startup-turbine-lp1-power.png', dpi=300)

        (quant,
         time_unit) = turbine_lp1.outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='Lower Pressure Turbine 1 Outflow Temperature')
        plt.grid()
        plt.savefig('startup-turbine-lp1-outflow-temp.png', dpi=300)

        # Condenser graphs
        condenser = plant_net.modules[4]

        (quant,
         time_unit) = condenser.outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')
        plt.grid()
        plt.savefig('startup-condenser-outflow-temp.png', dpi=300)

    # capture end-of-run values to initialize the follow-on shutdown simulation
    turbine1_outflow_temp = turbine_hp.outflow_phase.get_value(
        'temp', end_time)
    turbine1_chi = turbine_hp.outflow_phase.get_value('quality', end_time)
    turbine1_power = turbine_hp.outflow_phase.get_value('power', end_time)

    turbine2_outflow_temp = turbine_lp1.outflow_phase.get_value(
        'temp', end_time)
    turbine2_chi = turbine_lp1.outflow_phase.get_value('quality', end_time)
    turbine2_power = turbine_lp1.outflow_phase.get_value('power', end_time)

    condenser_runoff_temp = condenser.outflow_phase.get_value('temp', end_time)

    delayed_neutron_cc = reactor.neutron_phase.get_value(
        'delayed-neutrons-cc', end_time)
    n_dens = reactor.neutron_phase.get_value('neutron-dens', end_time)
    fuel_temp = reactor.reactor_phase.get_value('fuel-temp', end_time)
    coolant_temp = reactor.coolant_outflow_phase.get_value('temp', end_time)
    # Values loaded into params when they are needed (module instantiation)

    # Properly shutdown simulation
    plant.close()

    # Now we run shutdown as a separate simulation with starting parameters equal to the ending
    # values of the startup simulation

    #**************************************************************************************************

    # Preamble

    start_time = 0.0 * unit.minute
    end_time = 60 * unit.minute
    time_step = 30.0  # seconds
    show_time = (True, 5 * unit.minute)

    use_mpi = False  # True for MPI; False for Python multiprocessing
    plot_results = True  # True for enabling plotting section below
    params = get_params()  # clear params, just to be safe

    #*****************************************************************************
    # Define Cortix system

    # System top level
    plant = Cortix(use_mpi=use_mpi, splash=True)

    # Network
    plant_net = plant.network = Network()

    params['start-time'] = start_time
    params['end-time'] = end_time
    params['shutdown-time'] = 0.0
    params['shutdown-mode'] = True

    #*****************************************************************************
    # Create reactor module
    params['delayed-neutron-cc'] = delayed_neutron_cc
    params['n-dens'] = n_dens
    params['fuel-temp'] = fuel_temp
    params['coolant-temp'] = coolant_temp
    params['operating-mode'] = 'shutdown'
    reactor = BWR(params)

    reactor.name = 'BWR'
    reactor.save = True
    reactor.time_step = time_step
    reactor.end_time = end_time
    reactor.show_time = show_time
    reactor.RCIS = False

    # Add reactor module to network
    plant_net.module(reactor)

    #*****************************************************************************
    # Create turbine high pressure module
    params['turbine_inlet_pressure'] = 2
    params['turbine_outlet_pressure'] = 0.5
    params['high_pressure_turbine'] = True
    params['turbine-outflow-temp'] = turbine1_outflow_temp
    params['turbine-chi'] = turbine1_chi
    params['turbine-work'] = turbine1_power
    params['turbine-inflow-temp'] = coolant_temp

    #params_turbine = reactor.params
    #params_turbine.inlet_pressure = 2
    #params.turbine_outlet_pressure = 0.5

    turbine_hp = Turbine(params)

    turbine_hp.name = 'High Pressure Turbine'
    turbine_hp.save = True
    turbine_hp.time_step = time_step
    turbine_hp.end_time = end_time

    # Add turbine high pressure module to network
    plant_net.module(turbine_hp)

    #*****************************************************************************
    # Create turbine low pressure 1 module
    params['turbine_inlet_pressure'] = 0.5
    params['turbine_outlet_pressure'] = 0.005
    params['high_pressure_turbine'] = False
    params['steam flowrate'] = params['steam flowrate'] / 2
    params['turbine-outflow-temp'] = turbine2_outflow_temp
    params['turbine-inflow-temp'] = turbine1_outflow_temp
    params['turbine-chi'] = turbine2_chi
    params['turbine-work'] = turbine2_power

    turbine_lp1 = Turbine(params)

    turbine_lp1.name = 'Low Pressure Turbine 1'
    turbine_lp1.save = True
    turbine_lp1.time_step = time_step
    turbine_lp1.end_time = end_time

    plant_net.module(turbine_lp1)

    #*****************************************************************************
    # Create turbine low pressure 2 module
    params['turbine_inlet_pressure'] = 0.5
    params['turbine_outlet_pressure'] = 0.005
    params['high_pressure_turbine'] = False

    turbine_lp2 = Turbine(params)

    turbine_lp2.name = 'Low Pressure Turbine 2'
    turbine_lp2.save = True
    turbine_lp2.time_step = time_step
    turbine_lp2.end_time = end_time

    plant_net.module(turbine_lp2)

    #*****************************************************************************
    # Create condenser module
    params['steam flowrate'] = params['steam flowrate'] * 2
    params['condenser-runoff-temp'] = condenser_runoff_temp
    condenser = Condenser()

    condenser.name = 'Condenser'
    condenser.save = True
    condenser.time_step = time_step
    condenser.end_time = end_time

    plant_net.module(condenser)

    #*****************************************************************************
    params['RCIS-shutdown-time'] = -1 * unit.minute
    rcis = Cooler(params)
    rcis.name = 'RCIS'
    rcis.save = True
    rcis.time_step = time_step
    rcis.end_time = end_time

    plant_net.module(rcis)

    #*****************************************************************************
    # Create the BoP network connectivity
    plant_net.connect([reactor, 'coolant-outflow'], [turbine_hp, 'inflow'])
    plant_net.connect([turbine_hp, 'outflow-1'], [turbine_lp1, 'inflow'])
    plant_net.connect([turbine_hp, 'outflow-2'], [turbine_lp2, 'inflow'])
    plant_net.connect([turbine_lp1, 'outflow-1'], [condenser, 'inflow-1'])
    plant_net.connect([turbine_lp2, 'outflow-1'], [condenser, 'inflow-2'])
    plant_net.connect([condenser, 'outflow'], [reactor, 'coolant-inflow'])
    plant_net.connect([reactor, 'RCIS-outflow'], [rcis, 'coolant-inflow'])
    plant_net.connect([rcis, 'coolant-outflow'], [reactor, 'RCIS-inflow'])
    #plant_net.connect([rcis, 'signal-in'], [reactor, 'signal-out'])

    plant_net.draw(engine='dot', node_shape='folder')
    #*****************************************************************************
    # Run network dynamics simulation
    plant.run()

    #*****************************************************************************
    # Plot results

    if plot_results and (plant.use_multiprocessing or plant.rank == 0):

        # Reactor plots
        reactor = plant_net.modules[0]

        (quant, time_unit
         ) = reactor.neutron_phase.get_quantity_history('neutron-dens')
        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')

        plt.grid()
        plt.savefig('shutdown-neutron-dens.png', dpi=300)

        (quant, time_unit
         ) = reactor.neutron_phase.get_quantity_history('delayed-neutrons-cc')
        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')

        plt.grid()
        plt.savefig('shutdown-delayed-neutrons-cc.png', dpi=300)

        (quant, time_unit
         ) = reactor.coolant_outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')

        plt.grid()
        plt.savefig('shutdown-coolant-outflow-temp.png', dpi=300)

        (quant,
         time_unit) = reactor.reactor_phase.get_quantity_history('fuel-temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')
        plt.grid()
        plt.savefig('shutdown-fuel-temp.png', dpi=300)

        # Turbine high pressure plots
        turbine_hp = plant_net.modules[1]

        (quant,
         time_unit) = turbine_hp.outflow_phase.get_quantity_history('power')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='High Pressure Turbine Power')
        plt.grid()
        plt.savefig('shutdown-turbine-hp-power.png', dpi=300)

        (quant,
         time_unit) = turbine_hp.outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='High Pressure Turbine Outflow Temperature')
        plt.grid()
        plt.savefig('shutdown-turbine-hp-outflow-temp.png', dpi=300)

        # Turbine low pressure graphs
        turbine_lp1 = plant_net.modules[2]

        (quant,
         time_unit) = turbine_lp1.outflow_phase.get_quantity_history('power')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='Lower Pressure Turbine 1 Power')
        plt.grid()
        plt.savefig('shutdown-turbine-lp1-power.png', dpi=300)

        (quant,
         time_unit) = turbine_lp1.outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']',
                   title='Lower Pressure Turbine 1 Outflow Temperature')
        plt.grid()
        plt.savefig('shutdown-turbine-lp1-outflow-temp.png', dpi=300)

        # Condenser graphs
        condenser = plant_net.modules[4]

        (quant,
         time_unit) = condenser.outflow_phase.get_quantity_history('temp')

        quant.plot(x_scaling=1 / unit.minute,
                   x_label='Time [m]',
                   y_label=quant.latex_name + ' [' + quant.unit + ']')
        plt.grid()
        plt.savefig('shutdown-condenser-outflow-temp.png', dpi=300)

    # Shutdown The Simulation
    plant.close()
Example #12
        for i in range(0, len(spans), batchsize):
            try:
                lock.acquire()
                print("right before collapse ...{}  {}".format(
                    i, spans[i:i + batchsize]))
                results = self._map(self.aggregate, spans[i:i + batchsize])
            finally:
                lock.release()
            for df in results:
                # yield {k: v.values for k, v in six.iteritems(df)}
                yield df


input_uri = ""

c = Cooler(input_uri)

new_bins = binnify(c.chromsizes, 2 * c.binsize)

iterator = CoolerAggregator(input_uri, new_bins, 1000000, batchsize=1, map=map)

# # last message before it fails ...
# # INFO:cooler:17868809 17872380
# for ii in iterator:
#     print(ii)

# from cooler.api import Cooler
lo, hi = 17869999, 17872300
# lo, hi = 17868809, 17872380

clr = Cooler(input_uri)
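The snippet stops after re-opening the cooler. A hedged continuation of the same debugging session, assuming input_uri has been pointed at a real .cool file, would be to aggregate only the suspect span through the iterator built above:

# re-run the aggregation for the hard-coded suspect span only (illustrative)
chunk_result = iterator._aggregate((lo, hi))
print(chunk_result.head())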