'''
Inputs:  a schedule of events based on entered times.
Outputs: the sequence of events printed to the screen as they happen,
         plus daily flow-rate data.
'''
from toiletschedule import Schedule
from flow import Flow
import datetime
import time

workingSchedule = Schedule()
midnight = datetime.time()
nextStartTime = datetime.time()
nextEndTime = datetime.time()
meter = Flow()
counter = 1


meter.disableStepper()  # prevent the motor from burning up
while True:
    print("\n1: Run Schedule")
    print("2: Manage Schedules")
    print("3: Exit")
    option = int(input("\nPlease select an option.\n"))

    # Run schedule: set the midnight time and set the flow enable high (off).
    # Import a schedule, or sort and use the current schedule.
    if option == 1:
        currentTime = datetime.datetime.now().time().replace(microsecond=0)
        midnight = currentTime.replace(hour=23, minute=59, second=59)
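
        # The example cuts off here; a minimal sketch of how this branch might
        # continue. Schedule.nextEvent() and Flow.enableStepper() are assumed,
        # hypothetical counterparts to the methods used above.
        nextStartTime, nextEndTime = workingSchedule.nextEvent()  # assumed API
        while currentTime < midnight:
            currentTime = datetime.datetime.now().time().replace(microsecond=0)
            if nextStartTime <= currentTime < nextEndTime:
                meter.enableStepper()  # assumed: start the flow
            elif currentTime >= nextEndTime:
                meter.disableStepper()
                nextStartTime, nextEndTime = workingSchedule.nextEvent()
            time.sleep(1)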
Example #2
def on_message(client, userdata, msg):
    # flow and current_pose persist across messages
    global flow, current_pose
    # reconstructed opening: the excerpt begins mid-handler, and the intent
    # name is inferred from the Stop/Pause/Restart handlers below
    if msg.topic == 'hermes/intent/smayorquin:StartClass':
        print("StartClass Intent detected!")
        # start a sample flow; f is defined outside this excerpt
        flow = Flow(f)
        flow.run()
    elif msg.topic == 'hermes/intent/smayorquin:StopClass':
        print("StopClass Intent detected!")
        try:
            del flow
        except NameError:
            pass  # no class currently running
    elif msg.topic == 'hermes/intent/smayorquin:PauseClass':
        print("PauseClass Intent detected!")
        try:
            current_pose = flow.current_pose  # remember where the class stopped
            del flow
        except NameError:
            pass  # no class currently running
    elif msg.topic == 'hermes/intent/smayorquin:RestartClass':
        print("RestartClass Intent detected!")
        flow = Flow(f, current_pose=current_pose) 
        flow.run()

        # Need to add more features; a feature may be anything that
        # improves the application but does not slow it down.
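
# The excerpt uses on_connect, HOST, and PORT without defining them. A minimal
# sketch, assuming the paho-mqtt v1.x callback signature and a local broker
# (the HOST and PORT values are assumptions):
import paho.mqtt.client as mqtt

HOST = 'localhost'  # assumed broker address
PORT = 1883         # default MQTT port

def on_connect(client, userdata, flags, rc):
    # subscribe to every hermes intent so on_message receives them
    client.subscribe('hermes/intent/#')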
        
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect(HOST, PORT, 60)
client.loop_forever()
Example #3
def convert_model(master_spec, sess):
    # Create flow.
    flow = Flow()
    builder = FlowBuilder(sess, flow)

    # Get components.
    components = []
    connectors = {}
    for c in master_spec.component:
        component = Component(c, builder, connectors)
        components.append(component)

    # Extract components.
    for c in components:
        c.extract()

    # Sanitize names.
    for c in components:
        flow.rename_prefix(c.path() + "/", c.name + "/")
    flow.rename_suffix("/ExponentialMovingAverage:0", "")
    flow.rename_suffix(LSTM_H_IN + ":0", "h_in")
    flow.rename_suffix(LSTM_H_OUT + ":0", "h_out")
    flow.rename_suffix(LSTM_C_IN + ":0", "c_in")
    flow.rename_suffix(LSTM_C_OUT + ":0", "c_out")
    flow.rename_suffix(FF_HIDDEN + ":0", "hidden")
    flow.rename_suffix(FF_OUTPUT + ":0", "output")

    # Get external resources.
    lexicon_file = None
    prefix_file = None
    suffix_file = None
    commons_file = None
    actions_file = None
    for c in master_spec.component:
        for r in c.resource:
            if r.name == "word-vocab":
                lexicon_file = r.part[0].file_pattern
            elif r.name == "prefix-table":
                prefix_file = r.part[0].file_pattern
            elif r.name == "suffix-table":
                suffix_file = r.part[0].file_pattern
            elif r.name == "commons":
                commons_file = r.part[0].file_pattern
            elif r.name == "action-table":
                actions_file = r.part[0].file_pattern

    # Add lexicon to flow.
    if lexicon_file is not None:
        lexicon = flow.blob("lexicon")
        lexicon.type = "dict"
        lexicon.add_attr("delimiter", 10)
        lexicon.add_attr("oov", 0)
        lexicon.add_attr("normalize_digits", 1)
        lexicon.data = read_file(lexicon_file)

    # Add prefix table to flow.
    if prefix_file is not None:
        prefixes = flow.blob("prefixes")
        prefixes.type = "affix"
        prefixes.data = read_file(prefix_file)

    # Add suffix table to flow.
    if suffix_file is not None:
        suffixes = flow.blob("suffixes")
        suffixes.type = "affix"
        suffixes.data = read_file(suffix_file)

    # Add commons to flow.
    if commons_file is not None:
        commons = flow.blob("commons")
        commons.type = "frames"
        commons.data = read_file(commons_file)

    # Add action table to flow.
    if actions_file is not None:
        actions = flow.blob("actions")
        actions.type = "frames"
        actions.data = read_file(actions_file)

    return flow
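
The helper read_file used above is not shown in this example; a minimal sketch,
assuming it simply returns the raw bytes of the named file so they can be
stored in a flow blob:

def read_file(filename):
    # read the whole file as bytes for embedding in the flow
    with open(filename, 'rb') as f:
        return f.read()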
Example #4
def add_flow(pkt):
    # record the packet's source, destination, and protocol as a Flow
    flow = Flow(src_ip=pkt[IP].src, dst_ip=pkt[IP].dst, proto=pkt[IP].proto)
    flows.append(flow)
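
A possible usage sketch: add_flow assumes scapy's IP layer and a global flows
list, neither of which is defined in the snippet; sniff's filter and count
arguments below are illustrative.

from scapy.all import IP, sniff

flows = []  # collected Flow records

# capture 100 IP packets and record a Flow for each
sniff(filter="ip", prn=add_flow, count=100)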
Example #5
def main():

    # tolerance in the computation
    tol = 1e-10

    # set the mesh size and the geometric tolerance for the fracture network
    mesh_size = 1e-2
    tol_network = mesh_size
    mesh_kwargs = {
        "mesh_size_frac": mesh_size,
        "mesh_size_min": mesh_size / 20
    }

    # read and mark the original fracture network; the fracture ids will be preserved
    file_name = "network.csv"
    domain = {"xmin": 0, "xmax": 1, "ymin": -1, "ymax": 1}
    network = pp.fracture_importer.network_2d_from_csv(file_name,
                                                       domain=domain)
    # set the original id
    network.tags["original_id"] = np.arange(network.num_frac, dtype=np.int)
    # save the original network
    network_original = network.copy()

    # set the condition: for each branch, whether we solve the problem with < (1)
    # or with > (0); for simplicity we set them all equal
    network.tags["condition"] = np.ones(network.num_frac, dtype=int)

    flux_threshold = 0.15
    cond = lambda flux, op, tol=0: condition_interface(flux_threshold, flux,
                                                       op, tol)

    file_name = "case1"
    folder_name = "./non_linear/"
    variable_to_export = [
        Flow.pressure, Flow.P0_flux, "original_id", "condition"
    ]

    iteration = 0
    max_iteration = 50
    max_iteration_non_linear = 50
    max_err_non_linear = 1e-4
    okay = False
    while not okay:

        print("iteration", iteration)

        # create the grid bucket
        gb = network.mesh(mesh_kwargs,
                          dfn=True,
                          preserve_fracture_tags=["original_id", "condition"])

        # create the discretization
        discr = Flow(gb)

        # the mesh has changed, as has the interface, so do not reuse the solution
        # from the previous step; initialize the non-linear algorithm with zero
        # flux, which is equivalent to the Darcy solution at the first iteration
        for g, d in gb:
            d.update({pp.STATE: {}})
            d[pp.STATE].update({Flow.P0_flux: np.zeros((3, g.num_cells))})
            d[pp.STATE].update(
                {Flow.P0_flux + "_old": np.zeros((3, g.num_cells))})

        # non-linear problem solution with a fixed point strategy
        err_non_linear = max_err_non_linear + 1
        iteration_non_linear = 0
        while err_non_linear > max_err_non_linear and iteration_non_linear < max_iteration_non_linear:

            # solve the linearized problem
            discr.set_data(test_data())
            A, b = discr.matrix_rhs()
            x = sps.linalg.spsolve(A, b)
            discr.extract(x)

            # compute the exit condition
            all_flux = np.empty((3, 0))
            all_flux_old = np.empty((3, 0))
            all_cell_volumes = np.empty(0)
            for g, d in gb:
                # collect the current flux
                flux = d[pp.STATE][Flow.P0_flux]
                all_flux = np.hstack((all_flux, flux))
                # collect the old flux
                flux_old = d[pp.STATE][Flow.P0_flux + "_old"]
                all_flux_old = np.hstack((all_flux_old, flux_old))
                # collect the cell volumes
                all_cell_volumes = np.hstack(
                    (all_cell_volumes, g.cell_volumes))
                # save the old flux
                d[pp.STATE][Flow.P0_flux + "_old"] = flux

            # compute the error and normalize the result
            err_non_linear = np.sum(
                all_cell_volumes *
                np.linalg.norm(all_flux - all_flux_old, axis=0))
            norm_flux_old = np.sum(all_cell_volumes *
                                   np.linalg.norm(all_flux_old, axis=0))
            err_non_linear = err_non_linear / norm_flux_old if norm_flux_old != 0 else err_non_linear

            print("iteration non-linear problem", iteration_non_linear,
                  "error", err_non_linear)
            iteration_non_linear += 1

        # exporter
        save = pp.Exporter(gb, "sol_" + file_name, folder_name=folder_name)
        save.write_vtu(variable_to_export, time_step=iteration)

        # save the network points to check if we have reached convergence
        old_network_pts = network.pts

        # construct the new network such that the interfaces are respected
        network = detect_interface(gb, network, network_original, discr, cond,
                                   tol)
        # export the current network with the associated tags
        network_file_name = make_file_name(file_name, iteration)
        network.to_file(network_file_name,
                        data=network.tags,
                        folder_name=folder_name,
                        binary=False)

        # check if any point in the network has changed
        all_pts = np.hstack((old_network_pts, network.pts))
        distances = pp.distances.pointset(all_pts) > tol_network
        # consider only the block between the old and new points
        distances = distances[:old_network_pts.shape[1],
                              -network.pts.shape[1]:]
        # check if an old point has a point equal in the new set
        check = np.any(np.logical_not(distances), axis=0)

        if np.all(check) or iteration > max_iteration:
            okay = True
        iteration += 1

    save.write_pvd(np.arange(iteration), np.arange(iteration))
    write_network_pvd(file_name, folder_name, np.arange(iteration))
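
The fixed-point exit test in the loop above is a volume-weighted relative
difference between successive flux fields. A small self-contained check of the
same formula on toy arrays (all values illustrative):

import numpy as np

# toy data: 4 cells with 3-component fluxes for two successive iterates
cell_volumes = np.array([0.1, 0.2, 0.3, 0.4])
flux = np.array([[1.0, 0.5, 0.0, 0.2],
                 [0.0, 0.5, 1.0, 0.2],
                 [0.0, 0.0, 0.0, 0.1]])
flux_old = flux + 0.05  # perturbed previous iterate

# volume-weighted error, normalized exactly as in the loop above
err = np.sum(cell_volumes * np.linalg.norm(flux - flux_old, axis=0))
norm_old = np.sum(cell_volumes * np.linalg.norm(flux_old, axis=0))
err = err / norm_old if norm_old != 0 else err
print("relative error", err)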