Example #1
    def __init__(self,linkRate=1,maxStep=gobalMaxStep,injectRate=0.5,\
                     factory=gobalGridNetworkFactory):

        factory.constructNetwork(6,6)\
            .setFlow(Flow((0,0),(0,5),injectRate))\
            .setFlow(Flow((5,0),(5,5),injectRate))\
            .setFlow(Flow((2,0),(3,5),injectRate))

        network = factory.getNetwork()

        self.packetFactory = PacketFactory()
        self.simulator = \
            Simulator(network,maxStep,ConstLinkRateGenerator(linkRate),self.packetFactory)
Example #2
def validateTCPFlowSigantureHashing(entryList):
    hashToFiveTupleMap = {}
    DEL = "+"

    for entry in entryList:
        if entry.logID == const.PROTOCOL_ID and \
           entry.ip["tlp_id"] == const.TCP_ID:
            five_tuple = str(entry.ip["src_ip"]) + DEL + \
                         str(entry.ip["dst_ip"]) + DEL + \
                         str(entry.ip["tlp_id"]) + DEL + \
                         str(entry.tcp["src_port"]) + DEL + \
                         str(entry.tcp["dst_port"])
            hashed_tuple = Flow.extractFlowSignature(entry)
            if hashToFiveTupleMap.has_key(hashed_tuple):
                if five_tuple not in hashToFiveTupleMap[hashed_tuple]:
                    hashToFiveTupleMap[hashed_tuple].append(five_tuple)
            else:
                hashToFiveTupleMap[hashed_tuple] = [five_tuple]

    valid = True
    for key, value in hashToFiveTupleMap.items():
        if len(value) != 2:
            valid = False
            print "Hashed Key: " + key + "\n" + "Tuples: " + str(value)

    if valid:
        print "SUCCESS: hashing is valid !!!"
    else:
        print "ERROR: hashing is invalid !!!"
Example #3
def CounterTest(step=10000):
    injectRate = 1
    factory = GridNetworkFactory(makeCNode(0), Queues)
    factory.constructNetwork(8,8)\
        .setFlow(Flow((0,0),(0,7),injectRate))\
        .setFlow(Flow((7,0),(7,7),injectRate))

    network = factory.getNetwork()

    packetFactory = PacketFactory()
    simulator = \
    Simulator(network,step,ConstLinkRateGenerator(1),packetFactory)
    simulator.run()
    #simulator.printNetwork()
    stat = simulator.getStaticsInfo()
    print stat
Example #5
class NetGame:

    Score = 0  # can be negative or positive
    level = ""
    totalTime = 0
    localNet = Network('Network 1', 3, 5, 100)
    localManager = Manager("admin")
    MyFlow = Flow("easy")

    def __init__(self, Network, Manager):
        self.localNet = Network
        self.localManager = Manager

    def setDifficultyLevel(self, level):
        self.level = level
        self.MyFlow = Flow(self.level)
        print(self.level)

    def start(self, totalTime):
        self.totalTime = totalTime
        ## Here the main code
        input = "1"
        while (input != "0"):
            print("enter key")
            input = raw_input()
            print input
        jaafar()
Example #6
 def addFlow(self,program,ftype):
     '''
     Add a Flow on this canvas, but let the user choose
     where to place it.
     '''
     flow = Flow(self.getFlowID(),program,ftype)
     self.configure(cursor="crosshair")
     self.bind('<Button-1>',
         lambda event, arg=flow: self.__addFlowAtMouse(event,arg) )
Example #7
    def __init__(self, name, inputDataFile):
        # From here up to the completeness check, a function is needed that builds these by reading from the model generation info file
        # 8 road stocks generation
        stocks = []
        for i in range(8):
            stocks.append(
                NumberOfCarStock(
                    "[stock] the number of cars of road " + str(i), 0, None))

        # 4 input stocks
        inputStocks = []
        for i in range(4):
            inputStocks.append(
                NumberOfCarInputStock("[stock] input of road " + str(i * 2), 0,
                                      None))

        # 4 flows from 4 inputs stocks to 4 road stocks & binding
        for i in range(4):
            source = i
            target = i * 2
            newFlow = Flow("[flow] input stock " + str(source) + " to stock " +
                           str(target))
            inputStocks[source].add_output_flow(newFlow)
            stocks[target].add_input_flow(newFlow)

        # 4 flows from 4 road stocks to None & binding
        for i in range(4):
            source = i * 2 + 1
            newFlow = Flow("[flow] stock " + str(source) + " to None")
            stocks[source].add_output_flow(newFlow)

        # 4*3 flows from 4 road stocks to 4 road stocks) & binding
        for i in range(4):
            source = i * 2
            for j in range(3):
                target = (source + 1 + (j + 1) * 2) % 8
                newFlow = Flow("[flow] stock " + str(source) + " to stock " +
                               str(target))
                stocks[source].add_output_flow(newFlow)
                stocks[target].add_input_flow(newFlow)

        # stocks binding to system dynamics
        SystemDynamics.__init__(self, name, stocks + inputStocks)
        self.initialize_input_flow_configuration(inputDataFile)
Example #8
    def __init__(self,linkRate=1,maxStep=gobalMaxStep,injectRate=0.5,\
                     factory=gobalGridNetworkFactory):
        factory.constructNetwork(8,8)\
            .setFlow(Flow((2,0),(2,7),injectRate),Flow((4,0),(4,7),injectRate),
                     Flow((0,2),(7,2),injectRate),Flow((0,4),(7,4),injectRate),
                     Flow((1,1),(5,1),injectRate),Flow((6,1),(6,6),injectRate)
                     ,Flow((5,6),(1,6),injectRate),Flow((1,5),(1,2),injectRate))
        network = factory.getNetwork()

        self.packetFactory = PacketFactory()
        self.simulator = \
            Simulator(network,maxStep,ConstLinkRateGenerator(linkRate),self.packetFactory)
Example #9
 def parseFlow(self, line):
     """
     Parse a single line of text and build a flow
     """
     if line != "":
         splitted = line.split(",");
         return Flow(splitted[0].split("=")[1], splitted[1].split("=")[1], splitted[2].split("=")[1],
                     splitted[3].split("=")[1], splitted[4].split("=")[1], splitted[5].split("=")[1],
                     splitted[6].split("=")[1], splitted[7], splitted[8].split("=")[1],
                     splitted[9].split("=")[1], splitted[10].split("=")[1], splitted[11].split("=")[1],
                     splitted[12].split("=")[1], splitted[13].split("=")[1], splitted[14]);
Example #10
 def __init__(self, **kwargs):
     pygame.init()
     self.screen_info = pygame.display.Info()
     self.screen_size = kwargs.get("screen_size", (0, 0))
     self.resizable = kwargs.get("resizable", True)
     if self.resizable:
         self.screen = pygame.display.set_mode(self.screen_size, RESIZABLE)
     else:
         self.screen = pygame.display.set_mode(self.screen_size)
     self.rect = Rect((0, 0), self.screen.get_size())
     self.layout = kwargs.get("layout", None)
     if self.layout == "grid":
         Grid.__init__(self)
         self.layout = Grid
     elif self.layout == "flow":
         Flow.__init__(self)
         self.layout = Flow
     elif self.layout == "place" or self.layout is None:
         Place.__init__(self)
         self.layout = Place
     self.fullscreen = kwargs.get("fullscreen", False)
     self.last_screen_size = self.rect.size
Example #11
def flow_combine(ip_pkt_list, ip_tms_list, flow_definition):
    """
    Assemble packets into flows.
    :param ip_pkt_list: IP packets
    :param ip_tms_list: packet capture timestamps
    :param flow_definition: 1 for unidirectional flows, 2 for bidirectional flows
    :return: the list of assembled flows
    """
    flow_list = []
    src_port = None
    dst_port = None
    trans_layer_proto = None
    for (pkt_stream, tms) in zip(ip_pkt_list, ip_tms_list):
        eth = dpkt.ethernet.Ethernet(pkt_stream)
        pkt = eth.data
        src_ip = pkt.src
        dst_ip = pkt.dst
        if pkt.p == dpkt.ip.IP_PROTO_TCP:  # TCP packet
            tcp_packet = pkt.tcp
            src_port = tcp_packet.sport
            dst_port = tcp_packet.dport
            trans_layer_proto = dpkt.ip.IP_PROTO_TCP
        elif pkt.p == dpkt.ip.IP_PROTO_UDP:  # UDP packet
            udp_packet = pkt.udp
            src_port = udp_packet.sport
            dst_port = udp_packet.dport
            trans_layer_proto = dpkt.ip.IP_PROTO_UDP
        if len(flow_list) == 0:  # first packet
            flow = Flow(src_ip, dst_ip, src_port, dst_port, trans_layer_proto, eth, tms)
            flow_list.append(flow)
        else:
            flow_is_exist = False
            if flow_definition == 1:  # unidirectional flow
                for flow_unit in flow_list:
                    """
                    判断是否同流
                    """
                    if flow_unit.src_ip == src_ip and flow_unit.dst_ip == dst_ip and flow_unit.src_port == src_port and flow_unit.dst_port == dst_port:
                        flow_is_exist = True
                        flow_unit.append_packet(eth, tms)
                        break
            elif flow_definition == 2:  # bidirectional flow
                for flow_unit in flow_list:
                    if ((
                                flow_unit.src_ip == src_ip and flow_unit.dst_ip == dst_ip and flow_unit.src_port == src_port and flow_unit.dst_port == dst_port) or (
                                flow_unit.src_ip == dst_ip and flow_unit.dst_ip == src_ip and flow_unit.src_port == dst_port and flow_unit.dst_port == src_port)) and flow_unit.trans_layer_proto == trans_layer_proto:
                        flow_is_exist = True
                        flow_unit.append_packet(eth, tms)
                        break
            if not flow_is_exist:
                """
                插入新流
                """
                flow = Flow(src_ip, dst_ip, src_port, dst_port, trans_layer_proto, eth, tms)
                flow.append_packet(eth, tms)
                flow_list.append(flow)

    return flow_list
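A minimal sketch of how flow_combine could be driven from a capture file using dpkt's pcap reader; flows_from_pcap and the pcap path are hypothetical, and only flow_combine above is taken as given:

import dpkt

def flows_from_pcap(pcap_path, flow_definition=2):
    # Collect raw Ethernet frames and their capture timestamps,
    # then group them into flows (2 = bidirectional matching).
    pkt_list, tms_list = [], []
    with open(pcap_path, 'rb') as fh:
        for ts, buf in dpkt.pcap.Reader(fh):
            pkt_list.append(buf)
            tms_list.append(ts)
    return flow_combine(pkt_list, tms_list, flow_definition)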
Example #12
    def __init__(self,
                 eq_index,
                 z_coordinate,
                 is_boundary_x=False,
                 is_boundary_y=False,
                 has_well=False,
                 well_index=None):
        self.is_boundary_y = is_boundary_y
        self.is_boundary_x = is_boundary_x
        self.cell_states = [CellState(), CellState()]  # n, n + 1 layers
        self.eq_index = eq_index
        self.has_well = has_well
        self.well = Well(self, well_index,
                         horizontal=Layer.horizontal) if has_well else None
        self.z_coordinate = z_coordinate

        self.flow_array_x = np.array(
            [Flow() for _ in range(Layer.components_count)], dtype=Flow
        )  # minus flow (for oil and water), plus flow (for oil and water)
        self.flow_array_y = np.array(
            [Flow() for _ in range(Layer.components_count)], dtype=Flow)
        self.flow_array_z = np.array(
            [Flow() for _ in range(Layer.components_count)], dtype=Flow)
Example #13
def OrdTest():
    injectRate = 0.5
    factory = GridNetworkFactory(makeMNode(1), ShadowQueues)
    factory.constructNetwork(8,8)\
        .setFlow(Flow((2,0),(2,7),injectRate),Flow((4,0),(4,7),injectRate),\
                     Flow((0,2),(7,2),injectRate),Flow((0,4),(7,4),injectRate),\
                     Flow((1,1),(5,1),injectRate),Flow((6,1),(6,6),injectRate),\
                     Flow((5,6),(1,6),injectRate),Flow((1,5),(1,2),injectRate))
    network = factory.getNetwork()

    packetFactory = PacketFactory()
    simulator = \
    Simulator(network,gobalMaxStep,ConstLinkRateGenerator(1),packetFactory)
    simulator.run()
    stat = simulator.getStaticsInfo()
    print stat
Example #14
def oneRoundDelay(step=10000):
    injectRate = 0.9
    factory = GridNetworkFactory(makeSimpleNode(), Queues)
    factory.constructNetwork(6,6)\
        .setFlow(Flow((0,0),(0,5),injectRate))\
        .setFlow(Flow((5,0),(5,5),injectRate))\
        .setFlow(Flow((2,0),(3,5),injectRate))

    network = factory.getNetwork()

    packetFactory = PacketFactory()
    simulator = \
        Simulator(network,step,ConstLinkRateGenerator(1),packetFactory)
    simulator.run()
    #simulator.printNetwork()
    stat = simulator.getStaticsInfo()
    print stat['aveDelay']
    packetPool = sorted(stat['packetPool'], key=lambda p: p.getID())

    py.subplot(211)
    py.vlines([p.getCreateTime() for p in packetPool], [1],
              [p.getDelay() for p in packetPool], 'r')
    py.xlabel(r'Packet create time (bp with $\lambda$ = 0.9)')
    py.ylabel('delay')
    py.grid(True)

    injectRate = 0.9
    factory = GridNetworkFactory(makeMNode(2), Queues)
    factory.constructNetwork(6,6)\
        .setFlow(Flow((0,0),(0,5),injectRate))\
        .setFlow(Flow((5,0),(5,5),injectRate))\
        .setFlow(Flow((2,0),(3,5),injectRate))

    network = factory.getNetwork()

    packetFactory = PacketFactory()
    simulator = \
        Simulator(network,step,ConstLinkRateGenerator(1),packetFactory)
    simulator.run()
    #simulator.printNetwork()

    stat = simulator.getStaticsInfo()
    print stat['aveDelay']
    packetPool = sorted(stat['packetPool'], key=lambda p: p.getID())

    py.subplot(212)
    py.vlines([p.getCreateTime() for p in packetPool], [1],
              [p.getDelay() for p in packetPool], 'b')
    py.xlabel(r'Packet create time (m=2 with $\lambda$ = 0.9)')
    py.ylabel('delay')
    py.grid(True)
    py.savefig('packetDelayInOneRound_09')
    py.show()
Example #15
def create_network(K=3):
    flow_layers = []
    flow_layers += [
        CouplingLayer(network=GatedResNet(1, 16, K * 3 - 1),
                      mask=checkerBoardMask(h=28, w=28, inverse=(i % 2 == 1)),
                      in_channels=1) for i in range(4)
    ]

    flow_layers += [
        CouplingLayer(network=GatedResNet(1, 32, K * 3 - 1),
                      mask=checkerBoardMask(h=28, w=28, inverse=(i % 2 == 1)),
                      in_channels=1) for i in range(4)
    ]

    prior = torch.distributions.normal.Normal(loc=0.0, scale=1.0)
    shape = (128, 28)
    flow_model = Flow(prior, flow_layers, shape)
    return flow_model
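checkerBoardMask is not defined in this example; for reference, a checkerboard mask for coupling layers is typically built along these lines (an assumption about its behavior, not the project's actual helper):

import torch

def checkerboard_mask_sketch(h, w, inverse=False):
    # Alternating 0/1 pattern over an h x w grid; `inverse` flips which
    # half of the pixels the coupling layer leaves unchanged.
    mask = (torch.arange(h).view(-1, 1) + torch.arange(w)) % 2
    mask = mask.float().view(1, 1, h, w)
    return 1.0 - mask if inverse else mask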
Example #16
def extractTCPFlows(entryList):
    finishedFlows = []
    # a map between flow's signature and flow
    ongoingFlows = {}

    for i in range(len(entryList)):
        entry = entryList[i]
        if entry.logID == const.PROTOCOL_ID and \
           entry.ip["tlp_id"] == const.TCP_ID:
            flow_signature = Flow.extractFlowSignature(entry)
            if flow_signature:
                if entry.tcp["SYN_FLAG"] and not entry.tcp["ACK_FLAG"]:
                    # capture a new flow by SYN packet
                    if not ongoingFlows.has_key(flow_signature):
                        # create a new flow
                        ongoingFlows[flow_signature] = Flow(flow_signature)
                        ongoingFlows[flow_signature].addPacket(entry, i)
                elif entry.tcp["FIN_FLAG"]:
                    # finish a TCP flow if there is one
                    if ongoingFlows.has_key(flow_signature):
                        ongoingFlows[flow_signature].addPacket(entry, i)
                        finishedFlows.append(ongoingFlows[flow_signature])
                        del ongoingFlows[flow_signature]
                else:
                    # add to existing ongoing flow
                    if ongoingFlows.has_key(flow_signature):
                        ongoingFlows[flow_signature].addPacket(entry, i)

    # wrap up any leftover flows
    for flow in ongoingFlows.values():
        finishedFlows.append(flow)

    # filter out super short flow
    filteredFlows = []
    for f in finishedFlows:
        if len(f.flow) > 2:
            filteredFlows.append(f)

    # initiate the DNS trace
    dns = DNS(entryList)
    ipToURLMap = dns.getIpToURLMap()
    for f in filteredFlows:
        syn = f.flow[0]
        inverseIp = None
        if syn.ip["src_ip"] in ipToURLMap:
            inverseIp = syn.ip["src_ip"]
        elif syn.ip["dst_ip"] in ipToURLMap:
            inverseIp = syn.ip["dst_ip"]
        if inverseIp != None:
            f.setURL(ipToURLMap[inverseIp])
            if DNS_CHECK:
                print inverseIp + " -> " + str(ipToURLMap[inverseIp])

    if FLOW_CHECK:
        for f in finishedFlows:
            if f.properties["http"] != None:
                line = str(f.properties["http"]) + "\t" + str(len(f.flow)) + "\t" + \
                       util.convert_ts_in_human(f.flow[0].timestamp)
                if f.flow[0].rrcID != None:
                    line += "\t" + const.RRC_MAP[f.flow[0].rrcID]
                print line
                # print pw.printTCPEntry(f.flow[0])
        print "*" * 60
        print "Total # of flows are " + str(len(finishedFlows))
  
    return filteredFlows
Example #17
                    return pair
        else:
            return None

    def __repr__(self):
        return '<'+'id: '+ repr(self.id) + \
            ' with ' + str(len(self.neigbours)) + ' neighbours ' + \
            repr(self.flow) + '>\n' +repr(self.queues)


def makeNewNode(rate):
    def makeNode(ID, flow, queuesType):
        return NewNode(ID, flow, queuesType, rate)

    return makeNode


if __name__ == "__main__":
    flow1 = Flow((1, 2), (2, 2), 1)
    node = Node((1, 2), flow1)
    node.queues[(2, 2)] = [1, 2, 3, 4]

    flow2 = Flow((2, 4), (3, 5), 1)
    node2 = Node((2, 4), None)
    node2.queues[(2, 2)] = [3, 3, 3]

    node.neigbours[node2.id] = node2

    node.calcNeighbourWeight()
    print node
Example #18
def executeModelBottom(showFlows):

  ## Eight Unique Auxiliaries
  TimeToSmooth = Auxiliary("TimeToSmooth")
  MultiplierToRegen = Auxiliary("MultiplierToRegen")
  ActiveErrorDensity = Auxiliary("ActiveErrorDensity")
  TestingRate = Auxiliary("TestingRate")
  PassiveErrorDensity = Auxiliary("PassiveErrorDensity")
  FractionEscapingErrors = Auxiliary("FractionEscapingErrors")
  ActiveErrorsRetiringFraction = Auxiliary("ActiveErrorsRetiringFraction")
  BadFixGenRate = Auxiliary("BadFixGenRate")

  ## Six Flows
  ActiveErrorRegenRate = Flow("ActiveErrorRegenRate")
  ActiveErrorDetectAndCorrectRate = Flow("ActiveErrorDetectAndCorrectRate")
  ActiveErrorRetirementRate = Flow("ActiveErrorRetirementRate")
  PassiveErrorDetectAndCorrectRate = Flow("PassiveErrorDetectAndCorrectRate")
  PassiveErrorGenRate = Flow("PassiveErrorGenRate")
  ActiveErrorGenRate = Flow("ActiveErrorGenRate")

  auxDict = utility.createAuxiliaries_Bottom()

  ##States
  curr = State("CurrentState", False)
  prev = State("PrevState", False)
  dt = 1
  ##output & test purpose
  stockDict ={}


  for key_, val_ in auxDict.items():

    # Update stock from inflows and outflows
    curr.UndetectedActiveErrors_.setInput(dt * (prev.ActiveErrorRegenRate_.curr + prev.ActiveErrorGenRate_.curr) - (prev.ActiveErrorRetirementRate_.curr + prev.ActiveErrorDetectAndCorrectRate_.curr) )
    curr.UndetectedPassiveErrors_.setInput(dt * (prev.ActiveErrorRetirementRate_.curr + prev.PassiveErrorGenRate_.curr)- prev.PassiveErrorDetectAndCorrectRate_.curr) 
    print "{} ---> {}".format( key_,  curr)

    stockDict[key_]=[curr.UndetectedActiveErrors_.curr]
    print "---------------"

    #Setting up eight Auxiliaries
    TimeToSmooth.setInput(val_[0])
    MultiplierToRegen.setInput(val_[1])
    ActiveErrorDensity.setInput(val_[2])
    TestingRate.setInput(val_[3])
    ActiveErrorsRetiringFraction.setInput(val_[4])
    FractionEscapingErrors.setInput(val_[5])
    BadFixGenRate.setInput(val_[6])
    PassiveErrorDensity.setInput(val_[7] + curr.UndetectedPassiveErrors_.curr)

    # Filling Flows : six flows
    ActiveErrorRegenRate.fillFlowsByAuxs(TimeToSmooth, MultiplierToRegen, ActiveErrorDensity)
    ActiveErrorDetectAndCorrectRate.fillFlowsByAuxs(ActiveErrorDensity)
    ActiveErrorRetirementRate.fillFlowsByAuxs(TestingRate, ActiveErrorsRetiringFraction)
    ActiveErrorGenRate.fillFlowsByAuxs(FractionEscapingErrors, BadFixGenRate)
    PassiveErrorGenRate.fillFlowsByAuxs(BadFixGenRate, FractionEscapingErrors)
    PassiveErrorDetectAndCorrectRate.fillFlowsByAuxs(PassiveErrorDensity, TestingRate)

    # updating current state's flows: six flows
    curr.updateActiveErrorRegenRate(ActiveErrorRegenRate)
    curr.updateActiveErrorDetectAndCorrectRate(ActiveErrorDetectAndCorrectRate)
    curr.updateActiveErrorRetirementRate(ActiveErrorRetirementRate)
    curr.updateActiveErrorGenRate(ActiveErrorGenRate)
    curr.updatePassiveErrorGenRate(PassiveErrorGenRate)
    curr.updatePassiveErrorDetectAndCorrectRate(PassiveErrorDetectAndCorrectRate)

    if(showFlows):
      print "{} ---> {}".format( key_,  curr.getFlows())
      print "---------------"
    prev = curr.copyBottom("Prev")
    #print "Prev: ZZZ ",prev_
    print "###################"
  return stockDict
Example #19
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('input_dir')
    parser.add_argument('save_dir')
    parser.add_argument('--show',
                        type=bool,
                        default=False,
                        help='Shows optical flow while processing')
    args = parser.parse_args()

    flow_x_list = sorted(glob(osp.join(args.input_dir, 'flow_x_*.jpg')))
    flow_y_list = sorted(glob(osp.join(args.input_dir, 'flow_y_*.jpg')))

    bound = 20  #Farnback was calculated with this value
    flow = Flow()

    for idx, x_path, y_path in zip(range(1,
                                         len(flow_x_list) + 1), flow_x_list,
                                   flow_y_list):
        x_img = cvReadImg(x_path)[..., 0]
        y_img = cvReadImg(y_path)[..., 0]
        h, w = x_img.shape[:2]

        nnf = np.zeros((h, w, 2), dtype=np.float32)
        nnf[..., 0] = ((x_img / 255.) * 2 * bound) - bound
        nnf[..., 1] = ((y_img / 255.) * 2 * bound) - bound

        color_map = flow.visualize(nnf) * 255.
        cv2.imwrite(os.path.join(args.save_dir, '%06d.png' % idx), color_map)
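The loop above maps 8-bit JPEG values back to flow in [-bound, bound]; the inverse direction, i.e. how such flow_x_*/flow_y_* images would presumably have been quantized in the first place, would look like this sketch under the same bound assumption:

import numpy as np

def quantize_flow_channel(channel, bound=20):
    # Clip a float flow channel to [-bound, bound] and map it to [0, 255]
    # for storage as an 8-bit image; inverse of the decoding used above.
    clipped = np.clip(channel, -bound, bound)
    return np.round((clipped + bound) / (2.0 * bound) * 255.0).astype(np.uint8)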
Example #21
import xml.etree.ElementTree as ET
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("flows")
args = parser.parse_args()

root = ET.parse(args.flows).getroot()
configs = dict()
from Flow import Flow

for r in root:
    f = Flow(r)
    gc = f.element.get("generating_config")
    if gc not in configs:
        configs[gc] = dict()

    apk = f.get_file()
    if apk not in configs[gc]:
        configs[gc][apk] = 1
    else:
        configs[gc][apk] += 1

for k, v in configs.items():
    print(f"{k}:")
    for k1, v1 in v.items():
        print(f"\t{k1}: {v1}")
Example #22
parser = argparse.ArgumentParser("deduplicate a file of xml flow reports")
parser.add_argument("input", help="undeduplicated xml file")
parser.add_argument("output", help="output xml file")
args = parser.parse_args()

import logging
import os
logging.basicConfig(level=logging.WARNING)
import xml.etree.ElementTree as ET
from Flow import Flow
from tqdm import tqdm

# Read input file
tree = ET.parse(args.input)
root = tree.getroot()

result = set()
for f in tqdm(root):
    flow = Flow(f)
    result.add(flow)
result = sorted(result)

print(f"Finished deduplicating. In total, {len(result)} flows are unique.")
print(f"Writing to file...")
newRoot = ET.Element("flows")
newTree = ET.ElementTree(newRoot)
for r in tqdm(result):
    newRoot.append(r.element)

newTree.write(args.output)
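This deduplication relies on Flow being hashable, comparable for equality, and sortable. Flow.py is not shown here, so the sketch below only illustrates the minimal protocol such a class would need; the key fields are hypothetical:

class FlowSketch:
    def __init__(self, element):
        self.element = element
        # key fields assumed to identify a flow uniquely
        self._key = (element.get("source"), element.get("sink"), element.get("file"))

    def __eq__(self, other):
        return self._key == other._key

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        # allows sorted(result) on a set of flows
        return self._key < other._key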
Example #23
from Flow import Flow
import os
import xml.etree.ElementTree as ET

import argparse
parser = argparse.ArgumentParser("get distribution of flows among files")
parser.add_argument("input")
args = parser.parse_args()

root = ET.parse(args.input).getroot()

configs = dict()

flows = [Flow(f) for f in root]
for f in flows:
    gc = f.element.get("generating_config")
    if gc not in configs:
        configs[gc] = dict()
    apk = f.get_file()
    if apk not in configs[gc]:
        configs[gc][apk] = 1
    else:
        configs[gc][apk] += 1

for k, v in configs.items():
    print(f"{k}")
    for k1, v1 in v.items():
        print(f"\t{k1}: {v1}")
Example #24
def executeModelForBaseline(auxListParam, currStateParam, prevStateParam, dt):
   ##Nine Unique Auxiliaries : Top 
   MultiplierSchedPressure = Auxiliary("MultiplierSchedPressure")
   MultiplierWorkforce = Auxiliary("MultiplierWorkforce")
   NominalErr = Auxiliary("NominalErr")
   SWDevelopmentRate = Auxiliary("SWDevelopmentRate")
   PotErrDetectRate = Auxiliary("PotErrDetectRate")
   QARate = Auxiliary("QARate")
   AvgErrPerTask = Auxiliary("AvgErrPerTask")
   ActualReworkMP = Auxiliary("ActualReworkMP")
   DailyMPRework = Auxiliary("DailyMPRework")


   ## Eight Unique Auxiliaries :Bottom
   TimeToSmooth = Auxiliary("TimeToSmooth")
   MultiplierToRegen = Auxiliary("MultiplierToRegen")
   ActiveErrorDensity = Auxiliary("ActiveErrorDensity")
   TestingRate = Auxiliary("TestingRate")
   PassiveErrorDensity = Auxiliary("PassiveErrorDensity")
   FractionEscapingErrors = Auxiliary("FractionEscapingErrors")
   ActiveErrorsRetiringFraction = Auxiliary("ActiveErrorsRetiringFraction")
   BadFixGenRate = Auxiliary("BadFixGenRate")

   ### Four Flows from Top
   ErrGenRate = Flow("ErrGenRate")
   ErrDetRate = Flow("ErrDetRate")
   ErrEscapeRate = Flow("ErrEscapeRate")
   ReworkRate = Flow("ReworkRate")


   ## Six Flows from Bottom
   ActiveErrorRegenRate = Flow("ActiveErrorRegenRate")
   ActiveErrorDetectAndCorrectRate = Flow("ActiveErrorDetectAndCorrectRate")
   ActiveErrorRetirementRate = Flow("ActiveErrorRetirementRate")
   PassiveErrorDetectAndCorrectRate = Flow("PassiveErrorDetectAndCorrectRate")
   PassiveErrorGenRate = Flow("PassiveErrorGenRate")
   ActiveErrorGenRate = Flow("ActiveErrorGenRate")    
    
   # current state's stocks are dependent on prev. state's flows
   # some have in and out flows
   currStateParam.PotentiallyDetectableError_.setInput(dt * (prevStateParam.ErrGenRate_.curr - prevStateParam.ErrDetRate_.curr - prevStateParam.ErrEscapeRate_.curr ))
   currStateParam.DetectedError_.setInput( dt*( prevStateParam.ErrDetRate_.curr - prevStateParam.ReworkRate_.curr  ))

   # some only have in flows from top
   currStateParam.EscapedError_.setInput( dt*(prevStateParam.ErrEscapeRate_.curr))
   currStateParam.ReworkedError_.setInput(dt*(prevStateParam.ReworkRate_.curr))

   # Update stock from inflows and outflows from Bottom
   currStateParam.UndetectedActiveErrors_.setInput(dt * (prevStateParam.ActiveErrorRegenRate_.curr + prevStateParam.ActiveErrorGenRate_.curr) - (prevStateParam.ActiveErrorRetirementRate_.curr + prevStateParam.ActiveErrorDetectAndCorrectRate_.curr) )
   currStateParam.UndetectedPassiveErrors_.setInput(dt * (prevStateParam.ActiveErrorRetirementRate_.curr + prevStateParam.PassiveErrorGenRate_.curr)- prevStateParam.PassiveErrorDetectAndCorrectRate_.curr) 
   

   #setting up auxiliaries
   MultiplierSchedPressure.setInput(auxListParam[0])
   MultiplierWorkforce.setInput(auxListParam[1])
   NominalErr.setInput(auxListParam[2])
   SWDevelopmentRate.setInput(auxListParam[3])
   PotErrDetectRate.setInput(auxListParam[4])
   AvgErrPerTask.setInput(auxListParam[5])
   QARate.setInput(auxListParam[6])
   ActualReworkMP.setInput(auxListParam[7])
   DailyMPRework.setInput(auxListParam[8])

   #Setting up eight Auxiliaries from Bottom
   TimeToSmooth.setInput(auxListParam[9])
   MultiplierToRegen.setInput(auxListParam[10])
   ActiveErrorDensity.setInput(auxListParam[11])
   TestingRate.setInput(auxListParam[12])
   ActiveErrorsRetiringFraction.setInput(auxListParam[13])
   PassiveErrorDensity.setInput(auxListParam[16] + currStateParam.UndetectedPassiveErrors_.curr)

   #filling flows on the top
   ErrGenRate.fillFlowsByAuxs(MultiplierSchedPressure, MultiplierWorkforce, NominalErr, SWDevelopmentRate)
   ErrDetRate.fillFlowsByAuxs(PotErrDetectRate)
   ErrEscapeRate.fillFlowsByAuxs(AvgErrPerTask, QARate)
   ReworkRate.fillFlowsByAuxs(ActualReworkMP, DailyMPRework)
   
   # updating current state's flows
   currStateParam.updateErrGenRate(ErrGenRate)
   currStateParam.updateErrDetRate(ErrDetRate)
   currStateParam.updateErrEscapeRate(ErrEscapeRate)
   currStateParam.updateReworkRate(ReworkRate)   

   # Connecting top to the bottom 
   # Error Escape Rate -> Fraction Escaping Errors
   # Rework Rate -> Bad Fix Generation Rate   
   FractionEscapingErrors.setInput(auxListParam[14] + ErrEscapeRate.curr) 
   BadFixGenRate.setInput(auxListParam[15] + ReworkRate.curr)
   
   # Filling Flows : six flows from Bottom
   ActiveErrorRegenRate.fillFlowsByAuxs(TimeToSmooth, MultiplierToRegen, ActiveErrorDensity)
   ActiveErrorDetectAndCorrectRate.fillFlowsByAuxs(ActiveErrorDensity)
   ActiveErrorRetirementRate.fillFlowsByAuxs(TestingRate, ActiveErrorsRetiringFraction)
   ActiveErrorGenRate.fillFlowsByAuxs(FractionEscapingErrors, BadFixGenRate)
   PassiveErrorGenRate.fillFlowsByAuxs(BadFixGenRate, FractionEscapingErrors)
   PassiveErrorDetectAndCorrectRate.fillFlowsByAuxs(PassiveErrorDensity, TestingRate)


   # updating current state's flows: six flows from bottom
   currStateParam.updateActiveErrorRegenRate(ActiveErrorRegenRate)
   currStateParam.updateActiveErrorDetectAndCorrectRate(ActiveErrorDetectAndCorrectRate)
   currStateParam.updateActiveErrorRetirementRate(ActiveErrorRetirementRate)
   currStateParam.updateActiveErrorGenRate(ActiveErrorGenRate)
   currStateParam.updatePassiveErrorGenRate(PassiveErrorGenRate)
   currStateParam.updatePassiveErrorDetectAndCorrectRate(PassiveErrorDetectAndCorrectRate)


   ## copying current to prev. 
   prevStateParam = currStateParam.copyAll("prev") 
   return prevStateParam, currStateParam     
Example #25
                                               dstHopPair=dstIDandQueue)

    def __statProcess(self, packets, **params):

        for packet in packets:
            packet.addHopNum()
            #print 'params', params
            if packet.getDst() == params['nbr']:
                #print '----recved'
                self.packetPool.append(packet.setDelay(self.currentStep))


if __name__ == "__main__":
    factory = GridNetworkFactory(Node, Queues)
    factory.constructNetwork(8,8)\
        .setFlow(Flow((0,0),(2,2),1),\
                     Flow((0,1),(2,1),1))
    network = factory.getNetwork()
    packetFactory = PacketFactory()


    network[(0,0)].queues[(1,1)] = \
        [packetFactory.getPacket(currentTime=0, src=(0,0),dst=(1,1)),\
             packetFactory.getPacket(currentTime=0, src=(0,0),dst=(1,1)),\
             packetFactory.getPacket(currentTime=0, src=(0,0),dst=(1,1))]
    network[(0,1)].queues[(0,2)] = \
        [packetFactory.getPacket(currentTime=0,src=(0,1),dst=(0,2)),\
             packetFactory.getPacket(currentTime=0,src=(0,1),dst=(0,2))]

    simulator = Simulator(network, 100, ConstLinkRateGenerator(1),
                          packetFactory)
Example #26
import subprocess
import sys
import csv
import time
import threading
import getopt
import datetime

from Bme280 import Bme280
from Load import LoadCell
from Flow import Flow
from Stopwatch import Timer
from Obj_relay import Relay

# GLOBAL VARIABLES:
load_cell = LoadCell()
flow_input = Flow()
bme280 = Bme280()
timer = Timer()
rel_pump = Relay(2)

rows_load = []
rows_pressure = []
rows_flow = []

running = True


def create_csv(rows):
    header = ['timestamp', 'value', 'type']
    desc = input("Enter description: ").replace(" ", "_")
    filename = '{:%Y%m%d-%H%M%S}'.format(datetime.datetime.now()) + "-" + desc
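The snippet is cut off after the filename is built; a plausible continuation using the csv module imported above might be the following hypothetical helper (not the original code):

import csv

def write_rows(filename, rows, header=('timestamp', 'value', 'type')):
    # Dump collected sensor rows to <filename>.csv with a header line.
    with open(filename + '.csv', 'w') as fh:
        writer = csv.writer(fh)
        writer.writerow(list(header))
        writer.writerows(rows)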
Example #27
p.add_argument("-c", "--classifications")
p.add_argument("-i", "--input")
p.add_argument("--header", action="store_true")
import os
args = p.parse_args()

if args.header:
    print("file,num_matches,num_true,num_false,precision")
    exit(0)

# Read in classifications file
if args.classifications is None:
    raise ValueError("Need to supply classifications file.")
ctree = ET.parse(args.classifications)
croot = ctree.getroot()
cs = [Flow(f) for f in croot]

# Read in input
if args.input is None:
    raise ValueError("Need to supply input.")
itree = ET.parse(args.input)
iroot = itree.getroot().find('flows')
inputs = list(set([Flow(f) for f in iroot])) if iroot is not None else [
]  # do this to deduplicate

overlap = [c for c in cs if c in inputs]
num_true = [
    c for c in overlap if c.element.find("classification").find(
        'result').text.upper().startswith('TRUE')
]
num_false = [
Example #28
from Flow import Flow
import xml.etree.ElementTree as ET
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("input")
args = parser.parse_args()

flows = [Flow(f) for f in ET.parse(args.input).getroot()]

apks = dict()
for f in flows:
    if f.get_file() not in apks:
        apks[f.get_file()] = list()
    apks[f.get_file()].append(f)

global_classified = 0
global_unclassified = 0
global_types = dict()
for k, v in apks.items():
    classified = 0
    unclassified = 0
    types = dict()
    for f in v:
        try:
            result = f.element.find("classification")
            if result.text.strip() == "":
                raise AttributeError("empty result")
            if result.text.upper() not in types:
                types[result.text.upper()] = 0
            if result.text.upper() not in global_types:
Example #29
def executeModelAll(showFlows):

    ##Nine Unique Auxiliaries : Top
    MultiplierSchedPressure = Auxiliary("MultiplierSchedPressure")
    MultiplierWorkforce = Auxiliary("MultiplierWorkforce")
    NominalErr = Auxiliary("NominalErr")
    SWDevelopmentRate = Auxiliary("SWDevelopmentRate")
    PotErrDetectRate = Auxiliary("PotErrDetectRate")
    QARate = Auxiliary("QARate")
    AvgErrPerTask = Auxiliary("AvgErrPerTask")
    ActualReworkMP = Auxiliary("ActualReworkMP")
    DailyMPRework = Auxiliary("DailyMPRework")

    ## Eight Unique Auxiliaries :Bottom
    TimeToSmooth = Auxiliary("TimeToSmooth")
    MultiplierToRegen = Auxiliary("MultiplierToRegen")
    ActiveErrorDensity = Auxiliary("ActiveErrorDensity")
    TestingRate = Auxiliary("TestingRate")
    PassiveErrorDensity = Auxiliary("PassiveErrorDensity")
    FractionEscapingErrors = Auxiliary("FractionEscapingErrors")
    ActiveErrorsRetiringFraction = Auxiliary("ActiveErrorsRetiringFraction")
    BadFixGenRate = Auxiliary("BadFixGenRate")

    ### Four Flows from Top
    ErrGenRate = Flow("ErrGenRate")
    ErrDetRate = Flow("ErrDetRate")
    ErrEscapeRate = Flow("ErrEscapeRate")
    ReworkRate = Flow("ReworkRate")

    ## Six Flows from Bottom
    ActiveErrorRegenRate = Flow("ActiveErrorRegenRate")
    ActiveErrorDetectAndCorrectRate = Flow("ActiveErrorDetectAndCorrectRate")
    ActiveErrorRetirementRate = Flow("ActiveErrorRetirementRate")
    PassiveErrorDetectAndCorrectRate = Flow("PassiveErrorDetectAndCorrectRate")
    PassiveErrorGenRate = Flow("PassiveErrorGenRate")
    ActiveErrorGenRate = Flow("ActiveErrorGenRate")

    ## we need to fill up auxiliaries ...
    auxDict = utility.createAuxiliaries_All()
    ##States
    curr = StateAll("CurrentState")
    prev = StateAll("PrevState")
    dt = 1
    ##output & test purpose
    stockDict = {}
    for key_, val_ in auxDict.items():
        # current state's stocks are dependent on prev. state's flows
        # some have in and out flows
        curr.PotentiallyDetectableError_.setInput(
            dt * (prev.ErrGenRate_.curr - prev.ErrDetRate_.curr -
                  prev.ErrEscapeRate_.curr))
        curr.DetectedError_.setInput(
            dt * (prev.ErrDetRate_.curr - prev.ReworkRate_.curr))

        # some only have in flows from top
        curr.EscapedError_.setInput(dt * (prev.ErrEscapeRate_.curr))
        curr.ReworkedError_.setInput(dt * (prev.ReworkRate_.curr))

        # Update stock from inflows and outflows from Bottom
        curr.UndetectedActiveErrors_.setInput(
            dt *
            (prev.ActiveErrorRegenRate_.curr + prev.ActiveErrorGenRate_.curr) -
            (prev.ActiveErrorRetirementRate_.curr +
             prev.ActiveErrorDetectAndCorrectRate_.curr))
        curr.UndetectedPassiveErrors_.setInput(
            dt * (prev.ActiveErrorRetirementRate_.curr +
                  prev.PassiveErrorGenRate_.curr) -
            prev.PassiveErrorDetectAndCorrectRate_.curr)

        print "{} ---> {}".format(key_, curr)
        #setup output
        stockDict[key_] = [
            curr.PotentiallyDetectableError_.curr, curr.DetectedError_.curr,
            curr.EscapedError_.curr, curr.ReworkedError_.curr,
            curr.UndetectedActiveErrors_.curr,
            curr.UndetectedPassiveErrors_.curr
        ]
        print "---------------"
        #setting up auxiliaries
        MultiplierSchedPressure.setInput(val_[0])
        MultiplierWorkforce.setInput(val_[1])
        NominalErr.setInput(val_[2])
        SWDevelopmentRate.setInput(val_[3])
        PotErrDetectRate.setInput(val_[4])
        AvgErrPerTask.setInput(val_[5])
        QARate.setInput(val_[6])
        ActualReworkMP.setInput(val_[7])
        DailyMPRework.setInput(val_[8])

        #Setting up eight Auxiliaries from Bottom
        TimeToSmooth.setInput(val_[9])
        MultiplierToRegen.setInput(val_[10])
        ActiveErrorDensity.setInput(val_[11])
        TestingRate.setInput(val_[12])
        ActiveErrorsRetiringFraction.setInput(val_[13])
        PassiveErrorDensity.setInput(val_[16] +
                                     curr.UndetectedPassiveErrors_.curr)

        #filling flows on the top
        ErrGenRate.fillFlowsByAuxs(MultiplierSchedPressure,
                                   MultiplierWorkforce, NominalErr,
                                   SWDevelopmentRate)
        ErrDetRate.fillFlowsByAuxs(PotErrDetectRate)
        ErrEscapeRate.fillFlowsByAuxs(AvgErrPerTask, QARate)
        ReworkRate.fillFlowsByAuxs(ActualReworkMP, DailyMPRework)

        # updating current state's flows
        curr.updateErrGenRate(ErrGenRate)
        curr.updateErrDetRate(ErrDetRate)
        curr.updateErrEscapeRate(ErrEscapeRate)
        curr.updateReworkRate(ReworkRate)

        # Connecting top to the bottom
        # Error Escape Rate -> Fraction Escaping Errors
        # Rework Rate -> Bad Fix Generation Rate
        FractionEscapingErrors.setInput(val_[14] + ErrEscapeRate.curr)
        BadFixGenRate.setInput(val_[15] + ReworkRate.curr)

        # Filling Flows : six flows from Bottom
        ActiveErrorRegenRate.fillFlowsByAuxs(TimeToSmooth, MultiplierToRegen,
                                             ActiveErrorDensity)
        ActiveErrorDetectAndCorrectRate.fillFlowsByAuxs(ActiveErrorDensity)
        ActiveErrorRetirementRate.fillFlowsByAuxs(
            TestingRate, ActiveErrorsRetiringFraction)
        ActiveErrorGenRate.fillFlowsByAuxs(FractionEscapingErrors,
                                           BadFixGenRate)
        PassiveErrorGenRate.fillFlowsByAuxs(BadFixGenRate,
                                            FractionEscapingErrors)
        PassiveErrorDetectAndCorrectRate.fillFlowsByAuxs(
            PassiveErrorDensity, TestingRate)

        # updating current state's flows: six flows from bottom
        curr.updateActiveErrorRegenRate(ActiveErrorRegenRate)
        curr.updateActiveErrorDetectAndCorrectRate(
            ActiveErrorDetectAndCorrectRate)
        curr.updateActiveErrorRetirementRate(ActiveErrorRetirementRate)
        curr.updateActiveErrorGenRate(ActiveErrorGenRate)
        curr.updatePassiveErrorGenRate(PassiveErrorGenRate)
        curr.updatePassiveErrorDetectAndCorrectRate(
            PassiveErrorDetectAndCorrectRate)

        if (showFlows):
            print "Printing F-L-O-W-S !"
            print "key, flow value ---> {}, {}".format(key_, curr.getFlows())
        ## copying current to prev.
        prev = curr.copyAll("prev")
        print "###################"
    return stockDict
Example #30
from Flow import Flow
from glob import glob
import os.path as osp
import cv2
import os

def get_filename(file_path):
    return osp.splitext(osp.basename(file_path))[0]

if __name__=='__main__':
    flo_dir = '/mnt/Alfheim/Data/DIVA_Proposals/optical_flow/pwcnet/VIRAT_S_000005_100'
    hsv_dir = '/mnt/Alfheim/Data/DIVA_Proposals/hsv/pwcnet/VIRAT_S_000005_100/'
    flo_path_list = sorted(glob(osp.join(flo_dir, '*.flo')))
    flow = Flow()

    for flo_path in flo_path_list:
        nnf = flow.read(flo_path)
        color_map = flow.visualize(nnf) * 255.
        cv2.imwrite(osp.join(hsv_dir, '%s.png'%get_filename(flo_path)), color_map)
Example #31
def executeModelForBaseline(auxListParam, currStateParam, prevStateParam, dt):
    ##Nine Unique Auxiliaries : Top
    MultiplierSchedPressure = Auxiliary("MultiplierSchedPressure")
    MultiplierWorkforce = Auxiliary("MultiplierWorkforce")
    NominalErr = Auxiliary("NominalErr")
    SWDevelopmentRate = Auxiliary("SWDevelopmentRate")
    PotErrDetectRate = Auxiliary("PotErrDetectRate")
    QARate = Auxiliary("QARate")
    AvgErrPerTask = Auxiliary("AvgErrPerTask")
    ActualReworkMP = Auxiliary("ActualReworkMP")
    DailyMPRework = Auxiliary("DailyMPRework")

    ## Eight Unique Auxiliaries :Bottom
    TimeToSmooth = Auxiliary("TimeToSmooth")
    MultiplierToRegen = Auxiliary("MultiplierToRegen")
    ActiveErrorDensity = Auxiliary("ActiveErrorDensity")
    TestingRate = Auxiliary("TestingRate")
    PassiveErrorDensity = Auxiliary("PassiveErrorDensity")
    FractionEscapingErrors = Auxiliary("FractionEscapingErrors")
    ActiveErrorsRetiringFraction = Auxiliary("ActiveErrorsRetiringFraction")
    BadFixGenRate = Auxiliary("BadFixGenRate")

    ### Four Flows from Top
    ErrGenRate = Flow("ErrGenRate")
    ErrDetRate = Flow("ErrDetRate")
    ErrEscapeRate = Flow("ErrEscapeRate")
    ReworkRate = Flow("ReworkRate")

    ## Six Flows from Bottom
    ActiveErrorRegenRate = Flow("ActiveErrorRegenRate")
    ActiveErrorDetectAndCorrectRate = Flow("ActiveErrorDetectAndCorrectRate")
    ActiveErrorRetirementRate = Flow("ActiveErrorRetirementRate")
    PassiveErrorDetectAndCorrectRate = Flow("PassiveErrorDetectAndCorrectRate")
    PassiveErrorGenRate = Flow("PassiveErrorGenRate")
    ActiveErrorGenRate = Flow("ActiveErrorGenRate")

    # current state's stocks are dependent on prev. state's flows
    # some have in and out flows
    currStateParam.PotentiallyDetectableError_.setInput(
        dt *
        (prevStateParam.ErrGenRate_.curr - prevStateParam.ErrDetRate_.curr -
         prevStateParam.ErrEscapeRate_.curr))
    currStateParam.DetectedError_.setInput(
        dt *
        (prevStateParam.ErrDetRate_.curr - prevStateParam.ReworkRate_.curr))

    # some only have in flows from top
    currStateParam.EscapedError_.setInput(dt *
                                          (prevStateParam.ErrEscapeRate_.curr))
    currStateParam.ReworkedError_.setInput(dt *
                                           (prevStateParam.ReworkRate_.curr))

    # Update stock from inflows and outflows from Bottom
    currStateParam.UndetectedActiveErrors_.setInput(
        dt * (prevStateParam.ActiveErrorRegenRate_.curr +
              prevStateParam.ActiveErrorGenRate_.curr) -
        (prevStateParam.ActiveErrorRetirementRate_.curr +
         prevStateParam.ActiveErrorDetectAndCorrectRate_.curr))
    currStateParam.UndetectedPassiveErrors_.setInput(
        dt * (prevStateParam.ActiveErrorRetirementRate_.curr +
              prevStateParam.PassiveErrorGenRate_.curr) -
        prevStateParam.PassiveErrorDetectAndCorrectRate_.curr)

    #setting up auxiliaries
    MultiplierSchedPressure.setInput(auxListParam[0])
    MultiplierWorkforce.setInput(auxListParam[1])
    NominalErr.setInput(auxListParam[2])
    SWDevelopmentRate.setInput(auxListParam[3])
    PotErrDetectRate.setInput(auxListParam[4])
    AvgErrPerTask.setInput(auxListParam[5])
    QARate.setInput(auxListParam[6])
    ActualReworkMP.setInput(auxListParam[7])
    DailyMPRework.setInput(auxListParam[8])

    #Setting up eight Auxiliaries from Bottom
    TimeToSmooth.setInput(auxListParam[9])
    MultiplierToRegen.setInput(auxListParam[10])
    ActiveErrorDensity.setInput(auxListParam[11])
    TestingRate.setInput(auxListParam[12])
    ActiveErrorsRetiringFraction.setInput(auxListParam[13])
    PassiveErrorDensity.setInput(auxListParam[16] +
                                 currStateParam.UndetectedPassiveErrors_.curr)

    #filling flows on the top
    ErrGenRate.fillFlowsByAuxs(MultiplierSchedPressure, MultiplierWorkforce,
                               NominalErr, SWDevelopmentRate)
    ErrDetRate.fillFlowsByAuxs(PotErrDetectRate)
    ErrEscapeRate.fillFlowsByAuxs(AvgErrPerTask, QARate)
    ReworkRate.fillFlowsByAuxs(ActualReworkMP, DailyMPRework)

    # updating current state's flows
    currStateParam.updateErrGenRate(ErrGenRate)
    currStateParam.updateErrDetRate(ErrDetRate)
    currStateParam.updateErrEscapeRate(ErrEscapeRate)
    currStateParam.updateReworkRate(ReworkRate)

    # Connecting top to the bottom
    # Error Escape Rate -> Fraction Escaping Errors
    # Rework Rate -> Bad Fix Generation Rate
    FractionEscapingErrors.setInput(auxListParam[14] + ErrEscapeRate.curr)
    BadFixGenRate.setInput(auxListParam[15] + ReworkRate.curr)

    # Filling Flows : six flows from Bottom
    ActiveErrorRegenRate.fillFlowsByAuxs(TimeToSmooth, MultiplierToRegen,
                                         ActiveErrorDensity)
    ActiveErrorDetectAndCorrectRate.fillFlowsByAuxs(ActiveErrorDensity)
    ActiveErrorRetirementRate.fillFlowsByAuxs(TestingRate,
                                              ActiveErrorsRetiringFraction)
    ActiveErrorGenRate.fillFlowsByAuxs(FractionEscapingErrors, BadFixGenRate)
    PassiveErrorGenRate.fillFlowsByAuxs(BadFixGenRate, FractionEscapingErrors)
    PassiveErrorDetectAndCorrectRate.fillFlowsByAuxs(PassiveErrorDensity,
                                                     TestingRate)

    # updating current state's flows: six flows from bottom
    currStateParam.updateActiveErrorRegenRate(ActiveErrorRegenRate)
    currStateParam.updateActiveErrorDetectAndCorrectRate(
        ActiveErrorDetectAndCorrectRate)
    currStateParam.updateActiveErrorRetirementRate(ActiveErrorRetirementRate)
    currStateParam.updateActiveErrorGenRate(ActiveErrorGenRate)
    currStateParam.updatePassiveErrorGenRate(PassiveErrorGenRate)
    currStateParam.updatePassiveErrorDetectAndCorrectRate(
        PassiveErrorDetectAndCorrectRate)

    ## copying current to prev.
    prevStateParam = currStateParam.copyAll("prev")
    return prevStateParam, currStateParam
Example #32
def executeModelAll(showFlows):

  ##Nine Unique Auxiliaries : Top 
  MultiplierSchedPressure = Auxiliary("MultiplierSchedPressure")
  MultiplierWorkforce = Auxiliary("MultiplierWorkforce")
  NominalErr = Auxiliary("NominalErr")
  SWDevelopmentRate = Auxiliary("SWDevelopmentRate")
  PotErrDetectRate = Auxiliary("PotErrDetectRate")
  QARate = Auxiliary("QARate")
  AvgErrPerTask = Auxiliary("AvgErrPerTask")
  ActualReworkMP = Auxiliary("ActualReworkMP")
  DailyMPRework = Auxiliary("DailyMPRework")


  ## Eight Unique Auxiliaries :Bottom
  TimeToSmooth = Auxiliary("TimeToSmooth")
  MultiplierToRegen = Auxiliary("MultiplierToRegen")
  ActiveErrorDensity = Auxiliary("ActiveErrorDensity")
  TestingRate = Auxiliary("TestingRate")
  PassiveErrorDensity = Auxiliary("PassiveErrorDensity")
  FractionEscapingErrors = Auxiliary("FractionEscapingErrors")
  ActiveErrorsRetiringFraction = Auxiliary("ActiveErrorsRetiringFraction")
  BadFixGenRate = Auxiliary("BadFixGenRate")

  ### Four Flows from Top
  ErrGenRate = Flow("ErrGenRate")
  ErrDetRate = Flow("ErrDetRate")
  ErrEscapeRate = Flow("ErrEscapeRate")
  ReworkRate = Flow("ReworkRate")


  ## Six Flows from Bottom
  ActiveErrorRegenRate = Flow("ActiveErrorRegenRate")
  ActiveErrorDetectAndCorrectRate = Flow("ActiveErrorDetectAndCorrectRate")
  ActiveErrorRetirementRate = Flow("ActiveErrorRetirementRate")
  PassiveErrorDetectAndCorrectRate = Flow("PassiveErrorDetectAndCorrectRate")
  PassiveErrorGenRate = Flow("PassiveErrorGenRate")
  ActiveErrorGenRate = Flow("ActiveErrorGenRate")

  ## we need to fill up auxiliaries ...
  auxDict = utility.createAuxiliaries_All()
  ##States
  curr = StateAll("CurrentState")
  prev = StateAll("PrevState")
  dt = 1
  ##output & test purpose
  stockDict ={}
  for key_,val_ in auxDict.items():
   # current state's stocks are dependent on prev. state's flows
   # some have in and out flows
   curr.PotentiallyDetectableError_.setInput(dt * (prev.ErrGenRate_.curr - prev.ErrDetRate_.curr - prev.ErrEscapeRate_.curr ))
   curr.DetectedError_.setInput( dt*( prev.ErrDetRate_.curr - prev.ReworkRate_.curr  ))

   # some only have in flows from top
   curr.EscapedError_.setInput( dt*(prev.ErrEscapeRate_.curr))
   curr.ReworkedError_.setInput(dt*(prev.ReworkRate_.curr))

   # Update stock from inflows and outflows from Bottom
   curr.UndetectedActiveErrors_.setInput(dt * (prev.ActiveErrorRegenRate_.curr + prev.ActiveErrorGenRate_.curr) - (prev.ActiveErrorRetirementRate_.curr + prev.ActiveErrorDetectAndCorrectRate_.curr) )
   curr.UndetectedPassiveErrors_.setInput(dt * (prev.ActiveErrorRetirementRate_.curr + prev.PassiveErrorGenRate_.curr)- prev.PassiveErrorDetectAndCorrectRate_.curr) 
   
   print "{} ---> {}".format( key_,  curr)
   #setup output
   stockDict[key_]=[curr.PotentiallyDetectableError_.curr, curr.DetectedError_.curr, curr.EscapedError_.curr, curr.ReworkedError_.curr, curr.UndetectedActiveErrors_.curr,curr.UndetectedPassiveErrors_.curr]
   print "---------------"
   #setting up auxiliaries
   MultiplierSchedPressure.setInput(val_[0])
   MultiplierWorkforce.setInput(val_[1])
   NominalErr.setInput(val_[2])
   SWDevelopmentRate.setInput(val_[3])
   PotErrDetectRate.setInput(val_[4])
   AvgErrPerTask.setInput(val_[5])
   QARate.setInput(val_[6])
   ActualReworkMP.setInput(val_[7])
   DailyMPRework.setInput(val_[8])

   #Setting up eight Auxiliaries from Bottom
   TimeToSmooth.setInput(val_[9])
   MultiplierToRegen.setInput(val_[10])
   ActiveErrorDensity.setInput(val_[11])
   TestingRate.setInput(val_[12])
   ActiveErrorsRetiringFraction.setInput(val_[13])
   PassiveErrorDensity.setInput(val_[16] + curr.UndetectedPassiveErrors_.curr)

   #filling flows on the top
   ErrGenRate.fillFlowsByAuxs(MultiplierSchedPressure, MultiplierWorkforce, NominalErr, SWDevelopmentRate)
   ErrDetRate.fillFlowsByAuxs(PotErrDetectRate)
   ErrEscapeRate.fillFlowsByAuxs(AvgErrPerTask, QARate)
   ReworkRate.fillFlowsByAuxs(ActualReworkMP, DailyMPRework)
   
   # updating current state's flows
   curr.updateErrGenRate(ErrGenRate)
   curr.updateErrDetRate(ErrDetRate)
   curr.updateErrEscapeRate(ErrEscapeRate)
   curr.updateReworkRate(ReworkRate)   

   # Connecting top to the bottom 
   # Error Escape Rate -> Fraction Escaping Errors
   # Rework Rate -> Bad Fix Generation Rate   
   FractionEscapingErrors.setInput(val_[14] + ErrEscapeRate.curr) 
   BadFixGenRate.setInput(val_[15] + ReworkRate.curr)
   
   # Filling Flows : six flows from Bottom
   ActiveErrorRegenRate.fillFlowsByAuxs(TimeToSmooth, MultiplierToRegen, ActiveErrorDensity)
   ActiveErrorDetectAndCorrectRate.fillFlowsByAuxs(ActiveErrorDensity)
   ActiveErrorRetirementRate.fillFlowsByAuxs(TestingRate, ActiveErrorsRetiringFraction)
   ActiveErrorGenRate.fillFlowsByAuxs(FractionEscapingErrors, BadFixGenRate)
   PassiveErrorGenRate.fillFlowsByAuxs(BadFixGenRate, FractionEscapingErrors)
   PassiveErrorDetectAndCorrectRate.fillFlowsByAuxs(PassiveErrorDensity, TestingRate)


   # updating current state's flows: six flows from bottom
   curr.updateActiveErrorRegenRate(ActiveErrorRegenRate)
   curr.updateActiveErrorDetectAndCorrectRate(ActiveErrorDetectAndCorrectRate)
   curr.updateActiveErrorRetirementRate(ActiveErrorRetirementRate)
   curr.updateActiveErrorGenRate(ActiveErrorGenRate)
   curr.updatePassiveErrorGenRate(PassiveErrorGenRate)
   curr.updatePassiveErrorDetectAndCorrectRate(PassiveErrorDetectAndCorrectRate)

   if(showFlows):
     print "Printing F-L-O-W-S !"  
     print "key, flow value ---> {}, {}".format(key_, curr.getFlows())      
   ## copying current to prev. 
   prev = curr.copyAll("prev") 
   print "###################"
  return stockDict
Example #33
def executeModelTop(showFlows):

  ##Auxiliaries
  MultiplierSchedPressure = Auxiliary("MultiplierSchedPressure")
  MultiplierWorkforce = Auxiliary("MultiplierWorkforce")
  NominalErr = Auxiliary("NominalErr")
  SWDevelopmentRate = Auxiliary("SWDevelopmentRate")
  PotErrDetectRate = Auxiliary("PotErrDetectRate")
  QARate = Auxiliary("QARate")
  AvgErrPerTask = Auxiliary("AvgErrPerTask")
  ActualReworkMP = Auxiliary("ActualReworkMP")
  DailyMPRework = Auxiliary("DailyMPRework")
  ### Flows
  ErrGenRate = Flow("ErrGenRate")
  ErrDetRate = Flow("ErrDetRate")
  ErrEscapeRate = Flow("ErrEscapeRate")
  ReworkRate = Flow("ReworkRate")

  ## we need to fill up auxiliaries ...
  auxDict = utility.createAuxiliaries_Top()
  ##States
  curr = State("CurrentState", True)
  prev = State("PrevState", True)
  dt = 1
  ##output & test purpose
  stockDict ={}
  for key_,val_ in auxDict.items():
   # current state's stocks are dependent on prev. state's flows
   # some have in and out flows
   curr.PotentiallyDetectableError_.setInput(dt * (prev.ErrGenRate_.curr - prev.ErrDetRate_.curr - prev.ErrEscapeRate_.curr ))
   curr.DetectedError_.setInput( dt*( ErrDetRate.curr - ReworkRate.curr  ))
   # some only have in flows
   curr.EscapedError_.setInput( dt*(ErrEscapeRate.curr))
   curr.ReworkedError_.setInput(dt*(ReworkRate.curr))
   print "{} ---> {}".format( key_,  curr)
   #setup output
   stockDict[key_]=[curr.PotentiallyDetectableError_.curr, curr.DetectedError_.curr, curr.EscapedError_.curr, curr.ReworkedError_.curr]
   print "---------------"
   #setting up auxiliaries
   MultiplierSchedPressure.setInput(val_[0])
   MultiplierWorkforce.setInput(val_[1])
   NominalErr.setInput(val_[2])
   SWDevelopmentRate.setInput(val_[3])
   PotErrDetectRate.setInput(val_[4])
   AvgErrPerTask.setInput(val_[5])
   QARate.setInput(val_[6])
   ActualReworkMP.setInput(val_[7])
   DailyMPRework.setInput(val_[8])
   #filling flows
   ErrGenRate.fillFlowsByAuxs(MultiplierSchedPressure, MultiplierWorkforce, NominalErr, SWDevelopmentRate)
   ErrDetRate.fillFlowsByAuxs(PotErrDetectRate)
   ErrEscapeRate.fillFlowsByAuxs(AvgErrPerTask, QARate)
   ReworkRate.fillFlowsByAuxs(ActualReworkMP, DailyMPRework)
   # updating current state's flows
   curr.updateErrGenRate(ErrGenRate)
   curr.updateErrDetRate(ErrDetRate)
   curr.updateErrEscapeRate(ErrEscapeRate)
   curr.updateReworkRate(ReworkRate)
   if(showFlows):
    print "{} ---> {}".format( key_,  curr.getFlows())
    print "---------------"
   prev = curr.copyTop("Prev")
   #print "Prev: ZZZ ",prev_
   print "###################"
  return stockDict
Example #34
from CellContainer import CellContainer
from Flow import Flow
from Layer import Layer
from SolverSlau import SolverSlau
from ThreeDimOilWaterImpes import ThreeDimOilWaterImpes
from prettytable import PrettyTable

solver_slau = SolverSlau()
impes = ThreeDimOilWaterImpes(solver_slau)

cell_container = CellContainer(
)  # Check how eq_index is handled; the details are described inside the implementation
cell_container.initialize_cells()

Flow.initialize_flow(cell_container)

time = impes.tau  # treat this right away as the first time step, since the zeroth one already exists
counter = 1
counter_write = []

t_debit = PrettyTable([
    'Time, days', 'Q_mass_OIL, kg/sec', 'Q_mass_WATER, kg/sec',
    'Q_vol_OIL, cub.met/sec', 'Q_vol_WATER, cub.met/sec'
])

while time < impes.time_max:
    delta_k = impes.generate_delta_k()
    if counter == 1:
        pass
    else:
        cell_container.equate_cell_states()
Example #35
            X = self.__scaler.transform(X)
            return self.__searcher.predict(X)

    return SearchScale


if __name__ == "__main__":
    import numpy as np
    from sklearn.kernel_ridge import KernelRidge
    from TSeries.MLTK.Scaler import Scaler
    from TSeries.MLTK.Searcher import CVSearcher

    from Flow import Flow
    from Wrangler import Wrangler

    tflow = Flow().fetch("test").clean()
    wrangler = Wrangler(tflow, {
        "spot_ahead": 7,
        "consumption_to_ahead": 2,
        "generation_ahead": 1
    }, {
        "spot_rollings": [15, 30],
        "consumption_rollings": [7, 15],
        "generation_rollings": [3, 7, 15]
    })
    Xtrain, Ytrain, Xtest, Ytest = wrangler.train_test("20120101",
                                                       "20141231",
                                                       "20150101",
                                                       "20160105",
                                                       "D",
                                                       predict_ahead=1)