 def createInCapacityStationBuffers(self): 
     # loop through the exits   
     for exit in G.CapacityStationExitList:
         # if the exit received nothing currently there is nothing to do
         if exit.currentlyObtainedEntities==[]:
             continue
         buffer=exit.nextCapacityStationBuffer   # the next buffer
         # if it is the end of the system there is nothing to do
         if not buffer:
             exit.currentlyObtainedEntities=[]
             continue
         previousStation=exit.previous[0]  # the station that the entity just finished from
         previousBuffer=previousStation.previous[0]  # the buffer of the station
         nextStation=buffer.next[0]        # the next processing station
         # for every entity calculate the new entity to be created in the next station and create it  
         for entity in exit.currentlyObtainedEntities:
             project=entity.capacityProject
             # if the entity exits from an assembly station
             # and the whole project is not yet finished there, do not create anything in the next buffer
             if previousBuffer.requireFullProject:
                 projectFinishedFromLast=True
                 for e in previousBuffer.getActiveObjectQueue():
                     if e.capacityProject==project:
                         projectFinishedFromLast=False
                         break
                 if not projectFinishedFromLast:
                     continue
                 
             entityCapacity=entity.requiredCapacity
             previousRequirement=float(project.capacityRequirementDict[previousStation.id])
             nextRequirement=float(project.capacityRequirementDict[nextStation.id])
             # if the previous station was an assembly, the full project arrives at the next one,
             # so it requires whatever the project requires there
             if previousBuffer.requireFullProject:
                 nextStationCapacityRequirement=nextRequirement
             # else calculate proportionally
             else:
                 proportion=nextRequirement/previousRequirement
                 nextStationCapacityRequirement=proportion*entityCapacity
             entityToCreateName=entity.capacityProjectId+'_'+nextStation.objName+'_'+str(nextStationCapacityRequirement)
             entityToCreate=CapacityEntity(name=entityToCreateName, capacityProjectId=entity.capacityProjectId, 
                                           requiredCapacity=nextStationCapacityRequirement)
             entityToCreate.currentStation=buffer
             entityToCreate.initialize()
             import dream.simulation.Globals as Globals
             Globals.setWIP([entityToCreate])     #set the new components as wip                
         # reset the currently obtained entities list to empty
         exit.currentlyObtainedEntities=[]
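
     # Worked example for the proportional rule above (editor's sketch; the numbers are
     # hypothetical): if the previous station requires 20 units for the project, the next
     # station requires 10 and the exiting entity carries 4 units, then
     #     proportion = 10 / 20.0 = 0.5
     #     nextStationCapacityRequirement = 0.5 * 4 = 2.0
     # so a CapacityEntity of 2.0 units is created in the next buffer; if the previous
     # buffer requires the full project, the new entity takes the full requirement of the
     # next station instead.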
 def mergeEntities(self):
     # loop through the capacity station buffers
     for buffer in G.CapacityStationBufferList:
         nextStation=buffer.next[0]
         projectList=[]
         # loop through the entities to see what projects lie in the buffer
         for entity in buffer.getActiveObjectQueue():
             if entity.capacityProject not in projectList:
                 projectList.append(entity.capacityProject)
         for project in projectList:
             entitiesToBeMerged=[]
             for entity in buffer.getActiveObjectQueue():
                 if entity.capacityProject==project:
                     entitiesToBeMerged.append(entity)
             totalCapacityRequirement=0
             # if the buffer acts as an assembly there is no need to sum the entities' requirements;
             # the total is whatever the project requires at this station, minus what has already been processed
             if buffer.requireFullProject:
                 # find what has already been processed
                 alreadyProcessed=0     
                 for record in nextStation.detailedWorkPlan:
                     if record['project']==project.id:
                         alreadyProcessed+=float(record['allocation'])
                 totalCapacityRequirement=project.capacityRequirementDict[nextStation.id]-alreadyProcessed
             # else calculate the total capacity requirement by adding up what each entity requires
             else:
                 for entity in entitiesToBeMerged:
                     totalCapacityRequirement+=entity.requiredCapacity
             # erase the Entities to create the merged one
             for entity in entitiesToBeMerged:
                 buffer.getActiveObjectQueue().remove(entity)
             # create the merged entity
             entityToCreateName=entity.capacityProjectId+'_'+nextStation.objName+'_'+str(totalCapacityRequirement)
             entityToCreate=CapacityEntity(name=entityToCreateName, capacityProjectId=project.id, 
                                           requiredCapacity=totalCapacityRequirement)
             entityToCreate.currentStation=buffer
             entityToCreate.initialize()
             import dream.simulation.Globals as Globals
             Globals.setWIP([entityToCreate])     #set the new components as wip                                            
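
     # Worked example for the merge above (editor's sketch; the numbers are hypothetical):
     # two entities of the same project requiring 3 and 5 units are removed from the buffer
     # and replaced by a single CapacityEntity of 3 + 5 = 8 units. If the buffer requires
     # the full project and the project needs 20 units at the next station, of which 12 are
     # already recorded as allocations in its detailedWorkPlan, the merged entity requires
     # 20 - 12 = 8 units instead.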
 def breakEntity(self, entity, buffer, station, totalAvailableCapacity, totalRequestedCapacity):
     # calculate the capacity that should proceed and the capacity that should remain
     capacityToMove=totalAvailableCapacity*(entity.requiredCapacity)/float(totalRequestedCapacity)
     capacityToStay=entity.requiredCapacity-capacityToMove
     # if capacityToMove is equal to 0 there is no need to break; return False
     if capacityToMove==0:
         return False
     else:
         # remove the capacity entity from the buffer so that the broken ones can be created
         buffer.getActiveObjectQueue().remove(entity)
         entityToMoveName=entity.capacityProjectId+'_'+station.objName+'_'+str(capacityToMove)
         entityToMove=CapacityEntity(name=entityToMoveName, capacityProjectId=entity.capacityProjectId, requiredCapacity=capacityToMove)
         entityToMove.initialize()
         entityToMove.currentStation=buffer
         entityToMove.shouldMove=True
         entityToStayName=entity.capacityProjectId+'_'+station.objName+'_'+str(capacityToStay)
         entityToStay=CapacityEntity(name=entityToStayName, capacityProjectId=entity.capacityProjectId, requiredCapacity=capacityToStay)
         entityToStay.initialize()
         entityToStay.currentStation=buffer
         import dream.simulation.Globals as Globals
         Globals.setWIP([entityToMove,entityToStay])     #set the new components as wip    
         return True   
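
     # Worked example for the split above (editor's sketch; the numbers are hypothetical):
     # with totalAvailableCapacity=6, totalRequestedCapacity=10 and an entity that requires
     # 4 units,
     #     capacityToMove = 6 * 4 / 10.0 = 2.4
     #     capacityToStay = 4 - 2.4 = 1.6
     # so the entity is replaced by a 2.4-unit part flagged with shouldMove and a 1.6-unit
     # part that stays in the buffer; both are registered as WIP through Globals.setWIP.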
def main(argv=[], input_data=None):
    argv = argv or sys.argv[1:]

    #create an empty list to store all the objects in   
    G.ObjList=[]
    G.RouterList=[]

    if input_data is None:
      # user passes the topology filename as first argument to the program
      filename = argv[0]
      try:                                          # try to open the file with the inputs
          G.JSONFile=open(filename, "r")            # global variable holding the file to be opened
      except IOError:                               
          print "%s could not be opened" % filename
          return "ERROR"
      G.InputData=G.JSONFile.read()                 # pass the contents of the input file to the global var InputData
    else:
      G.InputData = input_data
    start=time.time()                               # start counting execution time 

    #read the input from the JSON file and create the line
    G.JSONData=json.loads(G.InputData)              # create the dictionary JSONData
    readGeneralInput()
    createObjectResourcesAndCoreObjects()
    createObjectInterruptions()
    setTopology()

    #run the experiment (replications)          
    for i in xrange(G.numberOfReplications):
        G.env=simpy.Environment()                       # initialize the environment
        G.maxSimTime=float(G.JSONData['general'].get('maxSimTime', '100'))     # read the maxSimTime in each replication 
                                                                               # since it may be changed for infinite ones
        if G.RouterList:
            G.RouterList[0].isActivated=False
            G.RouterList[0].isInitialized=False
        
        if G.seed:
            G.Rnd=Random('%s%s' % (G.seed, i))
            G.numpyRnd.random.seed(G.seed)
        else:
            G.Rnd=Random()
            G.numpyRnd.random.seed()
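        # Note (editor's comment): seeding Random with '%s%s' % (G.seed, i) gives each
        # replication its own reproducible stream, while the numpy generator above is
        # reseeded with the base G.seed in every replication.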
        createWIP()
        initializeObjects()
        Globals.setWIP(G.EntityList)        
        activateObjects()
        
        # if the simulation is run until no more events are scheduled,
        # then we have to find the end time as the time the last entity ended.
        if G.maxSimTime==-1:
            # If this is done for a model that always has events, it will run forever!
            G.env.run(until=float('inf'))
                                         
            # identify from the exits the time at which the last entity ended
            endList=[]
            for exit in G.ExitList:
                endList.append(exit.timeLastEntityLeft)
  
            # identify the time of the last event
            if float(max(endList))!=0 and G.env.now==float('inf'):    #do not let G.maxSimTime=0 so that there will be no crash
                G.maxSimTime=float(max(endList))
            else:
                print "simulation ran for 0 time, something may have gone wrong"
                logger.info("simulation ran for 0 time, something may have gone wrong")
        #else we simulate until the given maxSimTime
        else:
            G.env.run(until=G.maxSimTime)
        
        #carry out the post-processing operations for every object in the topology
        for element in G.ObjList+G.ObjectResourceList+G.RouterList:
            element.postProcessing()
                       
        # added for debugging, print the Route of the Jobs on the same G.traceFile
        PrintRoute.outputRoute()
            
        #output trace to excel      
        if G.trace=="Yes":
            ExcelHandler.outputTrace('trace'+str(i))  
            import StringIO
            traceStringIO = StringIO.StringIO()
            G.traceFile.save(traceStringIO)
            encodedTrace=traceStringIO.getvalue().encode('base64')
            ExcelHandler.resetTrace()
    
    G.outputJSON['_class'] = 'Dream.Simulation'
    G.outputJSON['general'] = {}
    G.outputJSON['general']['_class'] = 'Dream.Configuration'
    G.outputJSON['general']['totalExecutionTime'] = (time.time()-start)
    G.outputJSON['elementList'] = []
    
        
    #output data to JSON for every object in the topology         
    for object in G.ObjectResourceList + G.EntityList + G.ObjList+G.RouterList:
        object.outputResultsJSON()
        
    # output the trace as encoded if it is set on
    if G.trace=="Yes":
        # XXX discuss names on this
        jsonTRACE = {'_class': 'Dream.Simulation',
                'id': 'TraceFile',
                'results': {'trace':encodedTrace}
            }
        G.outputJSON['elementList'].append(jsonTRACE)
        
        
    outputJSONString=json.dumps(G.outputJSON, indent=True)
    if 0:
      G.outputJSONFile=open('outputJSON.json', mode='w')
      G.outputJSONFile.write(outputJSONString)
    if not input_data:
      # Output on stdout
      print outputJSONString
      # XXX I am not sure we still need this case
      return

    # XXX result_list is not needed here, we could replace result by result_list
    G.JSONData['result'] = {'result_list': [G.outputJSON]}
    #logger.info("execution time="+str(time.time()-start))

    return json.dumps(G.JSONData, indent=True)
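
# Editor's sketch of a command line entry point (assumption, not part of the excerpt): the
# module would be run with the topology JSON filename as its first argument, for example
# `python thisModule.py topology.json`, where thisModule.py is a hypothetical name.
if __name__ == '__main__':
    main()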