Example #1
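# Note: these functions are excerpted from a clusterPy module, so their imports are
# not shown here. A sketch of what they appear to rely on (an assumption based on
# the names used below, not taken from this excerpt):
#   import time as tm
#   import numpy as nm
#   from gurobipy import Model, GRB, quicksum, GurobiError
# plus clusterPy's distanceA2AEuclideanSquared helper and, for the last example,
# old_div from past.utils.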
def execMinpOrder(y, w, threshold=1, conseq='none'):
	"""Min-p-regions model (Order formulation)

	The min-p-regions model, devised by [Duque_...2014]_ ,
	clusters a set of geographic areas into the minimum number of homogeneous
	regions such that the value of a spatially extensive regional attribute is
	above a predefined threshold value. In clusterPy we measure heterogeneity as
	the within-cluster sum of squares from each area to the attribute centroid
	of its cluster. ::   
	
		layer.cluster('minpOrder',vars,<threshold>,<wType>,<std>,<dissolve>,<dataOperations>)

	:keyword vars: Area attribute(s) (e.g. ['SAR1','SAR2','POP'])  
	:type vars: list
	:keyword threshold: Minimum value of the constrained variable at regional level. Default value threshold = 100.
	:type threshold: integer
	:keyword wType: Type of first-order contiguity-based spatial matrix: 'rook' or 'queen'. Default value wType = 'rook'. 
	:type wType: string
	:keyword std: If = 1, then the variables will be standardized.
	:type std: binary
	:keyword dissolve: If = 1, then you will get a "child" instance of the layer that contains the new regions. Default value = 0. Note: Each child layer is saved in the attribute layer.results. The first algorithm that you run with dissolve=1 will have a child layer in layer.results[0]; the second algorithm that you run with dissolve=1 will be in layer.results[1], and so on. You can export a child as a shapefile with layer.result[<1,2,3..>].exportArcData('filename')
	:type dissolve: binary
	:keyword dataOperations: Dictionary which maps a variable to a list of operations to run on it. The dissolved layer will contain in its data all the variables specified in this dictionary. Be sure to check the input layer's fieldNames before using this utility.
	:type dataOperations: dictionary

	The dictionary structure must be as shown below.

	>>> X = {}
	>>> X[variableName1] = [function1, function2,....]
	>>> X[variableName2] = [function1, function2,....]

	Where the functions are strings representing the names of the
	functions to be applied to the given variableName. Available functions
	are 'sum','mean','min','max','meanDesv','stdDesv','med',
	'mode','range','first','last','numberOfAreas'. By default only the
	ID variable is added to the dissolved map."""
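	# Hypothetical usage sketch (illustrative, not from this module): given a
	# clusterPy Layer named "layer" with fields 'SAR1' and 'POP', a call following
	# the docstring pattern would be
	#   layer.cluster('minpOrder', ['SAR1', 'POP'], 100)
	# i.e. threshold = 100 with the remaining options left at their defaults.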

	print "Running min-p-regions model (Duque 2014)"
	print "Order formulation"
	print "Number of areas: ", len(y) 
	print "threshold value: ", threshold

		
	start = tm.time()

	# Number of areas
	n = len(y)
	q = n-1

	# Area iterator
	numA = range(n)
	# Order iterator
	numO = range(q)

	z = {}
	l = {} #spatially extensive attribute
	for i in numA:
		z[i] = y[i][0]
		l[i] = y[i][1]
		
		
		
	d = nm.zeros(shape = (n,n))
	for i in numA:
		for j in numA:
			d[i,j]=distanceA2AEuclideanSquared([[z[i]],[z[j]]])[0][0]

		
	# h: scaling factor
	temp = 0
	for i in numA:
		for j in numA:
			if i<j:
				temp += d[i][j]
	h = 1+ nm.floor(nm.log(temp))
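	# Scaling note: temp is the sum of all pairwise squared distances, and
	# h = 1 + floor(ln(temp)) makes 10**h exceed temp (for temp > 1); the 10**h
	# weight on the region-count term in the objective below therefore makes the
	# solver minimize the number of regions first and, only as a tiebreaker,
	# the total within-region heterogeneity.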


	#-----------------------------------
	try: 
	# Create the new model
		m=Model("minpRegions")
		
		#tol=1e-5

		# Create variables

		# t_ij
		# 1 if areas i and j belong to the same region
		# 0 otherwise
		t = []
		for i in numA:
			t_i = []
			for j in numA:
				t_i.append(m.addVar(vtype=GRB.BINARY,name="t_"+str([i,j])))
			t.append(t_i)

		# x_ikc
		# 1 if area i is assigned to region k in order c
		# 0 otherwise
		x = []
		for i in numA:
			x_i=[]
			for k in numA:
				x_ik=[]
				for c in numO:
					x_ik.append(m.addVar(vtype=GRB.BINARY,name="x_"+str([i,k,c])))
				x_i.append(x_ik)
			x.append(x_i)
			  
		# Integrate new variables
		m.update()

		# Objective function
		temp1 = []
		temp2 = []
		for i in numA:
			# Number of regions
			for k in numA:
				temp1.append(x[i][k][0])
			# Total heterogeneity
			for j in numA:
				temp2.append(d[i][j]*t[i][j])

		m.setObjective((10**float(h))*quicksum(temp1)+quicksum(temp2), GRB.MINIMIZE)

		# Constraints 1
		for k in numA:
			temp = []
			for i in numA:
				temp.append(x[i][k][0])
			m.addConstr(quicksum(temp)<=1,"c1_"+str([k,0]))
			
			
		# Constraints 2		
		for i in numA:
			temp = []
			for k in numA:
				for c in numO:
					temp.append(x[i][k][c]) #+= x[i][k][c]
			m.addConstr(quicksum(temp) == 1,"c2_"+str([i]))
			#m.addConstr(quicksum(temp) >= 1-tol,"c2_"+str([i]))
			
		# Constraints 3		
		for i in numA:
			for k in numA:
				for c in range(1,q):
					temp = []
					for j in w[i]:
						temp.append(x[j][k][c-1])
					#m.addConstr(x[i][k][c]-quicksum(temp) <= tol,"c3_"+str([i,k,c]))
					m.addConstr(x[i][k][c]-quicksum(temp) <= 0,"c3_"+str([i,k,c]))
		
		# Constraints 4
		for i in numA:
			for j in numA:
				if i!=j:
					for k in numA:
						temp = []
						for c in numO:
							temp.append(x[i][k][c]+x[j][k][c])
						#m.addConstr(quicksum(temp)-t[i][j]-t[j][i]-1 <= tol,"c4_"+str([i,j,k]))				
						m.addConstr(quicksum(temp)-t[i][j]-t[j][i]-1 <= 0,"c4_"+str([i,j,k]))				

		# Constraints 5
		for i in numA:
			for j in numA:
				m.addConstr(t[i][j]+t[j][i]<= 1,"c5_"+str([i,j]))

		# Constraints 6
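		# Constraints 6 and 7 below only bite when t[i][j] = 1: together they force
		# 0 <= l[i] - l[j] <= threshold, restricting (via the spatially extensive
		# attribute l) which ordered pairs of areas may be marked as co-regional.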
		for i in numA:
			for j in numA:
				m.addConstr(t[i][j]*(l[i]-l[j])>= 0,"c6_"+str([i,j]))

		# Constraints 7
		for i in numA:
			for j in numA:
				m.addConstr(t[i][j]*threshold>= t[i][j]*(l[i]-l[j]),"c7_"+str([i,j]))	
		
		
		m.update()

		#To reduce memory use
		m.setParam('Nodefilestart',0.1)
		#m.setParam('OutputFlag',False)
		#m.setParam('IntFeasTol', tol)
		m.setParam('LogFile', 'minpO-'+str(conseq)+'-'+str(n)+'-'+str(threshold))
		m.params.timeLimit = 10800#1800
			

		m.optimize()
		 
		time = tm.time()-start

		sol = [0 for u in numA]
		
		reg= []
		for i in numA:
			for k in numA:
				for c in numO:
					if x[i][k][c].x >= 0.5: # binary vars may come back as 0.999...
						# record region k the first time it appears
						if reg.count(k)==0:
							reg.append(k)
		p=len(reg)
		
			
		for v in m.getVars():
			if v.x >0:
				print v.varName, v.x
				
		#import pdb; pdb.set_trace()
		  
		#print "p:", regID
		#print 'FINAL SOLUTION:', sol
		#print 'FINAL OF:', m.objVal
		#print "running time", time
		output = { "objectiveFunction": m.objVal,
			"bestBound": m.objBound,
			"running time": time,
			"algorithm": "minpOrder",
			"regions" : 'none',#len(sol),
			"r2a": 'none',#sol,
			"p": p,
			"distanceType" :  "EuclideanSquared",
			"distanceStat" : "None",
			"selectionType" : "None",
			"ObjectiveFunctionType" : "None"} 
		print "Done"
		return output
				
	except GurobiError:
		print 'Error reported'
Example #2
def execPregionsExact(y, w, p=2,rho='none', inst='none', conseq='none'):

    # TODO: explain what the p-regions model is
    """P-regions model

    The p-regions model, devised by [Duque_Church_Middleton2009]_,
    clusters a set of geographic areas into p spatially contiguous
    regions while minimizing within-cluster heterogeneity.
    In Clusterpy, the p-regions model is formulated as a mixed
    integer-programming (MIP) problem and solved using the
    Gurobi optimizer. ::

        layer.cluster('pRegionsExact',vars,<p>,<wType>,<std>,<dissolve>,<dataOperations>)

    :keyword vars: Area attribute(s) (e.g. ['SAR1','SAR2','POP'])  
    :type vars: list
    :keyword p: Number of spatially contiguous regions to be generated. Default value p = 2.
    :type p: integer
    :keyword wType: Type of first-order contiguity-based spatial matrix: 'rook' or 'queen'. Default value wType = 'rook'. 
    :type wType: string
    :keyword std: If = 1, then the variables will be standardized.
    :type std: binary
    :keyword dissolve: If = 1, then you will get a "child" instance of the layer that contains the new regions. Default value = 0. Note: Each child layer is saved in the attribute layer.results. The first algorithm that you run with dissolve=1 will have a child layer in layer.results[0]; the second algorithm that you run with dissolve=1 will be in layer.results[1], and so on. You can export a child as a shapefile with layer.result[<1,2,3..>].exportArcData('filename')
    :type dissolve: binary
    :keyword dataOperations: Dictionary which maps a variable to a list of operations to run on it. The dissolved layer will contain in its data all the variables specified in this dictionary. Be sure to check the input layer's fieldNames before using this utility.
    :type dataOperations: dictionary
 
    The dictionary structure must be as shown below.

    >>> X = {}
    >>> X[variableName1] = [function1, function2,....]
    >>> X[variableName2] = [function1, function2,....]

    Where the functions are strings representing the names of the
    functions to be applied to the given variableName. Available functions
    are 'sum','mean','min','max','meanDesv','stdDesv','med',
    'mode','range','first','last','numberOfAreas'. By default only the
    ID variable is added to the dissolved map.
    """

    # print "Running p-regions model  (Duque, Church and Middleton, 2009)"
    # print "Number of areas: ", len(y) 
    # print "Number of regions: ", p, "\n"
	#import pdb; pdb.set_trace()
    start = tm.time()

	# PARAMETERS
	
    # Number of areas
    n = len(y)
    l=n-p

    # Area iterator
    numA = list(range(n))

    d={}
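    # d[i][j-i-1] will hold the squared Euclidean distance between areas i and j
    # for j > i (only the upper triangle is stored), matching how the objective
    # function indexes it further down.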
    temp=list(range(n-1))
    for i in temp:
        list1=[]
        for j in numA:
            if i<j:
                list1.append(distanceA2AEuclideanSquared([y[i],y[j]])[0][0])
        d[i]=list1

#-----------------------------------
    try: 
	
		# CONSTRUCTION OF THE MODEL
		
		# Tolerance to non-integer solutions
        tol = 1e-5 #min value: 1e-9
				
	
		# Create the new model
        m=Model("pRegions")

		# Create variables

		# t_ij
		# 1 if areas i and j belong to the same region
		# 0 otherwise
        t = []
        for i in numA:
            t_i = []
            for j in numA:
                t_i.append(m.addVar(vtype=GRB.BINARY,name="t_"+str([i,j])))
            t.append(t_i)

		# x_ij
		# 1 if arc between adjacent areas i and j is selected for a tree graph
		# 0 otherwise
        x = []
        for i in numA:
            x_i=[]
            for j in numA:
                x_i.append(m.addVar(vtype=GRB.BINARY,name="x_"+str([i,j])))
            x.append(x_i)
			
		# u_i
		# Order assigned to each area i in a tree
        u = []
        for i in numA:
			#u.append(m.addVar(lb=1-tol, ub=n-p-tol, vtype=GRB.INTEGER,name="u_"+str(i)))
            u.append(m.addVar(lb=1, vtype=GRB.INTEGER,name="u_"+str(i)))

			  
		# Integrate new variables
        m.update()
	
		# Objective function
        
        of=0
        for i in numA:
            for j in range(i+1,n):
                of+=t[i][j]*d[i][j-i-1]
        
        m.setObjective(of, GRB.MINIMIZE)

		# Constraints 1, 5, 6
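        # Constraint 1 selects exactly n - p arcs (l = n - p); constraint 5 lets an
        # arc (i, j) be selected only if i and j are in the same region (x <= t);
        # constraint 6 is a Miller-Tucker-Zemlin-style ordering condition on u that
        # rules out cycles, so the selected arcs form a forest of exactly p trees.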
        temp = 0
        for i in numA:
            for j in w[i]:
                temp += x[i][j]
				#m.addConstr(x[i][j]-t[i][j]<=tol,"c5_"+str([i,j]))
                m.addConstr(x[i][j]-t[i][j]<=0,"c5_"+str([i,j]))
				#m.addConstr(u[i]-u[j]+(n-p)*x[i][j]+(n-p-2)*x[j][i]<=(l-tol-1),"c6_"+str([i,j]))
                m.addConstr(u[i]-u[j]+(n-p)*x[i][j]+(n-p-2)*x[j][i]<=(l-1),"c6_"+str([i,j]))
 		
		#m.addConstr(temp == l-tol,"c1")
        m.addConstr(temp == l,"c1")

		# Constraint 2
        i = 0
        for x_i in x:
            temp =[]
            for j in w[i]:
                temp.append(x_i[j])
			#m.addConstr(quicksum(temp) <=1-tol, "c2_"+str(i))
            m.addConstr(quicksum(temp) <=1, "c2_"+str(i))
            i += 1

		# Constraints 3, 4
        for i in numA:
            for j in numA:
                if i!=j:
					#m.addConstr(t[i][j]-t[j][i]<=tol,"c4_"+str([i,j]))
                    m.addConstr(t[i][j]-t[j][i]==0,"c4_"+str([i,j]))
                    for em in numA:
                        if em!=j:
							#m.addConstr(t[i][j]+t[i][em]-t[j][em]<=1-tol,"c3_"+str([i,j,em]))
                            m.addConstr(t[i][j]+t[i][em]-t[j][em]<=1,"c3_"+str([i,j,em]))
		
		
        # Redundant constraint: at most one direction of each arc can be selected
        for i in numA:
            for j in numA:
                if i!=j:
                    m.addConstr(x[i][j]+x[j][i]<=1,"cRed_"+str([i,j]))
        
        m.update()

					
		#Writes the .lp file format of the model
		#m.write("test.lp")
		#To reduce memory use
		#m.setParam('Threads',1)
		# To disable optimization output
		#m.setParam('OutputFlag',False)
		#m.setParam('ScaleFlag',0)
		# To set the tolerance to non-integer solutions
        m.setParam('IntFeasTol', tol)
        m.setParam('LogFile', 'E-'+str(conseq)+'-'+str(n)+'-'+str(p)+'-'+str(rho)+'-'+str(inst))
        m.params.timeLimit = 1800
        m.optimize()
		
		#do IIS
		# print 'The model is infeasible; computing IIS'
		# m.computeIIS()
		# print '\nThe following constraint(s) cannot be satisfied:'
		# for c in m.getConstrs():
			# if c.IISConstr:
				# print c.constrName
        
        time = tm.time()-start
   				
		# for v in m.getVars():
			# if v.x >0:
				# print v.varName, v.x
		  		
		# sol = [0 for k in numA]
		# num = list(numA)
		# regID=0 #Number of region
		# while num:
			# area = num[0]
			# sol[area]=regID
			# f = num.remove(area)
			# for j in numA:
				# if t[area][j].x>=1-tol:#==1:
					# sol[j] = regID
					# if num.count(j)!=0:
						# b = num.remove(j)
			# regID += 1
		
		# print 'FINAL SOLUTION:', sol
		# print 'FINAL OF:', m.objVal
		# print 'FINAL bound:', m.objBound
		# print 'GAP:', m.MIPGap
		# print "running time", time
        output = { "objectiveFunction": m.objVal,
			"bestBound": m.objBound,
			"running time": time,
			"algorithm": "pRegionsExact",
			#"regions" : len(sol),
			"r2a": "None",#sol,
			"distanceType" :  "EuclideanSquared",
			"distanceStat" : "None",
			"selectionType" : "None",
			"ObjectiveFunctionType" : "None"} 
        print("Done")
        return output
                
    except GurobiError:
        print('Error reported')
Example #3
def execPregionsExactCP(y, w, p=2, rho='none', inst='none', conseq='none'):

    # TODO: explain what the p-regions model is
    """P-regions model

    The p-regions model, devised by [Duque_Church_Middleton2009]_,
    clusters a set of geographic areas into p spatially contiguous
    regions while minimizing within-cluster heterogeneity.
    In Clusterpy, the p-regions model is formulated as a mixed
    integer-programming (MIP) problem and solved using the
    Gurobi optimizer. ::

        layer.cluster('pRegionsExact',vars,<p>,<wType>,<std>,<dissolve>,<dataOperations>)

    :keyword vars: Area attribute(s) (e.g. ['SAR1','SAR2','POP'])  
    :type vars: list
    :keyword p: Number of spatially contiguous regions to be generated. Default value p = 2.
    :type p: integer
    :keyword wType: Type of first-order contiguity-based spatial matrix: 'rook' or 'queen'. Default value wType = 'rook'. 
    :type wType: string
    :keyword std: If = 1, then the variables will be standardized.
    :type std: binary
    :keyword dissolve: If = 1, then you will get a "child" instance of the layer that contains the new regions. Default value = 0. Note: Each child layer is saved in the attribute layer.results. The first algorithm that you run with dissolve=1 will have a child layer in layer.results[0]; the second algorithm that you run with dissolve=1 will be in layer.results[1], and so on. You can export a child as a shapefile with layer.result[<1,2,3..>].exportArcData('filename')
    :type dissolve: binary
    :keyword dataOperations: Dictionary which maps a variable to a list of operations to run on it. The dissolved layer will contain in its data all the variables specified in this dictionary. Be sure to check the input layer's fieldNames before using this utility.
    :type dataOperations: dictionary
 
    The dictionary structure must be as shown below.

    >>> X = {}
    >>> X[variableName1] = [function1, function2,....]
    >>> X[variableName2] = [function1, function2,....]

    Where the functions are strings representing the names of the
    functions to be applied to the given variableName. Available functions
    are 'sum','mean','min','max','meanDesv','stdDesv','med',
    'mode','range','first','last','numberOfAreas'. By default only the
    ID variable is added to the dissolved map.
    """

    # print "Running p-regions model  (Duque, Church and Middleton, 2009)"
    # print "Number of areas: ", len(y)
    # print "Number of regions: ", p, "\n"

    start = tm.time()

    # PARAMETERS

    # Number of areas
    n = len(y)
    l = n - p

    # Area iterator
    numA = range(n)

    d = {}
    temp = range(n - 1)
    for i in temp:
        list1 = []
        for j in numA:
            if i < j:
                list1.append(distanceA2AEuclideanSquared([y[i], y[j]])[0][0])
        d[i] = list1

#-----------------------------------
    try:

        # CONSTRUCTION OF THE MODEL

        # Tolerance to non-integer solutions
        tol = 1e-5  #1e-9 #min value: 1e-9

        # SUBTOUR ELIMINATION CONSTRAINTS
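        # Lazy-constraint callback: at every new integer solution it reads the
        # incumbent t and x values, collects groups of same-region areas whose
        # selected arcs form a cycle, and adds a lazy cut allowing at most
        # |cycle| - 1 of those arcs, which eliminates the subtour.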
        def subtourelim(model, where):
            if where == GRB.callback.MIPSOL:
                vals = model.cbGetSolution(model.getVars())
                varsx = model.getVars()[n * n:]
                varsx1 = [varsx[i:i + n] for i in range(0, len(varsx), n)]
                t1 = [vals[i:i + n] for i in range(0, n * n, n)]
                x1 = [
                    vals[n * n + i:n * n + i + n] for i in range(0, n * n, n)
                ]
                num = list(numA)
                cycle = []  #sets of areas involved in cycles
                while num:
                    area = num[0]
                    c = [area]
                    acum = 0
                    k = 0
                    while True:
                        if k == n:
                            break
                        if x1[area][k] >= 1 - tol:  #==1:
                            acum = 1
                            break
                        k += 1
                    f = num.remove(area)
                    for j in numA:
                        if t1[area][j] >= 1 - tol:  #==1:
                            c.append(j)
                            k = 0
                            while True:
                                if k == n:
                                    break
                                if x1[j][k] >= 1 - tol:  #==1:
                                    acum += 1
                                    break
                                k += 1
                            if num.count(j) != 0:
                                b = num.remove(j)
                    if acum == len(c) and acum > 1:
                        cycle.append(c)
                if len(cycle):
                    # add a subtour elimination constraint
                    for cycle_k in cycle:
                        temp1 = 0
                        card = len(cycle_k)
                        for i in cycle_k:
                            for j in cycle_k:
                                if j in w[i]:
                                    temp1 += varsx1[i][j]
                        if temp1 != 0:
                            model.cbLazy(temp1 <= card - 1)

        # Create the new model
        m = Model("pRegions")

        # Create variables

        # t_ij
        # 1 if areas i and j belong to the same region
        # 0 otherwise
        t = []
        for i in numA:
            t_i = []
            for j in numA:
                t_i.append(m.addVar(vtype=GRB.BINARY, name="t_" + str([i, j])))
            t.append(t_i)

        # x_ij
        # 1 if arc between adjacent areas i and j is selected for a tree graph
        # 0 otherwise
        x = []
        for i in numA:
            x_i = []
            for j in numA:
                x_i.append(m.addVar(vtype=GRB.BINARY, name="x_" + str([i, j])))
            x.append(x_i)

        # Integrate new variables
        m.update()

        # Objective function

        of = 0
        for i in numA:
            for j in range(i + 1, n):
                of += t[i][j] * d[i][j - i - 1]

        m.setObjective(of, GRB.MINIMIZE)

        # Constraints 1, 5
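        # As in the previous formulation, constraint 1 fixes the number of selected
        # arcs to n - p (up to the integrality tolerance tol) and constraint 5 ties
        # arc selection to region co-membership; here, however, cycles are removed
        # lazily by the subtourelim callback instead of MTZ ordering constraints.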
        temp = 0
        for i in numA:
            for j in w[i]:
                temp += x[i][j]
                m.addConstr(x[i][j] - t[i][j] <= tol, "c5_" + str([i, j]))

        m.addConstr(temp == l - tol, "c1")

        # Constraint 2
        i = 0
        for x_i in x:
            temp = []
            for j in w[i]:
                temp.append(x_i[j])
            m.addConstr(quicksum(temp) <= 1 - tol, "c2_" + str(i))
            i += 1

        # Constraints 3, 4
        for i in numA:
            for j in numA:
                if i != j:
                    m.addConstr(t[i][j] - t[j][i] <= tol, "c4_" + str([i, j]))
                    for em in numA:
                        if em != j:
                            m.addConstr(
                                t[i][j] + t[i][em] - t[j][em] <= 1 - tol,
                                "c3_" + str([i, j, em]))

        # Redundant constraint: at most one direction of each arc can be selected
        for i in numA:
            for j in numA:
                if i != j:
                    m.addConstr(x[i][j] + x[j][i] <= 1,
                                "cRed_" + str([i, j]))

        m.update()

        #Writes the .lp file format of the model
        #m.write("test.lp")

        #To reduce memory use
        #m.setParam('Threads',1)
        #m.setParam('NodefileStart',0.1)
        # To disable optimization output
        #m.setParam('OutputFlag',False)
        #m.setParam('ScaleFlag',0)
        # To set the tolerance to non-integer solutions
        m.setParam('IntFeasTol', tol)
        m.setParam(
            'LogFile', 'CP-' + str(conseq) + '-' + str(n) + '-' + str(p) +
            '-' + str(rho) + '-' + str(inst))
        # To enable lazy constraints
        m.params.LazyConstraints = 1
        m.params.timeLimit = 1800
        #m.params.ResultFile= "resultados.sol"
        m.optimize(subtourelim)

        time = tm.time() - start

        # for v in m.getVars():
        # if v.x >0:
        # print v.varName, v.x

        #import pdb; pdb.set_trace()
        # sol = [0 for k in numA]
        # num = list(numA)
        # regID=0 #Number of region
        # while num:
        # area = num[0]
        # sol[area]=regID
        # f = num.remove(area)
        # for j in numA:
        # if t[area][j].x>=1-tol:#==1:
        # sol[j] = regID
        # if num.count(j)!=0:
        # b = num.remove(j)
        # regID += 1

        # print 'FINAL SOLUTION:', sol
        # print 'FINAL OF:', m.objVal
        # print 'FINAL bound:', m.objBound
        # print 'GAP:', m.MIPGap
        # print "running time", time
        # print "running timeGR", m.Runtime
        output = {
            "objectiveFunction": m.objVal,
            "bestBound": m.objBound,
            "running time": time,
            "algorithm": "pRegionsExactCP",
            #"regions" : len(sol),
            "r2a": "None",  #sol,
            "distanceType": "EuclideanSquared",
            "distanceStat": "None",
            "selectionType": "None",
            "ObjectiveFunctionType": "None"
        }
        print "Done"
        return output

    except GurobiError:
        print 'Error reported'
Example #4
def execPregionsExactCP(y, w, p=2,rho='none', inst='none', conseq='none'):

    # TODO: explain what the p-regions model is
    """P-regions model

    The p-regions model, devised by [Duque_Church_Middleton2009]_,
    clusters a set of geographic areas into p spatially contiguous
    regions while minimizing within-cluster heterogeneity.
    In Clusterpy, the p-regions model is formulated as a mixed
    integer-programming (MIP) problem and solved using the
    Gurobi optimizer. ::

        layer.cluster('pRegionsExact',vars,<p>,<wType>,<std>,<dissolve>,<dataOperations>)

    :keyword vars: Area attribute(s) (e.g. ['SAR1','SAR2','POP'])  
    :type vars: list
    :keyword p: Number of spatially contiguous regions to be generated. Default value p = 2.
    :type p: integer
    :keyword wType: Type of first-order contiguity-based spatial matrix: 'rook' or 'queen'. Default value wType = 'rook'. 
    :type wType: string
    :keyword std: If = 1, then the variables will be standardized.
    :type std: binary
    :keyword dissolve: If = 1, then you will get a "child" instance of the layer that contains the new regions. Default value = 0. Note: Each child layer is saved in the attribute layer.results. The first algorithm that you run with dissolve=1 will have a child layer in layer.results[0]; the second algorithm that you run with dissolve=1 will be in layer.results[1], and so on. You can export a child as a shapefile with layer.result[<1,2,3..>].exportArcData('filename')
    :type dissolve: binary
    :keyword dataOperations: Dictionary which maps a variable to a list of operations to run on it. The dissolved layer will contain in its data all the variables specified in this dictionary. Be sure to check the input layer's fieldNames before using this utility.
    :type dataOperations: dictionary
 
    The dictionary structure must be as shown below.

    >>> X = {}
    >>> X[variableName1] = [function1, function2,....]
    >>> X[variableName2] = [function1, function2,....]

    Where the functions are strings representing the names of the
    functions to be applied to the given variableName. Available functions
    are 'sum','mean','min','max','meanDesv','stdDesv','med',
    'mode','range','first','last','numberOfAreas'. By default only the
    ID variable is added to the dissolved map.
    """

    # print "Running p-regions model  (Duque, Church and Middleton, 2009)"
    # print "Number of areas: ", len(y) 
    # print "Number of regions: ", p, "\n"
    
    start = tm.time()

	# PARAMETERS
	
    # Number of areas
    n = len(y)
    l=n-p

    # Area iterator
    numA = range(n)

    d={}
    temp=range(n-1)
    for i in temp:
        list1=[]
        for j in numA:
            if i<j:
                list1.append(distanceA2AEuclideanSquared([y[i],y[j]])[0][0])
        d[i]=list1



#-----------------------------------
    try: 
	
		# CONSTRUCTION OF THE MODEL
		
		# Tolerance to non-integer solutions
		tol = 1e-5#1e-9 #min value: 1e-9
	
		# SUBTOUR ELIMINATION CONSTRAINTS
		def subtourelim(model, where):
			if where == GRB.callback.MIPSOL:
				vals = model.cbGetSolution(model.getVars())
				varsx = model.getVars()[n*n:]
				varsx1 = [varsx[i:i+n] for i in range(0,len(varsx),n)]
				t1 = [vals[i:i+n] for i in range(0,n*n,n)]
				x1 = [vals[n*n+i:n*n+i+n] for i in range(0,n*n,n)]
				num = list(numA)
				cycle = [] #sets of areas involved in cycles
				while num:
					area = num[0]
					c =[area]
					acum = 0
					k = 0
					while True:
						if k==n:
							break
						if x1[area][k]>=1-tol:#==1:
							acum = 1
							break
						k += 1
					f=num.remove(area)
					for j in numA:
						if t1[area][j]>=1-tol:#==1:
							c.append(j)
							k=0
							while True:
								if k==n:
									break
								if x1[j][k]>=1-tol:#==1:
									acum += 1
									break
								k += 1
							if num.count(j)!=0:
								b =num.remove(j)
					if acum==len(c) and acum>1:
						cycle.append(c)	
				if len(cycle):
					# add a subtour elimination constraint
					for cycle_k in cycle:
							temp1 = 0
							card = len(cycle_k)
							for i in cycle_k:
								for j in cycle_k:
									if j in w[i]:
										temp1 += varsx1[i][j]
							if temp1!=0:
								model.cbLazy(temp1 <= card-1)

				
	
		# Create the new model
		m=Model("pRegions")

		# Create variables

		# t_ij
		# 1 if areas i and j belong to the same region
		# 0 otherwise
		t = []
		for i in numA:
			t_i = []
			for j in numA:
				t_i.append(m.addVar(vtype=GRB.BINARY,name="t_"+str([i,j])))
			t.append(t_i)

		# x_ij
		# 1 if arc between adjacent areas i and j is selected for a tree graph
		# 0 otherwise
		x = []
		for i in numA:
			x_i=[]
			for j in numA:
				x_i.append(m.addVar(vtype=GRB.BINARY,name="x_"+str([i,j])))
			x.append(x_i)
			  
		# Integrate new variables
		m.update()
	
		# Objective function

		of=0
		for i in numA:
			for j in range(i+1,n):
				of+=t[i][j]*d[i][j-i-1]
	
		m.setObjective(of, GRB.MINIMIZE)

		# Constraints 1, 5
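		# Constraint 1 fixes the number of selected arcs to n - p (relaxed by tol),
		# so once the subtourelim callback has cut off every cycle the selected
		# arcs form a spanning forest with exactly p trees, one per region.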
		temp = 0
		for i in numA:
			for j in w[i]:
				temp += x[i][j]
				m.addConstr(x[i][j]-t[i][j]<=tol,"c5_"+str([i,j]))
		
		m.addConstr(temp == l-tol,"c1")

		# Constraint 2
		i = 0
		for x_i in x:
			temp =[]
			for j in w[i]:
				temp.append(x_i[j])
			m.addConstr(quicksum(temp) <=1-tol, "c2_"+str(i))
			i += 1

		# Constraints 3, 4
		for i in numA:
			for j in numA:
				if i!=j:
					m.addConstr(t[i][j]-t[j][i]<=tol,"c4_"+str([i,j]))
					for em in numA:
						if em!=j:
							m.addConstr(t[i][j]+t[i][em]-t[j][em]<=1-tol,"c3_"+str([i,j,em]))
							
		# Redundant constraint: at most one direction of each arc can be selected
		for i in numA:
			for j in numA:
				if i!=j:
					m.addConstr(x[i][j]+x[j][i]<=1,"cRed_"+str([i,j]))
		
		
		
		m.update()

		#Writes the .lp file format of the model
		#m.write("test.lp")
		
		#To reduce memory use
		#m.setParam('Threads',1)
		#m.setParam('NodefileStart',0.1)
		# To disable optimization output
		#m.setParam('OutputFlag',False)
		#m.setParam('ScaleFlag',0)
		# To set the tolerance to non-integer solutions
		m.setParam('IntFeasTol', tol)
		m.setParam('LogFile', 'CP-'+str(conseq)+'-'+str(n)+'-'+str(p)+'-'+str(rho)+'-'+str(inst))
		# To enable lazy constraints
		m.params.LazyConstraints = 1
		m.params.timeLimit = 1800
		#m.params.ResultFile= "resultados.sol"
		m.optimize(subtourelim)
					
			   
		time = tm.time()-start
   				
		# for v in m.getVars():
			# if v.x >0:
				# print v.varName, v.x
		
		#import pdb; pdb.set_trace()
		# sol = [0 for k in numA]
		# num = list(numA)
		# regID=0 #Number of region
		# while num:
			# area = num[0]
			# sol[area]=regID
			# f = num.remove(area)
			# for j in numA:
				# if t[area][j].x>=1-tol:#==1:
					# sol[j] = regID
					# if num.count(j)!=0:
						# b = num.remove(j)
			# regID += 1
					
		# print 'FINAL SOLUTION:', sol
		# print 'FINAL OF:', m.objVal
		# print 'FINAL bound:', m.objBound
		# print 'GAP:', m.MIPGap
		# print "running time", time
		# print "running timeGR", m.Runtime
		output = { "objectiveFunction": m.objVal,
			"bestBound": m.objBound,
			"running time": time,
			"algorithm": "pRegionsExactCP",
			#"regions" : len(sol),
			"r2a": "None",#sol,
			"distanceType" :  "EuclideanSquared",
			"distanceStat" : "None",
			"selectionType" : "None",
			"ObjectiveFunctionType" : "None"} 
		print "Done"
		return output
                
    except GurobiError:
        print 'Error reported'
Example #5
def execMinpFlow(y, w, threshold=1, conseq='none'):
	"""Min-p-regions model (Flow formulation)

	The min-p-regions model, devised by [Duque_...2014]_ ,
	clusters a set of geographic areas into the minimum number of homogeneous
	regions such that the value of a spatially extensive regional attribute is
	above a predefined threshold value. In clusterPy we measure heterogeneity as
	the within-cluster sum of squares from each area to the attribute centroid
	of its cluster.  ::
	
		layer.cluster('minpFlow',vars,<threshold>,<wType>,<std>,<dissolve>,<dataOperations>)

	:keyword vars: Area attribute(s) (e.g. ['SAR1','SAR2','POP'])  
	:type vars: list
	:keyword threshold: Minimum value of the constrained variable at regional level. Default value threshold = 100.
	:type threshold: integer
	:keyword wType: Type of first-order contiguity-based spatial matrix: 'rook' or 'queen'. Default value wType = 'rook'. 
	:type wType: string
	:keyword std: If = 1, then the variables will be standardized.
	:type std: binary
	:keyword dissolve: If = 1, then you will get a "child" instance of the layer that contains the new regions. Default value = 0. Note: Each child layer is saved in the attribute layer.results. The first algorithm that you run with dissolve=1 will have a child layer in layer.results[0]; the second algorithm that you run with dissolve=1 will be in layer.results[1], and so on. You can export a child as a shapefile with layer.result[<1,2,3..>].exportArcData('filename')
	:type dissolve: binary
	:keyword dataOperations: Dictionary which maps a variable to a list of operations to run on it. The dissolved layer will contain in its data all the variables specified in this dictionary. Be sure to check the input layer's fieldNames before using this utility.
	:type dataOperations: dictionary

	The dictionary structure must be as shown below.

	>>> X = {}
	>>> X[variableName1] = [function1, function2,....]
	>>> X[variableName2] = [function1, function2,....]

	Where the functions are strings representing the names of the
	functions to be applied to the given variableName. Available functions
	are 'sum','mean','min','max','meanDesv','stdDesv','med',
	'mode','range','first','last','numberOfAreas'. By default only the
	ID variable is added to the dissolved map.
	"""

	print "Running max-p-regions model (Duque, Anselin and Rey, 2010)"
	print "Exact method"
	print "Number of areas: ", len(y) 
	print "threshold value: ", threshold
	
	start = tm.time()

	# Number of areas
	n = len(y)

	# Area iterator
	numA = range(n)
	
	Wr=w
	
	z = {}
	l = {} #spatially extensive attribute
	for i in numA:
		z[i] = y[i][0]
		l[i] = y[i][1]

	d = nm.zeros(shape = (n,n))
	for i in numA:
		for j in numA:
			d[i,j]=distanceA2AEuclideanSquared([[z[i]],[z[j]]])[0][0]
		
	# h: scaling factor
	temp = 0
	for i in numA:
		for j in numA:
			if i<j:
				temp += d[i][j]
	h = 1+ nm.floor(nm.log(temp))


	#-----------------------------------
	try: 
	# Create the new model
		m=Model("minpRegions")

		# Create variables

		# t_ij
		# 1 if areas i and j belong to the same region
		# 0 otherwise
		# t = []
		# for i in numA:
			# t_i = []
			# for j in numA:
				# t_i.append(m.addVar(vtype=GRB.BINARY,name="t_"+str([i,j])))
			# t.append(t_i)
		t={(i,j):m.addVar(vtype=GRB.BINARY,name="t_"+str([i,j])) for i in numA for j in numA if i!=j}
		
		# f_ijk
		# amount of flow from area i to j in region k
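		# These flow variables drive the contiguity constraints further down: flow
		# may only travel between adjacent areas assigned to the same region k
		# (c4, c5), each region has at most one sink area with w[i][k] = 1 (c3),
		# and every non-sink member of a region must send out at least one more
		# unit than it receives (c7), so each member has to reach the sink through
		# a chain of adjacent same-region areas.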
		f={(i,j,k):m.addVar(vtype=GRB.SEMICONT,name="f_"+str([i,j,k])) for i in numA for j in Wr[i] for k in numA}
		# f = []
		# for i in numA:
			# f_i=[]
			# for j in Wr[i]: 
				# f_ij=[]
				# for k in numA:
					# f_ij.append(m.addVar(vtype=GRB.SEMICONT,name="f_"+str([i,j,k])))
				# f_i.append(f_ij)
			# f.append(f_i)
			
		# y_ik
		# 1 if area i is included in region k
		# 0 otherwise
		y = []
		for i in numA:
			y_i = []
			for k in numA:
				y_i.append(m.addVar(vtype=GRB.BINARY,name="y_"+str([i,k])))
			y.append(y_i)

		# w_ik
		# 1 if area i is chosen as the sink of region k
		# 0 otherwise
		w = []
		for i in numA:
			w_i = []
			for k in numA:
				w_i.append(m.addVar(vtype=GRB.BINARY,name="w_"+str([i,k])))
			w.append(w_i)
			  
		# Integrate new variables
		m.update()

		# Objective function

		temp1 = []
		temp2 = []
		for i in numA:
			# Number of regions
			for k in numA:
				temp1.append(w[i][k])
			# Total heterogeneity
			for j in numA:
				if i!=j:
					temp2.append(d[i][j]*t[i,j])

		m.setObjective((10**float(h))*quicksum(temp1)+quicksum(temp2), GRB.MINIMIZE)

		# Constraints 1
		for i in numA:
			temp = []
			for k in numA:
				temp.append(y[i][k])
			m.addConstr(quicksum(temp)==1,"c1_"+str([i]))
			
		# m.addConstr(w[4][0]==1)
		# m.addConstr(f[3][4][0]==1)
					
		# Constraints 2		
		for i in numA:
			for k in numA:
				m.addConstr(w[i][k]<=y[i][k],"c2_"+str([i,k]))
			
		# Constraints 3		
		for k in numA:
			temp = []
			for i in numA:
				temp.append(w[i][k])
			m.addConstr(quicksum(temp) <= 1,"c3_"+str([k]))

		# Constraints 4, 5
		for k in numA:
			for i in numA:
				for j in Wr[i]:
					m.addConstr(f[i,j,k]<=y[i][k]*n,"c4_"+str([i,j,k]))
					m.addConstr(f[i,j,k]<=y[j][k]*n,"c5_"+str([i,j,k]))
		
		# Constraints 6 
		for k in numA:
			temp1 = []
			temp2 = []
			for i in numA:
				temp1.append(w[i][k])
				for j in Wr[i]:
					temp2.append(f[i,j,k])
			m.addConstr(quicksum(temp2)<=quicksum(temp1)*(n*1.0/2)*(n+1),"c6_"+str([k]))				


		# Constraints 7
		for i in numA:
			for k in numA:
				temp1=[]
				temp2 = []
				for j in Wr[i]:
					temp1.append(f[i,j,k])
					temp2.append(f[j,i,k])
					#temp1.append(f[i][j][k])
					#temp2.append(f[j][i][k])
				m.addConstr(y[i][k]-n*w[i][k]-quicksum(temp1)+quicksum(temp2)<=0 ,"c7_"+str([i,k]))				

		# Constraints 8
		for i in numA:
			for j in numA:
				if i!=j:
					for k in numA:
						m.addConstr(y[i][k]+y[j][k]-1-t[i,j]-t[j,i]<=0,"c8_"+str([i,j,k]))	
		
		# Constraints 9
		for i in numA:
			for j in numA:
				if i!=j:
					m.addConstr(-t[i,j]*(l[i]-l[j])<=0,"c9_"+str([i,j]))	
			
		# Constraints 10
		for i in numA:
			for j in numA:
				if i!=j:
					m.addConstr(t[i,j]*(l[i]-l[j])-t[i,j]*threshold<=0,"c10_"+str([i,j]))
		
		m.update()


		#To reduce memory use
		m.setParam('Nodefilestart',0.1)
		#m.setParam('OutputFlag',False)
		m.params.timeLimit = 10800#1800
		m.setParam('LogFile', 'minpF-'+str(conseq)+'-'+str(n)+'-'+str(threshold))
		
		
		m.optimize()
		   
		time = tm.time()-start
		

		reg= []
		for i in numA:
			for k in numA:
				if y[i][k].x >= 0.5: # binary vars may come back as 0.999...
					# record region k the first time it appears
					if reg.count(k)==0:
						reg.append(k)
		p=len(reg)
		
		
		for v in m.getVars():
			if v.x >0:
				print v.varName, v.x

				
		#print "p:", regID
		#print 'FINAL SOLUTION:', sol
		# print 'FINAL OF:', m.objVal
		# print "running time", time
		output = { "objectiveFunction": m.objVal,
			"bestBound": m.objBound,
			"running time": time,
			"algorithm": "minpOrder",
			"regions" : 'none',#len(sol),
			"r2a": 'none',#sol,
			"p": p,
			"distanceType" :  "EuclideanSquared",
			"distanceStat" : "None",
			"selectionType" : "None",
			"ObjectiveFunctionType" : "None"} 
		print "Done"
		return output
				
	except GurobiError:
		print 'Error reported'
Example #6
def execMinpFlow(y, w, threshold=1, conseq='none'):
    """Min-p-regions model (Flow formulation)

	The min-p-regions model, devised by [Duque_...2014]_ ,
	clusters a set of geographic areas into the minimum number of homogeneous
	regions such that the value of a spatially extensive regional attribute is
	above a predefined threshold value. In clusterPy we measure heterogeneity as
	the within-cluster sum of squares from each area to the attribute centroid
	of its cluster.  ::
	
		layer.cluster('minpFlow',vars,<threshold>,<wType>,<std>,<dissolve>,<dataOperations>)

	:keyword vars: Area attribute(s) (e.g. ['SAR1','SAR2','POP'])  
	:type vars: list
	:keyword threshold: Minimum value of the constrained variable at regional level. Default value threshold = 100.
	:type threshold: integer
	:keyword wType: Type of first-order contiguity-based spatial matrix: 'rook' or 'queen'. Default value wType = 'rook'. 
	:type wType: string
	:keyword std: If = 1, then the variables will be standardized.
	:type std: binary
	:keyword dissolve: If = 1, then you will get a "child" instance of the layer that contains the new regions. Default value = 0. Note: Each child layer is saved in the attribute layer.results. The first algorithm that you run with dissolve=1 will have a child layer in layer.results[0]; the second algorithm that you run with dissolve=1 will be in layer.results[1], and so on. You can export a child as a shapefile with layer.result[<1,2,3..>].exportArcData('filename')
	:type dissolve: binary
	:keyword dataOperations: Dictionary which maps a variable to a list of operations to run on it. The dissolved layer will contain in its data all the variables specified in this dictionary. Be sure to check the input layer's fieldNames before using this utility.
	:type dataOperations: dictionary

	The dictionary structure must be as shown below.

	>>> X = {}
	>>> X[variableName1] = [function1, function2,....]
	>>> X[variableName2] = [function1, function2,....]

	Where the functions are strings representing the names of the
	functions to be applied to the given variableName. Available functions
	are 'sum','mean','min','max','meanDesv','stdDesv','med',
	'mode','range','first','last','numberOfAreas'. By default only the
	ID variable is added to the dissolved map.
	"""

    print("Running max-p-regions model (Duque, Anselin and Rey, 2010)")
    print("Exact method")
    print("Number of areas: ", len(y))
    print("threshold value: ", threshold)

    start = tm.time()

    # Number of areas
    n = len(y)

    # Area iterator
    numA = list(range(n))

    Wr = w

    z = {}
    l = {}  #spatially extensive attribute
    for i in numA:
        z[i] = y[i][0]
        l[i] = y[i][1]

    d = nm.zeros(shape=(n, n))
    for i in numA:
        for j in numA:
            d[i, j] = distanceA2AEuclideanSquared([[z[i]], [z[j]]])[0][0]

    # h: scaling factor
    temp = 0
    for i in numA:
        for j in numA:
            if i < j:
                temp += d[i][j]
    h = 1 + nm.floor(nm.log(temp))

    #-----------------------------------
    try:
        # Create the new model
        m = Model("minpRegions")

        # Create variables

        # t_ij
        # 1 if areas i and j belong to the same region
        # 0 otherwise
        # t = []
        # for i in numA:
        # t_i = []
        # for j in numA:
        # t_i.append(m.addVar(vtype=GRB.BINARY,name="t_"+str([i,j])))
        # t.append(t_i)
        t = {(i, j): m.addVar(vtype=GRB.BINARY, name="t_" + str([i, j]))
             for i in numA for j in numA if i != j}

        # f_ijk
        # amount of flow from area i to j in region k
        f = {(i, j, k): m.addVar(vtype=GRB.SEMICONT,
                                 name="f_" + str([i, j, k]))
             for i in numA for j in Wr[i] for k in numA}
        # f = []
        # for i in numA:
        # f_i=[]
        # for j in Wr[i]:
        # f_ij=[]
        # for k in numA:
        # f_ij.append(m.addVar(vtype=GRB.SEMICONT,name="f_"+str([i,j,k])))
        # f_i.append(f_ij)
        # f.append(f_i)

        # y_ik
        # 1 if area i is included in region k
        # 0 otherwise
        y = []
        for i in numA:
            y_i = []
            for k in numA:
                y_i.append(m.addVar(vtype=GRB.BINARY, name="y_" + str([i, k])))
            y.append(y_i)

        # w_ik
        # 1 if area i is chosen as the sink of region k
        # 0 otherwise
        w = []
        for i in numA:
            w_i = []
            for k in numA:
                w_i.append(m.addVar(vtype=GRB.BINARY, name="w_" + str([i, k])))
            w.append(w_i)

        # Integrate new variables
        m.update()

        # Objective function

        temp1 = []
        temp2 = []
        for i in numA:
            # Number of regions
            for k in numA:
                temp1.append(w[i][k])
            # Total heterogeneity
            for j in numA:
                if i != j:
                    temp2.append(d[i][j] * t[i, j])

        m.setObjective((10**float(h)) * quicksum(temp1) + quicksum(temp2),
                       GRB.MINIMIZE)

        # Constraints 1
        for i in numA:
            temp = []
            for k in numA:
                temp.append(y[i][k])
            m.addConstr(quicksum(temp) == 1, "c1_" + str([i]))

        # m.addConstr(w[4][0]==1)
        # m.addConstr(f[3][4][0]==1)

        # Constraints 2
        for i in numA:
            for k in numA:
                m.addConstr(w[i][k] <= y[i][k], "c2_" + str([i, k]))

        # Constraints 3
        for k in numA:
            temp = []
            for i in numA:
                temp.append(w[i][k])
            m.addConstr(quicksum(temp) <= 1, "c3_" + str([k]))

        # Constraints 4, 5
        for k in numA:
            for i in numA:
                for j in Wr[i]:
                    m.addConstr(f[i, j, k] <= y[i][k] * n,
                                "c4_" + str([i, j, k]))
                    m.addConstr(f[i, j, k] <= y[j][k] * n,
                                "c5_" + str([i, j, k]))

        # Constraints 6
        for k in numA:
            temp1 = []
            temp2 = []
            for i in numA:
                temp1.append(w[i][k])
                for j in Wr[i]:
                    temp2.append(f[i, j, k])
            m.addConstr(
                quicksum(temp2) <= quicksum(temp1) * (old_div(n * 1.0, 2)) *
                (n + 1), "c6_" + str([k]))

        # Constraints 7
        for i in numA:
            for k in numA:
                temp1 = []
                temp2 = []
                for j in Wr[i]:
                    temp1.append(f[i, j, k])
                    temp2.append(f[j, i, k])
                    #temp1.append(f[i][j][k])
                    #temp2.append(f[j][i][k])
                m.addConstr(
                    y[i][k] - n * w[i][k] - quicksum(temp1) + quicksum(temp2)
                    <= 0, "c7_" + str([i, k]))

        # Constraints 8
        for i in numA:
            for j in numA:
                if i != j:
                    for k in numA:
                        m.addConstr(
                            y[i][k] + y[j][k] - 1 - t[i, j] - t[j, i] <= 0,
                            "c8_" + str([i, j, k]))

        # Constraints 9
        for i in numA:
            for j in numA:
                if i != j:
                    m.addConstr(-t[i, j] * (l[i] - l[j]) <= 0,
                                "c9_" + str([i, j]))

        # Constraints 10
        for i in numA:
            for j in numA:
                if i != j:
                    m.addConstr(
                        t[i, j] * (l[i] - l[j]) - t[i, j] * threshold <= 0,
                        "c10_" + str([i, j]))

        m.update()

        #To reduce memory use
        m.setParam('Nodefilestart', 0.1)
        #m.setParam('OutputFlag',False)
        m.params.timeLimit = 10800  #1800
        m.setParam(
            'LogFile',
            'minpF-' + str(conseq) + '-' + str(n) + '-' + str(threshold))

        m.optimize()

        time = tm.time() - start

        reg = []
        for i in numA:
            for k in numA:
                if y[i][k].x >= 0.5:  # binary vars may come back as 0.999...
                    # record region k the first time it appears
                    if reg.count(k) == 0:
                        reg.append(k)
        p = len(reg)

        for v in m.getVars():
            if v.x > 0:
                print(v.varName, v.x)

        #print "p:", regID
        #print 'FINAL SOLUTION:', sol
        # print 'FINAL OF:', m.objVal
        # print "running time", time
        output = {
            "objectiveFunction": m.objVal,
            "bestBound": m.objBound,
            "running time": time,
            "algorithm": "minpOrder",
            "regions": 'none',  #len(sol),
            "r2a": 'none',  #sol,
            "p": p,
            "distanceType": "EuclideanSquared",
            "distanceStat": "None",
            "selectionType": "None",
            "ObjectiveFunctionType": "None"
        }
        print("Done")
        return output

    except GurobiError:
        print('Error reported')