-
Notifications
You must be signed in to change notification settings - Fork 1
/
barrier.py
100 lines (72 loc) · 2.88 KB
/
barrier.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
# %%
from cvxopt import lapack, solvers, matrix, spdiag, log, div, normal, uniform, blas
from cvxopt.modeling import variable, op, max, min, sum
from array import array
from operator import mul
from math import sqrt
import numpy as np
mu = 2e-01 # tolerance for barrier function (Newton-decrement stopping threshold)
j = 10 # number of datacenters
k = 5 # number of jobs each containing t tasks
c1 = [] # input data size (placeholder, never populated in this file)
c2 = [] # link bandwidth (placeholder, never populated in this file)
c3 = [] # execution time (placeholder, never populated in this file)
c4 = [] # communication time (placeholder, never populated in this file)
c5 = [] # resource capacity (placeholder, never populated in this file)
m = j + k # constraint rows: one per datacenter plus one per job
n = j * k # variables: one per (datacenter, job) pair
TMU = np.zeros(shape=(m, n), dtype=int) # a totally unimodular matrix of zeros
# an m x n matrix representing the constraints 5 (computational resource exceed) and constraints 6
# (each job assigned to at least 1 datacenter)
# NOTE(review): A is built entirely from zeros -- the constraints are never
# filled in, so the barrier Hessian A'*diag(d)^2*A is singular and the
# Cholesky solve in barrier() will fail; confirm A is meant to be populated
# with real constraint data before barrier() runs.
A = matrix(np.array(TMU), tc='i')
# b is our value that x can reach given constraints of A.
b = uniform(m, 1)
# Make x = 0 feasible for barrier. A flow can be allocated 0 time in favor of
# another getting more time. b comes from uniform(m, 1), presumably drawn in
# (0, 1) so every entry is positive -- TODO confirm cvxopt's default range.
# Scaling by 1.1 * max(|b|) keeps b - A*0 = b strictly positive.
b /= (1.1 * max(abs(b)))
# %%
"""
Centering uses Newton's Centering Method. This part is from the example given by cvxopt library.
Our barrier function is simply the update of the x value towards an optimal solution. x is a double value here.
We are given mu by the tolerance above. If any centering reaches close to mu we stop since that is close to the
edge of non-feasible solutions and exterior to any optimal solution.
"""
def barrier():
    """Analytic centering via Newton's method with a log barrier.

    Adapted from the cvxopt analytic-centering example: approximately
    minimizes -sum(log(b - A*x)) over the module-level constraint data
    (A, b), starting from the feasible point x = 0.

    Returns:
        cvxopt matrix (n x 1): the centered x once the Newton decrement
        drops below the module-level tolerance mu, or the last iterate
        if MAXITERS is exhausted.

    Raises:
        ArithmeticError: from lapack.posv if the Hessian is not positive
        definite (e.g. while A is still the all-zeros placeholder).
    """
    # cvxopt's ELEMENTWISE multiply. The module-level `mul` is
    # operator.mul, which performs a matrix product; using it on two
    # same-shaped matrices raises a dimension error -- this was the bug
    # flagged in the original "bug here" comment.
    from cvxopt import mul as emul
    # Parameters kept from the cvxopt example.
    MAXITERS = 100
    ALPHA = 0.01  # backtracking: sufficient-decrease fraction
    BETA = 0.5    # backtracking: step shrink factor
    x = matrix(0.0, (n, 1))
    H = matrix(0.0, (n, n))  # symmetric Hessian buffer, reused each iteration
    for it in range(MAXITERS):  # `it`, not `iter`: avoid shadowing the builtin
        # Gradient: g = A' * d where d = (b - A*x)^-1 (elementwise inverse).
        d = (b - A*x) ** -1
        g = A.T * d
        # Hessian H = A' * diag(d)^2 * A: scale each column-replicated copy
        # of d against A elementwise, then form the symmetric rank-k product
        # with BLAS syrk (trans='T' gives (DA)'(DA)).
        blas.syrk(emul(d[:, n*[0]], A), H, trans='T')
        # Newton step: solve H v = -g via Cholesky (LAPACK posv).
        v = -g
        lapack.posv(H, v)
        # lam = g'v = -(Newton decrement)^2, so lam <= 0.
        lam = blas.dot(g, v)
        if sqrt(-lam) < mu:
            return x  # converged: decrement fell below tolerance mu
        # Backtracking line search with ALPHA/BETA; the first loop shrinks
        # the step until the new point is strictly feasible (1 - step*y > 0).
        y = emul(A * v, d)
        step = 1.0
        while 1 - step*max(y) < 0:
            step *= BETA
        while True:
            if -sum(log(1 - step*y)) < (ALPHA * step * lam):
                break
            step *= BETA
        # Move along the Newton direction by the accepted step.
        x += step*v
    # Did not converge within MAXITERS; return the last iterate rather
    # than falling off the end with an implicit None.
    return x
barrier()