Example #1
 def connectServer(self):
   #doCloud
   if self.doCloudUrl is not None:
     if import_error:
       self.Error(import_error)
     if sys.platform == "win32":
       os.environ['REQUESTS_CA_BUNDLE'] = os.path.join(os.path.dirname(sys.executable), "GMSPython", "Lib", "site-packages", "certifi", "cacert.pem")
     self.doCloudClient = JobClient(self.doCloudUrl, self.doCloudKey)
   #neos
   else:
     if self.logopt in [1,3,4]:
       sys.stdout.write("Connecting to: %s://%s:%s\n" % (self.serverProtocol,self.serverHost,self.serverPort))
     if self.logopt in [2,4]:
       # Append the message to the logfile indicated
       try:
         f = open(self.logfilename,'a')
         f.write("Connecting to: %s://%s:%s\n" % (self.serverProtocol,self.serverHost,self.serverPort))
         f.close()
       except IOError as e:
         self.Fatal("Could not append to log file %s" % self.logfilename)
     self.neos = xmlrpc.client.Server("%s://%s:%s" % (self.serverProtocol,self.serverHost,self.serverPort))
     
     reply = self.neos.ping()
     if reply.find('alive') < 0:
       raise KestrelException("Unable to contact NEOS at %s://%s:%s" % \
             (self.serverProtocol, self.serverHost, self.serverPort))
Example #2
    def __init__(self, problemName, model=None, resultDataModel=None, credentials=None, *attachments):
        '''
        Constructs an Optimizer instance.
        The instance requires an optimization model as a parameter.
        You can also provide one or more data files as attachments, either in OPL .dat or in
        JSON format. This data does not change from solve to solve. If you have input data
        that does change, you can provide it to the solve method as an OPLCollector object.
        :param problemName: name of this optimization problem instance
        :type problemName: String
        :param model: an optimization model written in OPL
        :type model: Model.Source object or String
        :param resultDataModel: the application data model for the results of the optimization
        :type resultDataModel: dict<String, StructType>
        :param credentials: DOcplexcloud url and api key
        :type credentials: {"url": String, "key": String}
        :param attachments: URLs for files representing the data that does not vary from solve to solve
        :type attachments: list<URL>
        '''
        self.name = problemName
        self.model = model
        self.resultDataModel = resultDataModel
        self.attachData(attachments)
        self.streamsRegistry = []
        self.history = []

        self.credentials = credentials

        self.jobclient = JobClient(credentials["url"], credentials["key"])
        self.solveStatus = JobSolveStatus.UNKNOWN
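A minimal usage sketch, not from the original source: it assumes the Optimizer class above is importable, and the URL, API key, model path, and attachment path are placeholders. Attachments must be passed positionally, since *attachments follows the named parameters.

model_source = open("models/truck.mod").read()  # placeholder OPL model file
opt = Optimizer(
    "truck-loading",                        # problemName
    model_source,                           # model
    None,                                   # resultDataModel (optional)
    {"url": "https://your-docloud-url/",    # credentials (placeholders)
     "key": "your-api-key"},
    "models/truck.dat")                     # attachment that never changes between solves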
Example #3
def run_model():
    url = 'https://api-oaas.docloud.ibmcloud.com/job_manager/rest/v1/'
    key = 'api_fc2d8028-3ebc-4a3a-8664-b4018b1c05a8'

    client = JobClient(url=url, api_key=key)

    resp = client.execute(input=[
        'workshop_model.py', '../input/best_submission.csv',
        '../input/family_data.csv'
    ],
                          output='solution.json',
                          load_solution=True,
                          log='logs.txt')
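A short follow-up sketch, an assumption rather than part of the original example: with load_solution=True the response carries the raw solution bytes (the later examples read them via resp.solution), which can be decoded and parsed as JSON.

import json

solution = json.loads(resp.solution.decode("utf-8"))  # bytes -> dict
print(solution)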
Example #4
    def __init__(self, url, api_key, concurrent_jobs):
        """ Creates a new controller to submit job asynchronously.
        
        Args:
            url: The DOcloud url.
            api_key: The DOcloud api key.
            concurrent_jobs: The number of concurrently submitted jobs.
        """
        self.nb_threads = concurrent_jobs
        self.client = JobClient(url, api_key)

        # This is the opl model file
        self.mod_file = "models/truck.mod"
        # The executor
        self.executor = ThreadPoolExecutor(self.nb_threads)
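A hedged sketch of how such a controller might fan jobs out over the pool; the submit method, the .dat input, and the output naming scheme are assumptions, not part of the original class.

    def submit(self, dat_file):
        """Submits one truck.mod job on the thread pool; returns a Future."""
        # JobClient.execute uploads the inputs, runs the job, and downloads
        # the output, so each call can run on its own worker thread.
        return self.executor.submit(self.client.execute,
                                    input=[self.mod_file, dat_file],
                                    output=dat_file + ".results.json")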
Example #5
def run_command(prog, argv, url=None, key=None):
    description = '''Command line client for DOcplexcloud.'''
    epilog = '''Command details:
  info           Get and display information for the jobs whose ids are
                 specified as ARG.
  download       Download the attachments of the specified jobs to the
                 current directory.
  rm             Delete the jobs whose ids are specified as ARG.
  rm all         Delete all jobs.
  logs           Download and display the logs for the jobs whose ids are specified.
  ls             Lists the jobs.'''
    epilog_cli = '''
  execute        Submit a job and wait for end of execution. Each ARG that
                 is a file is uploaded as the job input. Example:
                    python run.py execute model.py model.data -v
                      executes a job whose input files are model.py and
                      model.data, in verbose mode.
'''
    filter_help = '''

   Within filters, the following variables are defined:
      now: current date and time as a timestamp in milliseconds
      minute: 60 sec in milliseconds
      hour: 60 minutes in milliseconds
      day: 24 hours in milliseconds
      job: the current job being filtered

    Example filter usage:
        Delete all jobs older than 3 hours:
        python -m docplex.cli --filter "now-job['startedAt'] > 3*hour " rm
'''
    # 'ip' is expected to be defined at module level (the IPython instance,
    # or None when running outside a notebook); the CLI-only commands are
    # documented only in that case.
    if ip is None:
        epilog += epilog_cli
    epilog += filter_help
    parser = argparse.ArgumentParser(prog=prog, description=description, epilog=epilog,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('command',
                        metavar='COMMAND',
                        help='DOcplexcloud command')
    parser.add_argument('arguments', metavar='ARG', nargs='*',
                        help='Arguments for the command')
    parser.add_argument('--no-delete', action='store_true', default=False,
                        dest='nodelete',
                        help="If specified, jobs are not deleted after execution")
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Verbose mode')
    parser.add_argument('--as', nargs=1, metavar='HOST',
                        dest="host_config", default=None,
                        help="'as host' - use the cplex_config_<HOST>.py configuration file found in PYTHONPATH")
    parser.add_argument('--url', nargs=1, metavar='URL',
                        dest="url", default=None,
                        help="The DOcplexcloud connection URL. If not specified, will use those found in docplex config files")
    parser.add_argument('--key', nargs=1, metavar='API_KEY',
                        dest="key", default=None,
                        help="The DOcplexcloud connection key. If not specified, will use those found in docplex config files")
    parser.add_argument('--details', action='store_true', default=False,
                        help='Display solve details as they are available')
    parser.add_argument('--filter', metavar='FILTER',  default=None,
                        help="Filter on jobs. Example: --filter \"now - job['createdAt'] > hour\"")
    parser.add_argument('--quiet', '-q', action='store_true', default=False,
                        help='Only show numeric IDs as output')
    args = parser.parse_args(argv)

    program_result = ProgramResults()

    # Get the context here so that we have some credentials at hand
    context = Context.make_default_context()

    if args.host_config is not None:
        config_name = "cplex_config_%s.py" % args.host_config[0]
        config_file = list(filter(os.path.isfile, [os.path.join(x, config_name) for x in sys.path]))
        if len(config_file) == 0:
            print("Could not find config file for host: %s" % args.host_config[0])
            program_result.return_code = -1
            return(program_result)
        if args.verbose:
            print("Overriding host config with: %s" % config_file[0])
        context.read_settings(config_file[0])

    # use credentials in context unless they are given to this function
    client_url = context.solver.docloud.url if url is None else url
    client_key = context.solver.docloud.key if key is None else key
    # but if there are some credentials in arguments (--url, --key), use them
    if args.url:
        client_url = args.url[0]
    if args.key:
        client_key = args.key[0]
    if args.verbose:
        print('**** Connecting to %s with key %s' % (client_url, client_key))
        print('Will send command %s' % args.command)
        print('Arguments:')
        for i in args.arguments:
            print('  -> %s' % i)
        print('verbose = %s' % args.verbose)

    client = JobClient(client_url, client_key)

    target_jobs = []
    if args.filter:
        jobs = client.get_all_jobs()
        now = (datetime.datetime.now() - datetime.datetime(1970,1,1)).total_seconds() * 1000.0
        minute = 60 * 1000
        hour = 60 * minute
        day = 24 * hour
        filter_context = {'now': now,
                          'minute': minute,
                          'hour': hour,
                          'day': day,
                          }
        for j in jobs:
            filter_context['job'] = j
            keep = False
            try:
                keep = eval(args.filter, globals(), filter_context)
            except KeyError:  # if a key was not found, just assume the expression is false
                keep = False
            if keep:
                target_jobs.append(j)

    if target_jobs:
        for i in target_jobs:
            print('applying to %s' % i['_id'])

    if args.command == 'ls':
        ls_jobs(client, program_result, quiet=args.quiet, selected_jobs=target_jobs)
    elif args.command == 'info':
        if target_jobs:
            args.arguments = [x["_id"] for x in target_jobs]
        elif len(args.arguments) == 1 and args.arguments[0] == 'all':
            args.arguments = [x["_id"] for x in client.get_all_jobs()]
        for jid in args.arguments:
            info_text = "NOT FOUND"
            try:
                job = client.get_job(jid)
                info_text = json.dumps(job, indent=3)
            except Exception:
                pass
            print("%s:\n%s" % (jid, info_text))
    elif args.command == 'rm':
        if target_jobs:
            joblist = [x["_id"] for x in target_jobs]
        elif args.arguments:
            joblist = args.arguments
        else:
            joblist = shlex.split(sys.stdin.read())
        rm_job(client, joblist, verbose=args.verbose)
    elif args.command == 'logs':
        if target_jobs:
            if len(target_jobs) != 1:
                print('Logs can only be retrieved when the filter selects exactly one job (actual selection count = %s)' % len(target_jobs))
                program_result.return_code = -1
                return(program_result)
            args.arguments = [x["_id"] for x in target_jobs]
        if not args.arguments:
            print('Please specify job list in arguments or using filter.')
            program_result.return_code = -1
            return(program_result)
        for jid in args.arguments:
            log_items = client.get_log_items(jid)
            for log in log_items:
                for record in log["records"]:
                    print(record["message"])
    elif args.command == 'download':
        if target_jobs:
            if len(target_jobs) != 1:
                print('Jobs can only be downloaded when the filter selects exactly one job (actual selection count = %s)' % len(target_jobs))
                program_result.return_code = -1
                return(program_result)
            args.arguments = [x["_id"] for x in target_jobs]
        for jid in args.arguments:
            job = client.get_job(jid)
            for attachment in job['attachments']:
                print('downloading %s' % attachment['name'])
                with open(attachment['name'], 'wb') as f:
                    f.write(client.download_job_attachment(jid, attachment['name']))
    elif args.command == 'execute':
        if target_jobs:
            print('Execute command does not support job filtering')
            program_result.return_code = -1
            return(program_result)
        inputs = [{'name': basename(a), 'filename': a} for a in args.arguments]
        if args.verbose:
            for i in inputs:
                print("Uploading %s as attachment name %s" % (i['filename'], i['name']))
        execute_job(client, inputs, args.verbose, args.details, args.nodelete)
    else:
        print("Unknown command: %s" % args.command)
        program_result.return_code = -1
        return(program_result)
    return(program_result)
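A hypothetical invocation sketch, not part of the original module; it assumes ProgramResults leaves return_code falsy on success, as the error paths above suggest.

if __name__ == '__main__':
    result = run_command('docplex.cli', sys.argv[1:])
    sys.exit(result.return_code or 0)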
Example #6
def solving_placement_problem_from_file(topology_graph, request_graph,
                                        test_num, CPLEX_PATH,
                                        cplex_models_path, results_path,
                                        locally):
    # Reading networkx file
    G_topology = read_json_file(topology_graph)
    G_request = read_json_file(request_graph)

    set_PM = list(G_topology.nodes)
    set_state_or_nf = list(G_request.nodes)
    set_state, set_nf, set_replica = [], [], []
    for i in set_state_or_nf:
        if "function" in i:
            set_nf.append(i)
        elif "state" in i:
            set_state.append(i)
        elif "replica" in i:
            set_replica.append(i)

    if not os.path.isfile("{}/p5_cplex_model_{}_2.lp".format(
            cplex_models_path, test_num)):
        cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
            cplex_models_path, test_num),
                       mode='a')

        # TODO: Validating request graph
        for i in set_state:
            try:
                G_request.nodes[i]['size']
            except KeyError:
                raise RuntimeError(
                    "The given request graph is incorrect: State {} has no 'size' value"
                    .format(i))

        s = {i: G_request.nodes[i]['size'] for i in set_state + set_replica}
        c = {i: G_topology.nodes[i]['capacity'] for i in set_PM}
        print("Generating delay matrix...")
        d = generating_delay_matrix(G_topology)
        print("Generating state-function adjacency matrix...")
        e_r = generating_req_adj(set_state, set_nf + set_replica, G_request)
        print("Generating Function mapping matrix...")
        M = generating_nf_mapping_matrix(G_topology)
        print("Generating Anti-Affinity set")
        AA = generating_AA(set_state, G_request)
        print("Generating OR-Link set")
        OL = generating_OL(set_state, set_nf, set_replica, G_request)

        # ## Into File ############################################################################################

        cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
            cplex_models_path, test_num),
                       mode='a')
        cplex_f.write("Minimize\n obj: [ ")

        servers = [i for i in set_PM if "server" in i]
        server_permutations = list(itertools.permutations(servers, 2))
        first = True
        for i, j in server_permutations:
            element_pairs = list(
                itertools.permutations(set_state + set_replica + set_nf, 2))
            for u, v in element_pairs:
                if (e_r[u, v] * d[i, j] * 2 > 0):
                    if first:
                        cplex_f.write(
                            " {} y_({},{})_({},{})*z_({},{})\n".format(
                                e_r[u, v] * d[i, j] * 2, i, u, j, v, u, v))
                        first = False
                    else:
                        cplex_f.write(
                            " + {} y_({},{})_({},{})*z_({},{})\n".format(
                                e_r[u, v] * d[i, j] * 2, i, u, j, v, u, v))

        cplex_f.close()
        # Reopen in binary mode to strip the trailing newline before closing
        # the objective bracket.
        cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
            cplex_models_path, test_num),
                       mode='rb+')
        cplex_f.seek(-1, os.SEEK_END)
        cplex_f.truncate()
        cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
            cplex_models_path, test_num),
                       mode='a')
        cplex_f.write("]/2 \n")

        # constraint 1 --------------------------------------------------------------------------------------------

        print("Generating mapping constraints")
        cplex_f.write("\nSubject To \n")
        for u in set_state + set_replica:
            c_name = "c1_{}".format(u)
            cplex_f.write(" {}:  ".format(c_name))
            for i in set_PM:
                cplex_f.write(" x_({},{}) +".format(i, u))

            cplex_f.close()
            # Reopen in binary mode to strip the trailing " +" before
            # terminating the constraint.
            cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
                cplex_models_path, test_num),
                           mode='rb+')
            cplex_f.seek(-2, os.SEEK_END)
            cplex_f.truncate()
            cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
                cplex_models_path, test_num),
                           mode='a')
            cplex_f.write(" = 1\n")

        # constraint 2 --------------------------------------------------------------------------------------------
        print("Generating capacity constraints")
        for i in set_PM:
            c_name = "c2_{}".format(i)
            cplex_f.write(" {}:  ".format(c_name))
            for u in set_state + set_replica:
                cplex_f.write("{} x_({},{}) +".format(s[u], i, u))
            cplex_f.close()
            cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
                cplex_models_path, test_num),
                           mode='rb+')
            cplex_f.seek(-2, os.SEEK_END)
            cplex_f.truncate()
            cplex_f = open('{}/p5_cplex_model_{}_2.lp'.format(
                cplex_models_path, test_num),
                           mode='a')
            cplex_f.write(" <= {}\n".format(c[i]))

        # constraint 3 --------------------------------------------------------------------------------------------
        print("Generating AA constraints")
        for i in set_PM:
            if "server" in i:
                for u, v in AA:
                    c_name = "c3_{}_{}_in_{}".format(u, v, i)
                    cplex_f.write(" {}:  ".format(c_name))
                    cplex_f.write(" x_({},{}) + x_({}, {}) <= 1\n".format(
                        i, u, i, v))

        # constraint 4 --------------------------------------------------------------------------------------------
        print("Generating NF mapping constraints")
        for function in set_nf:
            for server in set_PM:
                c_name = "c4_{}_in_{}".format(function, server)
                try:
                    if M[function] == server:
                        cplex_f.write(" {}:  ".format(c_name))
                        cplex_f.write(" x_({},{}) = 1\n".format(
                            server, function))
                    else:
                        cplex_f.write(" {}:  ".format(c_name))
                        cplex_f.write(" x_({},{}) = 0\n".format(
                            server, function))
                except KeyError:
                    # NFs missing from the mapping matrix are pinned to 0
                    cplex_f.write(" {}:  ".format(c_name))
                    cplex_f.write(" x_({},{}) = 0\n".format(server, function))

        # constraint 5 --------------------------------------------------------------------------------------------
        def for_multiprocessing(pairs, from_, to_, test_num, process_id,
                                cplex_models_path):
            print("Starting process {}, from: {}, to:{}".format(
                process_id, from_, to_))
            c5_f = open('{}/c5_testnum{}_tmp{}_2.txt'.format(
                cplex_models_path, test_num, process_id),
                        mode='a')
            start = from_
            for i, j in pairs[from_:to_]:
                if (start % 10000) == 0:
                    print("{}: {}".format(process_id, start))
                c_name = "c5_({},{})_({},{})_0".format(i[0], i[1], j[0], j[1])
                c5_f.write(" {}:  ".format(c_name))
                c5_f.write(" y_({},{})_({},{}) >= 0 \n".format(
                    i[0], i[1], j[0], j[1]))

                c_name = "c5_({},{})_({},{})_1".format(i[0], i[1], j[0], j[1])
                c5_f.write(" {}:  ".format(c_name))
                c5_f.write(
                    " y_({},{})_({},{}) - x_({},{}) - x_({},{}) >= -1 \n".
                    format(i[0], i[1], j[0], j[1], i[0], i[1], j[0], j[1]))
                start += 1
            c5_f.close()
            print("Ending process {}".format(process_id))

        print(datetime.datetime.now())
        print("Generating QP -> ILP transformation constraints")
        index_set = set()
        for i in set_PM:
            for u in set_state + set_replica + set_nf:
                index_set.add((i, u))
        index_combinations = list(itertools.permutations(index_set, 2))

        index_combinations_size = len(index_combinations)
        print("Size of this constraint set: {}".format(index_combinations_size))

        from_to_list = []
        core_num = 10
        # Integer division: slice bounds must be ints in Python 3.
        core_job_count = len(index_combinations) // core_num
        for i in range(core_num):
            from_ = i * core_job_count
            to_ = (i + 1) * core_job_count
            if i == core_num - 1:
                to_ = index_combinations_size
            from_to_list.append((from_, to_))

        import multiprocessing
        processes = []
        for i in range(0, core_num):
            p = multiprocessing.Process(
                target=for_multiprocessing,
                args=(index_combinations, from_to_list[i][0],
                      from_to_list[i][1], test_num, i, cplex_models_path))
            processes.append(p)
            p.start()

        for process in processes:
            process.join()

        tempfiles = [
            "{}/c5_testnum{}_tmp{}_2.txt".format(cplex_models_path, test_num,
                                                 i) for i in range(core_num)
        ]
        for tempfile in tempfiles:
            #print("Adding file {}".format(tempfile))
            with open(tempfile, "r") as tmp_f:
                cplex_f.write(tmp_f.read())

        for tempfile in tempfiles:
            os.remove(tempfile)

        # constraint 6 --------------------------------------------------------------------------------------------
        print("Generating 'Does it matter?' constraints")
        for u in (set_state + set_nf + set_replica):
            for v in (set_state + set_nf + set_replica):
                if u != v:
                    if is_OL(u, v, OL):
                        c_name = "c6_({},{})_0".format(u, v)
                        cplex_f.write("\n {}:  ".format(c_name))
                        first = True
                        for i, j in get_OLs(u, v, OL):

                            if first:
                                cplex_f.write(" z_({},{})".format(i, j))
                                first = False
                            else:
                                cplex_f.write(" + z_({},{})".format(i, j))
                        cplex_f.write(" = 1 \n")
                    else:
                        c_name = "c6_({},{})_1".format(u, v)
                        if "function" in u and "replica" in v:
                            cplex_f.write("\n {}:  ".format(c_name))
                            cplex_f.write(" z_({},{}) = 0".format(u, v))
                        elif "replica" in u and "state" in v:
                            cplex_f.write("\n {}:  ".format(c_name))
                            cplex_f.write(" z_({},{}) = 0".format(u, v))
                        else:
                            cplex_f.write("\n {}:  ".format(c_name))
                            cplex_f.write(" z_({},{}) = 1".format(u, v))

        # Bounds --------------------------------------------------------------------------------------------
        cplex_f.write("\nBounds\n")
        for i in set_PM:
            for u in set_state + set_replica + set_nf:
                cplex_f.write("0 <= x_({},{}) <= 1\n".format(i, u))

        index_set = set()
        for i in set_PM:
            for u in set_state + set_replica + set_nf:
                index_set.add((i, u))
        index_permutations = list(itertools.permutations(index_set, 2))
        for i, j in index_permutations:
            cplex_f.write("0 <= y_({},{})_({},{}) <= 1\n".format(
                i[0], i[1], j[0], j[1]))

        index_permutations = list(
            itertools.permutations((set_state + set_nf + set_replica), 2))
        for u, v in index_permutations:
            cplex_f.write("0 <= z_({},{}) <= 1\n".format(u, v))

        # Binaries --------------------------------------------------------------------------------------------
        cplex_f.write("\nBinaries\n")
        for i in set_PM:
            for u in set_state + set_replica + set_nf:
                cplex_f.write(" x_({},{})\n".format(i, u))

        index_set = set()
        for i in set_PM:
            for u in set_state + set_replica + set_nf:
                index_set.add((i, u))
        index_permutations = list(itertools.permutations(index_set, 2))
        for i, j in index_permutations:
            cplex_f.write(" y_({},{})_({},{})\n".format(
                i[0], i[1], j[0], j[1]))

        index_permutations = list(
            itertools.permutations((set_state + set_nf + set_replica), 2))
        for u, v in index_permutations:
            cplex_f.write(" z_({},{})\n".format(u, v))

        time.sleep(2)
        cplex_f.write("End\n")
        cplex_f.close()
        ########################################################################################################

    if not os.path.isfile(CPLEX_PATH):
        raise RuntimeError('CPLEX does not exist ({})'.format(CPLEX_PATH))

    if not os.path.isfile("{}/p5_cplex_model_{}_2.sav".format(
            cplex_models_path, test_num)):
        subprocess.call(
            "{} -c 'read {}/p5_cplex_model_{}_2.lp' 'write {}/p5_cplex_model_{}_2.sav sav'"
            .format(CPLEX_PATH, cplex_models_path, test_num, cplex_models_path,
                    test_num),
            shell=True)

    t1 = datetime.datetime.now()
    cost = 0
    mapping_result = {i: "" for i in set_state + set_nf + set_replica}

    if locally:
        # solve the problem locally; the .sav file was generated above
        print("\n\nSolving the problem locally - 2")
        subprocess.call(
            "{} -c 'read {}/p5_cplex_model_{}_2.sav' 'optimize' 'write {}/p5_cplex_result_{}_2 sol'"
            .format(CPLEX_PATH, cplex_models_path, test_num, results_path,
                    test_num),
            shell=True)
    else:
        print("\n\nSolving the problem remotely in the IBM cloud - 2")
        if not os.path.isfile("{}/p5_cplex_result_{}_2".format(
                results_path, test_num)):
            client = JobClient(
                "https://api-oaas.docloud.ibmcloud.com/job_manager/rest/v1/",
                "api_e7f3ec88-92fd-4432-84d7-f708c4a33132")
            print(
                "You can check the status of the problem processing here: https://dropsolve-oaas.docloud.ibmcloud.com/dropsolve"
            )
            resp = client.execute(input=[
                "{}/p5_cplex_model_{}_2.sav".format(cplex_models_path,
                                                    test_num)
            ],
                                  output="{}/p5_cplex_result_{}_2".format(
                                      results_path, test_num))

            if resp.job_info["solveStatus"] == "INFEASIBLE_SOLUTION":
                print("There is no valid mapping!")
                return 0

    def is_json_file(report_input_file):
        # CPLEX writes XML solution files starting with '<'; JSON ones do not.
        with open(report_input_file) as unknown_file:
            return unknown_file.read(1) != '<'

    t2 = datetime.datetime.now()
    if is_json_file("{}/p5_cplex_result_{}_2".format(results_path, test_num)):
        with open("{}/p5_cplex_result_{}_2".format(results_path,
                                                   test_num)) as f:
            result = json.load(f)

        for i in result["CPLEXSolution"]["variables"]:
            if ("x_" in list(i["name"])) and i["value"] == str(1):
                # print("{} = 1".format(i["name"]))
                server = i["name"].split(',')[0][3:]
                ve = i["name"].split(',')[1][:-1]
                mapping_result[ve] = server

        print("*** Delay cost: {} ***".format(
            result["CPLEXSolution"]["header"]["objectiveValue"]))
        cost = result["CPLEXSolution"]["header"]["objectiveValue"]

    else:
        with open("{}/p5_cplex_result_{}_2".format(results_path, test_num),
                  'r') as file:
            xml_result = file.read().replace('\n', '')
        result = xmltodict.parse(xml_result)
        print("*** Delay cost: {} ***".format(
            result["CPLEXSolution"]["header"]["@objectiveValue"]))
        cost = result["CPLEXSolution"]["header"]["@objectiveValue"]

    running_time = t2 - t1
    print("RUNNING TIME: {}".format(running_time))
    return cost, mapping_result, running_time
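A hypothetical driver sketch; every path and value below is a placeholder, not taken from the original code.

cost, mapping, runtime = solving_placement_problem_from_file(
    "graphs/topology.json",                         # topology_graph
    "graphs/request.json",                          # request_graph
    test_num=1,
    CPLEX_PATH="/opt/cplex/bin/x86-64_linux/cplex",
    cplex_models_path="cplex_models",
    results_path="optimization_results",
    locally=True)
print("cost:", cost)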
Example #7
    def submit_model_data(self,
                          attachments=None,
                          gzip=False,
                          info_callback=None,
                          info_to_monitor=None):
        """Submits a job to the cloud service.

        Args:
            attachments: A list of attachments. Each attachment is a dict with
                the following keys:
                   - 'name' : the name of the attachment
                   - 'data' : the data for the attachment
            gzip: If ``True``, data is gzipped before sent over the network
            info_callback: A callback invoked when some info is available.
                That callback takes one parameter, a dict containing
                the info as it becomes available.
            info_to_monitor: A set of information to monitor with info_callback.
                Currently, can be ``jobid`` and ``progress``.
        """
        self.__vars = None
        self.timed_out = False
        self.results.clear()

        if not info_to_monitor:
            info_to_monitor = {}

        # check that url is valid
        parts = urlparse(self.docloud_context.url)
        if not parts.scheme:
            raise DOcloudConnectorException(
                "Malformed URL: '%s': No schema supplied." %
                self.docloud_context.url)

        proxies = self.docloud_context.proxies
        try:
            client = JobClient(self.docloud_context.url,
                               self.docloud_context.key,
                               proxies=proxies)
        except TypeError:
            # docloud clients <= 1.0.172 do not have the proxies parameter
            warnings.warn(
                "Using a docloud client that does not support proxies in init()",
                UserWarning)
            client = JobClient(self.docloud_context.url,
                               self.docloud_context.key)
        self.log("client created")
        if proxies:
            self.log("proxies = %s" % proxies)

        # prepare client
        if self.docloud_context.log_requests:
            client.rest_callback = \
                lambda m, u, *a, **kw: self._rest_callback(m, u, *a, **kw)
        client.verify = self.docloud_context.verify
        client.timeout = self.docloud_context.get('timeout', None)

        try:
            try:
                # Extract the list of attachment names
                att_names = [a['name'] for a in attachments]

                # create job
                jobid = client.create_job(
                    attachments=att_names,
                    parameters=self.docloud_context.job_parameters)
                self.log("job creation submitted, id is: {0!s}".format(jobid))
                if info_callback and 'jobid' in info_to_monitor:
                    info_callback({'jobid': jobid})
            except ConnectionError as c_e:
                raise DOcloudConnectorException(
                    "Cannot connect to {0}, error: {1}".format(
                        self.docloud_context.url, str(c_e)))

            try:
                # now upload data
                for a in attachments:
                    pos = 0
                    if 'data' in a:
                        att_data = {'data': a['data']}
                    elif 'file' in a:
                        att_data = {'file': a['file']}
                        pos = a['file'].tell()
                    elif 'filename' in a:
                        att_data = {'filename': a['filename']}

                    client.upload_job_attachment(jobid,
                                                 attid=a['name'],
                                                 **att_data)
                    self.log("Attachment: %s has been uploaded" % a['name'])
                    if self.docloud_context.debug_dump_dir:
                        target_dir = self.docloud_context.debug_dump_dir
                        if not os.path.exists(target_dir):
                            os.makedirs(target_dir)
                        self.log("Dumping input attachment %s to dir %s" %
                                 (a['name'], target_dir))
                        with open(os.path.join(target_dir, a['name']),
                                  "wb") as f:
                            if 'data' in a:
                                if isinstance(a['data'], bytes):
                                    f.write(a['data'])
                                else:
                                    f.write(a['data'].encode('utf-8'))
                            else:
                                a['file'].seek(pos)
                                f.write(a['file'].read())
                # execute job
                client.execute_job(jobid)
                self.log("DOcplexcloud execute submitted has been started")
                # get job execution status until it's processed or failed
                timedout = False
                try:
                    self._executionStatus = self.wait_for_completion(
                        client,
                        jobid,
                        info_callback=info_callback,
                        info_to_monitor=info_to_monitor)
                except DOcloudInterruptedException:
                    timedout = True
                self.log("docloud execution has finished")
                # get job status. Do this before any time out handling
                self.jobInfo = client.get_job(jobid)

                if self.docloud_context.fire_last_progress and info_callback:
                    progress_data = self.map_job_info_to_progress_data(
                        self.jobInfo)
                    info_callback({'progress': progress_data})

                if timedout:
                    self.timed_out = True
                    self.log("Solve timed out after {waittime} sec".format(
                        waittime=self.docloud_context.waittime))
                    return
                # get solution => download all attachments
                try:
                    for a in client.get_job_attachments(jobid):
                        if a['type'] == 'OUTPUT_ATTACHMENT':
                            name = a['name']
                            self.log("Downloading attachment '%s'" % name)
                            attachment_as_string = self._as_string(
                                client.download_job_attachment(jobid,
                                                               attid=name))
                            self.results[name] = attachment_as_string
                            if self.docloud_context.debug_dump_dir:
                                target_dir = self.docloud_context.debug_dump_dir
                                if not os.path.exists(target_dir):
                                    os.makedirs(target_dir)
                                self.log("Dumping attachment %s to dir %s" %
                                         (name, target_dir))
                                with open(os.path.join(target_dir, name),
                                          "wb") as f:
                                    f.write(
                                        attachment_as_string.encode('utf-8'))
                except DOcloudNotFoundError:
                    self.log("no solution in attachment")
                self.log("docloud results have been received")
                # on_solve_finished_cb
                if self.docloud_context.on_solve_finished_cb:
                    self.docloud_context.on_solve_finished_cb(jobid=jobid,
                                                              client=client,
                                                              connector=self)
                return
            finally:
                if self.docloud_context.delete_job:
                    deleted = client.delete_job(jobid)
                    self.log("delete status for job: {0!s} = {1!s}".format(
                        jobid, deleted))

        finally:
            client.close()
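A sketch of the attachment format this method expects; the connector variable and all values are placeholders. Each attachment dict carries a 'name' plus exactly one of 'data', 'file', or 'filename', matching the upload loop above.

attachments = [
    {'name': 'model.lp', 'data': b'Minimize\n obj: x\nEnd\n'},  # in-memory bytes
    {'name': 'model.dat', 'filename': '/tmp/model.dat'},        # file on disk
]
connector.submit_model_data(attachments=attachments)  # 'connector' is hypothetical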
Example #8
from docloud.job import JobClient

if __name__ == '__main__':
    url = "Paste your base URL"
    api_key = "Paste your api key"

    client = JobClient(url, api_key)

    resp = client.execute(input=["models/truck.dat", "models/truck.mod"],
                          output="results.json")
Example #9
def solving_placement_problem_from_file(topology_graph, request_graph,
                                        test_num):
    # Reading networkx file (needed both for generating the model and for
    # interpreting the solution below)
    G_topology = read_json_file(topology_graph)
    G_request = read_json_file(request_graph)

    set_PM = list(G_topology.nodes)
    set_state_or_nf = list(G_request.nodes)
    set_state, set_nf, set_replica = [], [], []
    for i in set_state_or_nf:
        if "function" in i:
            set_nf.append(i)
        elif "state" in i:
            set_state.append(i)
        elif "replica" in i:
            set_replica.append(i)

    if not os.path.isfile(
            "./cplex_models/p5_cplex_model_{}.lp".format(test_num)):

        # TODO: Validating request graph
        for i in set_state:
            try:
                G_request.nodes[i]['size']
            except KeyError:
                raise RuntimeError(
                    "The given request graph is incorrect: State {} has no 'size' value"
                    .format(i))

        s = {i: G_request.nodes[i]['size'] for i in set_state + set_replica}
        c = {i: G_topology.nodes[i]['capacity'] for i in set_PM}
        print("Generating delay matrix...")
        d = generating_delay_matrix(G_topology)
        print("Generating state-function adjacency matrix...")
        e_r = generating_req_adj(set_state, set_nf + set_replica, G_request)
        print("Generating Function mapping matrix...")
        M = generating_nf_mapping_matrix(G_topology)
        print("Generating Anti-Affinity set")
        AA = generating_AA(set_state, G_request)
        print("Generating OR-Link set")
        OL = generating_OL(set_state, set_nf, set_replica, G_request)

        opt_model = cpx.Model(name="P5")

        # Binary variables
        print("Creating variables 1...")
        x_vars = {(i, u): opt_model.binary_var(name="x_({0},{1})".format(i, u))
                  for i in set_PM for u in set_state + set_replica + set_nf}

        print("Creating variables 2...")
        index_set = set()
        for i in set_PM:
            for u in set_state + set_replica + set_nf:
                index_set.add((i, u))
        index_permutations = list(itertools.permutations(index_set, 2))
        y_vars = {(i[0], i[1], j[0], j[1]): opt_model.binary_var(
            name="y_({},{})_({},{})".format(i[0], i[1], j[0], j[1]))
                  for i, j in index_permutations}

        print("Creating variables 3...")
        index_permutations = list(
            itertools.permutations((set_state + set_nf + set_replica), 2))
        z_vars = {(u, v): opt_model.binary_var(name="z_({},{})".format(u, v))
                  for u, v in index_permutations}

        # == constraints 1 - virtual element can be mapped into only one server
        print(
            "Creating constraints 1 - virtual element can be mapped into only one server"
        )
        for u in set_state + set_replica:
            c_name = "c1_{}".format(u)
            opt_model.add_constraint(ct=opt_model.sum(x_vars[i, u]
                                                      for i in set_PM) == 1,
                                     ctname=c_name)

        # <= constraints 2 - server capacity constraint
        print("Creating constraints 2 - server capacity constraint")
        for i in set_PM:
            c_name = "c2_{}".format(i)
            opt_model.add_constraint(
                ct=opt_model.sum(s[u] * x_vars[i, u]
                                 for u in set_state + set_replica) <= c[i],
                ctname=c_name)

        # <= constraints 3 - anti-affinity rules
        print("Creating constraints 3 - anti-affinity rules")
        for i in set_PM:
            if "server" in i:
                for u, v in AA:
                    c_name = "c3_{}_{}_in_{}".format(u, v, i)
                    opt_model.add_constraint(
                        ct=(x_vars[i, u] + x_vars[i, v]) <= 1, ctname=c_name)

        # == constraints 4 - NFs running places
        print("Creating constraints 4 - NFs running places")
        for function in set_nf:
            for server in set_PM:
                c_name = "c4_{}_in_{}".format(function, server)
                try:
                    if M[function] == server:
                        opt_model.add_constraint(ct=x_vars[server,
                                                           function] == 1,
                                                 ctname=c_name)
                        # print("x_vars[{}, {}] == 1".format(server, function))
                    else:
                        opt_model.add_constraint(ct=x_vars[server,
                                                           function] == 0,
                                                 ctname=c_name)
                        # print("x_vars[{}, {}] == 0".format(server, function))
                except KeyError:
                    opt_model.add_constraint(ct=x_vars[server, function] == 0,
                                             ctname=c_name)
                    # print("x_vars[{}, {}] == 0".format(server, function))

        # >= constraints 5 - QP -> ILP transformation constraints
        print("Creating constraints 5 - QP -> ILP transformation constraints")

        index_set = set()
        for i in set_PM:
            for u in set_state + set_replica + set_nf:
                index_set.add((i, u))
        index_combinations = list(itertools.permutations(index_set, 2))

        for i, j in index_combinations:
            c_name = "c5_({},{})_({},{})_0".format(i[0], i[1], j[0], j[1])
            opt_model.add_constraint(ct=y_vars[i[0], i[1], j[0], j[1]] >= 0,
                                     ctname=c_name)
            c_name = "c5_({},{})_({},{})_1".format(i[0], i[1], j[0], j[1])
            opt_model.add_constraint(
                ct=y_vars[i[0], i[1], j[0], j[1]] >=
                (x_vars[i[0], i[1]] + x_vars[j[0], j[1]] - 1),
                ctname=c_name)

        print("Creating constraints 6 - z variable rules")
        for u in (set_state + set_nf + set_replica):
            for v in (set_state + set_nf + set_replica):
                if u != v:
                    if is_OL(u, v, OL):
                        c_name = "c6_({},{})_0".format(u, v)
                        opt_model.add_constraint(ct=opt_model.sum(
                            z_vars[(i, j)] for i, j in get_OLs(u, v, OL)) == 1,
                                                 ctname=c_name)
                    else:
                        c_name = "c6_({},{})_1".format(u, v)
                        if "function" in u and "replica" in v:
                            opt_model.add_constraint(ct=z_vars[(u, v)] == 0,
                                                     ctname=c_name)
                        elif "replica" in u and "state" in v:
                            opt_model.add_constraint(ct=z_vars[(u, v)] == 0,
                                                     ctname=c_name)
                        else:
                            opt_model.add_constraint(ct=z_vars[(u, v)] == 1,
                                                     ctname=c_name)

        print("Creating Objective function...")
        servers = [i for i in set_PM if "server" in i]
        server_permutations = list(itertools.permutations(servers, 2))
        objective = opt_model.sum(
            y_vars[i, u, j, v] * e_r[u, v] * d[i, j] * z_vars[u, v]
            for i, j in server_permutations for u, v in list(
                itertools.permutations(set_state + set_replica + set_nf, 2)))

        # # # for minimization
        opt_model.minimize(objective)

        print("Exporting the problem")
        opt_model.export_as_lp(basename="p5_cplex_model_{}".format(test_num),
                               path="./cplex_models")

        subprocess.call(
            "/home/epmetra/projects/cplex/cplex/bin/x86-64_linux/cplex -c 'read /home/epmetra/projects/LO/cplex_models/p5_cplex_model_{}.lp' 'write /home/epmetra/projects/LO/cplex_models/p5_cplex_model_{}.mps mps'"
            .format(test_num, test_num),
            shell=True)

        # solving with local cplex
        # print("Solving the problem locally")
        # print(datetime.datetime.now())
        # asd = opt_model.solve()

        # solving in the docplex cloud
        print("Solving the problem in the cloud - 1")
    print(datetime.datetime.now())

    if not os.path.isfile(
            "optimization_results/p5_cplex_result_{}.json".format(test_num)):
        client = JobClient(
            "https://api-oaas.docloud.ibmcloud.com/job_manager/rest/v1/",
            "api_e7f3ec88-92fd-4432-84d7-f708c4a33132")
        print(
            "You can check the status of the problem processing here: https://dropsolve-oaas.docloud.ibmcloud.com/dropsolve"
        )
        resp = client.execute(
            input=["./cplex_models/p5_cplex_model_{}.mps".format(test_num)],
            output="optimization_results/p5_cplex_result_{}.json".format(
                test_num))

        mapping_result = {i: "" for i in set_state + set_nf + set_replica}

        if resp.job_info["solveStatus"] == "INFEASIBLE_SOLUTION":
            print("There is no valid mapping!")
            return 0
        else:
            with open("./optimization_results/p5_cplex_result_{}.json".format(
                    test_num)) as f:
                result = json.load(f)
                for i in result["CPLEXSolution"]["variables"]:
                    if ("x_" in i["name"]) and i["value"] == str(1):
                        print("{} = 1".format(i["name"]))
                        server = i["name"].split(',')[0][3:]
                        ve = i["name"].split(',')[1][:-1]
                        mapping_result[ve] = server

            print("*** Delay cost: {} ***".format(
                result["CPLEXSolution"]["header"]["objectiveValue"]))
            return result["CPLEXSolution"]["header"][
                "objectiveValue"], mapping_result
    else:
        with open("./optimization_results/p5_cplex_result_{}.json".format(
                test_num)) as f:
            result = json.load(f)
            for i in result["CPLEXSolution"]["variables"]:
                if ("x_" in i["name"]) and i["value"] == str(1):
                    print("{} = 1".format(i["name"]))
            print("*** Delay cost: {} ***".format(
                result["CPLEXSolution"]["header"]["objectiveValue"]))
            return result["CPLEXSolution"]["header"]["objectiveValue"]
Example #10
solver_manager = SolverManagerFactory('neos')
opt = SolverFactory('cplex', solver_io='lp')
results = solver_manager.solve(model, opt=opt)
model.solutions.store_to(results)
print(results)

# >>>> 3. docplexcloud
# key = #DOCPLEX API KEY (get from docloud website)
# base_url = #DOCPLEX URL (get from docloud website)
# Dependencies : docloud (pip install docloud)

import json
import glob
import pandas
from docloud.job import JobClient
client = JobClient(base_url, key)
model.write("temp.lp", io_options={"symbolic_solver_labels": True})
# with open("temp.lp") as lpfile:
#     resp = client.execute(input=lpfile,output=None,load_solution = True)
lp_files = glob.glob("temp.lp")
resp = client.execute(input=lp_files, output=None, load_solution=True)
solution = json.loads(resp.solution.decode("utf-8"))
# os.remove("temp.lp")
for i, k in solution['CPLEXSolution']['header'].items():
    print(i, ':', k)
results = pandas.DataFrame(solution['CPLEXSolution']['variables']).filter(
    items=['index', 'name', 'value', 'status'])
print(results)

# >>>> 4. GUROBI
Example #11
def run(choice, threshold, confidence_interval, facility_number,
        min_threshold):
    """
    This solves the corresponding optimization problem to cplex cloud.
    It takes 5 parameters:
        i) choice                : An integer , (1-5) which represents the optimization model choice of the user
        ii) threshold(m)         : An integer , which represents the maximum possible distance between two districts which are counted as appropriate_pairs , meter
        iii) confidence_interval : An integer , (0-100) which represents the confidence level of the stochastic model
        iv) facility_number      : An integer , (0 - NUMBER_OF_DISTRICT) which represents the maximum fire station number for maximum coverage models
        v) min_threshold(min)    : An integer , which represents the maximum traveling time between two districts in terms of minutes.

    It returns nothing but it creates a .txt file that contains the solution of the corresponding optimization problem.
    """
    #Create a clinet
    client = JobClient(CPLEX_BASE_URL, CPLEX_API_KEY)

    # Choice == 1 , base model
    if choice == 1:
        resp = client.execute(input=[
            "Mod_Files/BaseModel.mod",
            "Mod_Files/BaseModel_" + str(threshold) + ".dat"
        ],
                              output="Solutions/BaseModel_Sol_" +
                              str(threshold) + ".txt")

        return "Solutions/BaseModel_Sol_" + str(threshold) + ".txt"

    # Choice == 2 , max_coverage model
    if choice == 2:
        resp = client.execute(input=[
            "Mod_Files/MultiCoverage.mod",
            "Mod_Files/MultiCoverage_" + str(threshold) + ".dat"
        ],
                              output="Solutions/MultiCoverage_Sol_" +
                              str(threshold) + ".txt")

        return "Solutions/MultiCoverage_Sol_" + str(threshold) + ".txt"

    # Choice == 3 , max coverage
    if choice == 3:

        resp = client.execute(input=[
            "Mod_Files/MaxCoverage.mod", "Mod_Files/MaxCoverage_" +
            str(threshold) + "_" + str(facility_number) + ".dat"
        ],
                              output="Solutions/MaxCoverage_Sol_" +
                              str(threshold) + "_" + str(facility_number) +
                              ".txt")

        return "Solutions/MaxCoverage_Sol_" + str(threshold) + "_" + str(
            facility_number) + ".txt"

    # Choice == 4 , stochastic_coverage
    if choice == 4:
        resp = client.execute(input=[
            "Mod_Files/StochasticCoverage.mod",
            "Mod_Files/Stochastic_Coverage_" + str(min_threshold) + "_" +
            str(confidence_interval) + ".dat"
        ],
                              output="Solutions/Stochastic_Coverage_Sol_" +
                              str(min_threshold) + "_" +
                              str(confidence_interval) + ".txt")

        return "Solutions/Stochastic_Coverage_Sol_" + str(
            min_threshold) + "_" + str(confidence_interval) + ".txt"

    # Choice == 5 , stochastic max coverage
    if choice == 5:
        resp = client.execute(input=[
            "Mod_Files/MaxCoverage.mod",
            "Mod_Files/Stochastic_MaxCoverage_" + str(min_threshold) + "_" +
            str(facility_number) + "_" + str(confidence_interval) + ".dat"
        ],
                              output="Solutions/Stochastic_MaxCoverage_Sol_" +
                              str(min_threshold) + "_" + str(facility_number) +
                              "_" + str(confidence_interval) + ".txt")

        return "Solutions/Stochastic_MaxCoverage_Sol_" + str(
            min_threshold) + "_" + str(facility_number) + "_" + str(
                confidence_interval) + ".txt"
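A hypothetical call sketch; the argument values are illustrative only, chosen to match the documented ranges.

solution_file = run(choice=1, threshold=5000, confidence_interval=95,
                    facility_number=10, min_threshold=15)
print("Solution written to", solution_file)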