# Example #1
# 0
def splitlhsnpoints(m=5, n=5, dims=range(2, 10)):
    """Print a table of split-LHS point budgets per dimension.

    For each dimension in *dims*, computes the total number of sample
    points (twice the number of rational-approximation coefficients for
    degrees (m, n)), how many of those are placed on each face of the
    box, the total across all 2*dim faces, and how many remain in the
    interior.

    Parameters
    ----------
    m, n : int
        Numerator and denominator polynomial degrees (default 5, 5,
        matching the original hard-coded values).
    dims : iterable of int
        Dimensions to tabulate (default range(2, 10), as before).
    """
    from apprentice import tools
    # Column header: first column is the dimension (the original header
    # mislabeled it "n"); stray trailing newline removed so the header
    # is a single line.
    print("dim\tM\tN\tTP\tFPp\tTFP\tIN")
    for dim in dims:
        npoints = 2 * tools.numCoeffsRapp(dim, [m, n])
        # Points per face: the (dim-1)-dimensional budget spread evenly
        # over the 2*dim faces of the box.
        facepointsper = int(2 * tools.numCoeffsRapp(dim - 1, [m, n]) /
                            (2 * dim))
        totalfacepoints = 2 * dim * facepointsper
        inpoints = int(npoints - totalfacepoints)

        print("%d\t%d\t%d\t%d\t%d\t%d\t%d" %
              (dim, m, n, npoints, facepointsper, totalfacepoints, inpoints))
    def fit(self, **kwargs):
        """Fit the rational approximation to the stored data.

        Checks that enough input points are available for the requested
        degrees, sets up the monomial structures, builds the Vandermonde
        matrices for numerator and denominator, and dispatches to one of
        the coefficient-solver strategies.

        Keyword Args:
            strategy (int): Which coefficient solver to use (1, 2 or 3).
                Defaults to 1 when absent or None.

        Raises:
            Exception: If there are fewer inputs than coefficients to
                determine, or if an unknown strategy is requested.
        """
        # Set M, N, K, polynomial structures
        from apprentice import tools
        # A degree-(m, n) rational approximation needs at least this
        # many data points to be determined.
        n_required = tools.numCoeffsRapp(self.dim, (self.m, self.n))
        if n_required > self._Y.size:
            raise Exception("Not enough inputs: got %i but require %i to do m=%i n=%i"%(self._Y.size, n_required, self.m,self.n))

        self.setStructures()

        from apprentice import monomial
        # Vandermonde matrices for the numerator (degree m) and
        # denominator (degree n) monomial bases.
        VM = monomial.vandermonde(self._X, self._m)
        VN = monomial.vandermonde(self._X, self._n)
        strategy=kwargs["strategy"] if kwargs.get("strategy") is not None else 1
        if   strategy==1: self.coeffSolve( VM, VN)
        elif strategy==2: self.coeffSolve2(VM, VN)
        elif strategy==3: self.coeffSolve3(VM, VN)
        # NOTE, strat 1 is faster for smaller problems (Npoints < 250)
        else: raise Exception("fit() strategy %i not implemented"%strategy)
# Example #3
# 0
def tabletotalcputime(farr, noisearr, ts, table_or_latex):
    """Collect per-run CPU-time/iteration stats from result JSON files and
    emit LaTeX summary tables (and, below an early exit, several plots).

    For each sampling scheme in ``allsamples``, each function name in
    *farr* and each noise level in *noisearr*, reads the fit-result JSON
    files of five experiment runs ("exp1".."exp5"), averages fit times,
    multistart times and iteration counts across runs, and prints LaTeX
    table fragments summarizing them.

    Args:
        farr: list of test-function names (e.g. "f1", "f20").
        noisearr: list of noise identifiers understood by getnoiseinfo().
        ts: training-sample-size tag used in result file names (e.g. "2x").
        table_or_latex: output-mode flag — NOTE(review): never read in
            this function body; verify whether it is still needed.

    NOTE(review): relies on module-level helpers/imports not visible here
    (os, np, mtick, getnoiseinfo, knowmissing, getdim, getstats).
    """
    print(farr)
    print(noisearr)

    # allsamples = ['mc','lhs','so','sg']
    # allsamples = ['lhs','splitlhs','sg']
    # allsamples = ['sg']
    # allsamples = ['splitlhs']
    # allsamples = ['lhs','splitlhs']
    allsamples = ['sg', 'lhs', 'splitlhs']
    allsampleslabels = ['SG', 'LHS', 'd-LHD']
    import json
    from apprentice import tools
    results = {}
    dumpr = {}
    for snum, sample in enumerate(allsamples):
        results[sample] = {}
        dumpr[sample] = {}
        for num, fname in enumerate(farr):
            results[sample][fname] = {}
            m = 5
            n = 5

            for noise in noisearr:
                results[sample][fname][noise] = {}
                noisestr, noisepct = getnoiseinfo(noise)

                # Per-run accumulators (one entry per experiment run).
                timepa = []
                timera = []
                timerard = []
                timerasip = []
                iterrasip = []
                iterrasipnonlog = []
                fittime = []
                mstime = []
                for run in ["exp1", "exp2", "exp3", "exp4", "exp5"]:
                    fndesc = "%s%s_%s_%s" % (fname, noisestr, sample, ts)
                    folder = "results/%s/%s" % (run, fndesc)
                    # print(folder)
                    pq = "p%d_q%d" % (m, n)
                    # print(run, fname,noisestr,sample,m,n)

                    rappsipfile = "%s/outrasip/%s_%s_ts%s.json" % (
                        folder, fndesc, pq, ts)
                    rappfile = "%s/outra/%s_%s_ts%s.json" % (folder, fndesc,
                                                             pq, ts)
                    rapprdfile = "%s/outrard/%s_%s_ts%s.json" % (
                        folder, fndesc, pq, ts)
                    pappfile = "%s/outpa/%s_%s_ts%s.json" % (folder, fndesc,
                                                             pq, ts)

                    # Missing-file policy: skip runs known to be missing
                    # (single run only for sparse grids), abort otherwise.
                    if not os.path.exists(rappsipfile):
                        print("rappsipfile %s not found" % (rappsipfile))
                        if (knowmissing(rappsipfile)):
                            if (sample == "sg"):
                                break
                            continue
                        exit(1)

                    if not os.path.exists(rappfile):
                        print("rappfile %s not found" % (rappfile))
                        if (knowmissing(rappfile)):
                            if (sample == "sg"):
                                break
                            continue
                        exit(1)

                    if not os.path.exists(rapprdfile):
                        print("rapprdfile %s not found" % (rapprdfile))
                        if (knowmissing(rapprdfile)):
                            if (sample == "sg"):
                                break
                            continue
                        exit(1)

                    if not os.path.exists(pappfile):
                        print("pappfile %s not found" % (pappfile))
                        if (knowmissing(pappfile)):
                            if (sample == "sg"):
                                break
                            continue
                        exit(1)

                    dim = getdim(fname)
                    rdof = tools.numCoeffsRapp(dim, [int(m), int(n)])
                    rpnnl = tools.numCoeffsPoly(dim, m) - (dim + 1)
                    rqnnl = tools.numCoeffsPoly(dim, n) - (dim + 1)

                    # NOTE(review): these "if <path string>:" guards are
                    # always true (non-empty strings) — existence was
                    # already checked above, so they are redundant.
                    if rappsipfile:
                        with open(rappsipfile, 'r') as fn:
                            datastore = json.load(fn)
                    rappsiptime = datastore['log']['fittime']
                    rnoiters = len(datastore['iterationinfo'])
                    timerasip.append(rappsiptime)
                    # timerasip.append(rappsiptime)
                    iterrasip.append(rnoiters)
                    iterrasipnonlog.append(rnoiters)
                    ft = 0.
                    mt = 0.
                    # Accumulate per-iteration fit time and multistart
                    # (robust-optimization) time for this run.
                    for iter in datastore['iterationinfo']:
                        ft += iter['log']['time']
                        mt += iter['robOptInfo']['info'][0]['log']['time']
                    fittime.append(ft)
                    mstime.append(mt)
                    # fittime.append(ft)
                    # mstime.append(mt)

                    if rappfile:
                        with open(rappfile, 'r') as fn:
                            datastore = json.load(fn)
                    rapptime = datastore['log']['fittime']
                    timera.append(rapptime)
                    # timera.append(rapptime)

                    if rapprdfile:
                        with open(rapprdfile, 'r') as fn:
                            datastore = json.load(fn)
                    rapprdtime = datastore['log']['fittime']
                    timerard.append(rapprdtime)
                    # timerard.append(rapprdtime)

                    if pappfile:
                        with open(pappfile, 'r') as fn:
                            datastore = json.load(fn)
                    papptime = datastore['log']['fittime']
                    pdof = tools.numCoeffsPoly(datastore['dim'],
                                               datastore['m'])
                    timepa.append(papptime)
                    # timepa.append(papptime)
                    # Sparse grids are deterministic: a single run suffices.
                    if (sample == "sg"):
                        break

                # Sentinel used when no run produced data for a metric.
                missingmean = -1

                if len(timerard) == 0:
                    rapprd = missingmean
                    rapprdsd = 0
                else:
                    rapprd = np.average(timerard)
                    rapprdsd = np.std(timerard)

                if len(timera) == 0:
                    rapp = missingmean
                    rappsd = 0
                else:
                    rapp = np.average(timera)
                    rappsd = np.std(timera)

                if len(timepa) == 0:
                    papp = missingmean
                    pappsd = 0
                else:
                    papp = np.average(timepa)
                    pappsd = np.std(timepa)

                if len(timerasip) == 0:
                    rappsip = missingmean
                    rappsipsd = 0
                else:
                    rappsip = np.average(timerasip)
                    rappsipsd = np.std(timerasip)

                if len(iterrasip) == 0:
                    rnoiters = missingmean
                    rnoiterssd = 0
                else:
                    rnoiters = np.average(iterrasip)
                    rnoiterssd = np.std(iterrasip)

                if len(fittime) == 0:
                    rfittime = missingmean
                    rfittimesd = 0
                else:
                    rfittime = np.average(fittime)
                    rfittimesd = np.std(fittime)

                if len(mstime) == 0:
                    rmstime = missingmean
                    rmstimesd = 0
                else:
                    rmstime = np.average(mstime)
                    rmstimesd = np.std(mstime)

                # NOTE(review): pdof/rdof/rpnnl/rqnnl are assigned inside
                # the run loop; if every run was skipped they are unbound
                # (or stale from a previous iteration) here — verify.
                results[sample][fname][noise] = {
                    "rapprd": rapprd,
                    "rapprdsd": rapprdsd,
                    "rapp": rapp,
                    "rappsd": rappsd,
                    "rappsip": rappsip,
                    "rappsipsd": rappsipsd,
                    "papp": papp,
                    "pappsd": pappsd,
                    'rnoiters': rnoiters,
                    'rnoiterssd': rnoiterssd,
                    'rfittime': rfittime,
                    'rfittimesd': rfittimesd,
                    'rmstime': rmstime,
                    'rmstimesd': rmstimesd,
                    'pdof': pdof,
                    'rdof': rdof,
                    'rpnnl': rpnnl,
                    'rqnnl': rqnnl
                }
                # NOTE(review): keyed only by fname — overwritten for each
                # noise level, so only the last noise's data survives.
                dumpr[sample][fname] = iterrasipnonlog

        # from IPython import embed
        # embed()

    # print(results)

    #############################################
    #iteration summary latex
    #############################################
    # python tabletotalcputime.py  f1,f2,f3,f4,f5,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f19,f20,f21,f22 0 2x latex
    noise = noisearr[0]
    metricarr = ['amean', 'gmean', 'median', 'range']
    metricarrlabel = ['Arithmetic Mean', 'Geometric Mean', 'Median', 'Range']
    stats = {}
    s = ""
    for mnum, metr in enumerate(metricarr):
        s += "%s" % (metricarrlabel[mnum])
        for snum, sample in enumerate(allsamples):
            data = []
            for fnum, fname in enumerate(farr):
                data.append(results[sample][fname][noise]['rnoiters'])
                if (fname == 'f20'):
                    print(results[sample][fname][noise]['rnoiters'])

            print(np.max(data), np.min(data))
            stat = getstats(data, metr)
            s += "&%.2f" % (stat)
        s += "\n\\\\\\hline\n"
    print(s)

    #############################################
    #cputime summary latex
    #############################################
    noise = noisearr[0]
    metricarr = ['amean', 'gmean', 'median', 'range']
    metricarrlabel = ['Arithmetic Mean', 'Geometric Mean', 'Median', 'Range']
    methodarr = ['papp', 'rapp', 'rapprd', 'rfittime', 'rmstime']
    stats = {}
    sample = 'splitlhs'
    s = ""

    for mnum, metr in enumerate(metricarr):
        s += "%s" % (metricarrlabel[mnum])
        for menum, method in enumerate(methodarr):
            data = []
            for fnum, fname in enumerate(farr):
                data.append(results[sample][fname][noise][method])

            stat = getstats(data, metr)
            s += "&%.2f" % (stat)
        s += "\n\\\\\\hline\n"
    print(s)

    #############################################
    #cputime and iteration electronic suppliment latex
    #############################################
    # python tabletotalcputime.py  f1,f2,f3,f4,f5,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f19,f20,f21,f22 0 2x latex
    # python tabletotalcputime.py  f1,f2,f3,f4,f5,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f19,f20,f21,f22 10-6 2x latex
    # python tabletotalcputime.py  f1,f2,f3,f4,f5,f7,f8,f9,f10,f12,f13,f14,f15,f16,f17,f18,f19,f20,f21,f22 10-2 2x latex
    noise = noisearr[0]
    s = ""
    keyarr = ['rapp', 'rapprd', 'rfittime', 'rmstime', 'rnoiters', 'papp']

    for fnum, fname in enumerate(farr):
        s += "\\multirow{3}{*}{\\ref{fn:%s}}" % (fname)
        for snum, sample in enumerate(allsamples):
            s += "&%s" % (allsampleslabels[snum])
            for knum, key in enumerate(keyarr):
                statsarr = [key, key + 'sd']
                for stnum, stat in enumerate(statsarr):
                    val = results[sample][fname][noise][stat]
                    # SG has a single run, so no std deviation column.
                    if sample == 'sg' and stnum == 1:
                        s += "&-"
                    elif val == int(val):
                        s += "&%d" % (int(val))
                    elif val < 10**-2 or val > 10**2:
                        s += "&%.2E" % (val)
                    else:
                        s += "&%.2f" % (val)
            if snum < len(allsamples) - 1:
                s += "\n\\\\*  \\cline{2-14}\n"
        s += "\n\\\\ \\hline\n"
    print(s)

    # NOTE(review): everything below this exit(1) is unreachable dead
    # code (plotting sections presumably kept for reference) — confirm
    # whether it should be deleted or re-enabled.
    exit(1)
    import json
    with open("results/plots/Jiterations.json", "w") as f:
        json.dump(dumpr, f, indent=4, sort_keys=True)

    baseline = 2
    totalrow = 3
    totalcol = 3
    import matplotlib.pyplot as plt
    ffffff = plt.figure(0, figsize=(45, 20))
    axarray = []
    width = 0.21
    ecolor = 'black'
    X111 = np.arange(len(farr))
    meankeyarr = ['papp', 'rapp', 'rapprd', 'rappsip']
    sdkeyarr = ['pappsd', 'rappsd', 'rapprdsd', 'rappsipsd']
    legendarr = [
        'Polynomial Approx. ',
        'Algorithm \\ref{ALG:MVVandQR} without degree reduction',
        'Algorithm \\ref{ALG:MVVandQR}', 'MST Algorithm \\ref{A:Polyak}'
    ]
    legend2arr = [None, None, None, 'FT Algorithm \\ref{A:Polyak}']
    color = ['#FFC300', '#FF5733', '#C70039', '#900C3F']
    colorfittime = ['yellow', 'yellow', 'yellow', 'yellow']
    props = dict(boxstyle='square', facecolor='wheat', alpha=0.5)
    plt.rc('ytick', labelsize=20)
    plt.rc('xtick', labelsize=20)
    for nnum, noise in enumerate(noisearr):
        for snum, sample in enumerate(allsamples):
            mean = {}
            fitmean = {}
            for type in meankeyarr:
                mean[type] = []
                fitmean[type] = []
            sd = {}
            fitsd = {}
            for type in sdkeyarr:
                sd[type] = []
                fitsd[type] = []
            for fname in farr:
                for type in meankeyarr:
                    mean[type].append(results[sample][fname][noise][type])
                    # mean[type].append(np.ma.log10(results[sample][fname][noise][type]))
                for type in sdkeyarr:
                    # print(results[sample][fname][noise][type])
                    sd[type].append(results[sample][fname][noise][type])
                    # sd[type].append(np.ma.log10(results[sample][fname][noise][type]))

                for type in meankeyarr:
                    if (type == "rappsip"):
                        fitmean[type].append(
                            results[sample][fname][noise]['rfittime'])
                    else:
                        fitmean[type].append(-1 * baseline)
                for type in sdkeyarr:
                    if (type == "rappsip"):
                        fitsd[type].append(
                            results[sample][fname][noise]['rfittimesd'])
                    else:
                        fitsd[type].append(0)

            if (len(axarray) > 0):
                ax = plt.subplot2grid((totalrow, totalcol), (nnum, snum),
                                      sharex=axarray[0],
                                      sharey=axarray[0])
                axarray.append(ax)
            else:
                ax = plt.subplot2grid((totalrow, totalcol), (nnum, snum))
                axarray.append(ax)

            # print(mean)
            # print(sd)
            for typenum, type in enumerate(meankeyarr):
                sdkey = sdkeyarr[typenum]
                # print(mean[type])
                if (sample == 'sg'):
                    ax.bar(X111 + typenum * width,
                           np.array(mean[type]) + baseline,
                           width,
                           color=color[typenum],
                           capsize=3,
                           label=legendarr[typenum])
                else:
                    ax.bar(X111 + typenum * width,
                           np.array(mean[type]) + baseline,
                           width,
                           color=color[typenum],
                           yerr=np.array(sd[sdkey]),
                           align='center',
                           ecolor=ecolor,
                           capsize=3)
            for typenum, type in enumerate(meankeyarr):
                sdkey = sdkeyarr[typenum]
                if (sample == 'sg'):
                    ax.bar(X111 + typenum * width,
                           np.array(fitmean[type]) + baseline,
                           width,
                           color=colorfittime[typenum],
                           capsize=3,
                           label=legend2arr[typenum])
                else:
                    ax.bar(X111 + typenum * width,
                           np.array(fitmean[type]) + baseline,
                           width,
                           color=colorfittime[typenum],
                           yerr=np.array(fitsd[sdkey]),
                           align='center',
                           ecolor=ecolor,
                           capsize=3)
                ax.set_xticks(X111 + (len(meankeyarr) - 1) * width / 2)
                xlab = []
                for f in farr:
                    # print(f)
                    # xlab.append("\\ref{fn:%s}"%(f))
                    # xlab.append("\\ref{%s}"%(f))
                    xlab.append("%s" % (f))
                ax.set_xticklabels(xlab, fontsize=20)
                ax.set_xlabel("Test functions", fontsize=22)
                ax.set_ylabel("$\\log_{10}$ [CPU time (sec)]", fontsize=22)
                ax.label_outer()

        if (nnum == 0):
            l1 = ffffff.legend(loc='upper center',
                               ncol=5,
                               fontsize=25,
                               borderaxespad=0.,
                               shadow=False)

    plt.gca().yaxis.set_major_formatter(
        mtick.FuncFormatter(lambda x, _: x - baseline))

    ffffff.savefig("../../log/cputime.png",
                   bbox_extra_artists=(l1, ),
                   bbox_inches='tight')

    plt.clf()
    plt.close('all')

    # Filtered plot
    totalrow = 3
    totalcol = 1
    import matplotlib.pyplot as plt
    ffffff = plt.figure(0, figsize=(45, 20))
    X111 = np.arange(len(farr) * len(noisearr))
    axarray = []
    width = 0.15
    ecolor = 'black'
    plt.rc('ytick', labelsize=20)
    plt.rc('xtick', labelsize=20)
    for snum, sample in enumerate(allsamples):
        mean = {}
        fitmean = {}
        for type in meankeyarr:
            mean[type] = []
            fitmean[type] = []
        sd = {}
        fitsd = {}
        for type in sdkeyarr:
            sd[type] = []
            fitsd[type] = []
        for nnum, noise in enumerate(noisearr):
            for fname in farr:
                for type in meankeyarr:
                    mean[type].append(results[sample][fname][noise][type])
                    # mean[type].append(np.ma.log10(results[sample][fname][noise][type]))
                for type in sdkeyarr:
                    # print(results[sample][fname][noise][type])
                    sd[type].append(results[sample][fname][noise][type])
                    # sd[type].append(np.ma.log10(results[sample][fname][noise][type]))

                for type in meankeyarr:
                    if (type == "rappsip"):
                        fitmean[type].append(
                            results[sample][fname][noise]['rfittime'])
                    else:
                        fitmean[type].append(-1 * baseline)
                for type in sdkeyarr:
                    if (type == "rappsip"):
                        fitsd[type].append(
                            results[sample][fname][noise]['rfittimesd'])
                    else:
                        fitsd[type].append(0)

        if (len(axarray) > 0):
            ax = plt.subplot2grid((totalrow, totalcol), (snum, 0),
                                  sharex=axarray[0],
                                  sharey=axarray[0])
            axarray.append(ax)
        else:
            ax = plt.subplot2grid((totalrow, totalcol), (snum, 0))
            axarray.append(ax)

        ax.set_xlim(-.3, 14.7)
        ax.spines['top'].set_visible(False)
        ax.spines['right'].set_visible(False)
        plt.axvspan(-.3, 3.7, alpha=0.5, color='pink')
        plt.axvspan(3.7, 9.7, alpha=0.5, color='lightgrey')
        plt.axvspan(9.7, 14.7, alpha=0.5, color='cyan')
        for typenum, type in enumerate(meankeyarr):
            sdkey = sdkeyarr[typenum]
            # print(mean[type])
            if (sample == 'sg'):
                ax.bar(X111 + typenum * width,
                       np.array(mean[type]) + baseline,
                       width,
                       color=color[typenum],
                       capsize=3,
                       label=legendarr[typenum])
            else:
                ax.bar(X111 + typenum * width,
                       np.array(mean[type]) + baseline,
                       width,
                       color=color[typenum],
                       yerr=np.array(sd[sdkey]),
                       align='center',
                       ecolor=ecolor,
                       capsize=3,
                       label=legendarr[typenum])
        for typenum, type in enumerate(meankeyarr):
            sdkey = sdkeyarr[typenum]
            if (sample == 'sg'):
                ax.bar(X111 + typenum * width,
                       np.array(fitmean[type]) + baseline,
                       width,
                       color=colorfittime[typenum],
                       capsize=3,
                       label=legend2arr[typenum])
            else:
                ax.bar(X111 + typenum * width,
                       np.array(fitmean[type]) + baseline,
                       width,
                       color=colorfittime[typenum],
                       yerr=np.array(fitsd[sdkey]),
                       align='center',
                       ecolor=ecolor,
                       capsize=3,
                       label=legend2arr[typenum])

        if (snum == 0):
            l1 = ffffff.legend(loc='upper center', ncol=5, fontsize=25)
            noiselegendarr = [
                '$\\epsilon=0$', '$\\epsilon=10^{-6}$', '$\\epsilon=10^{-2}$'
            ]
            l2 = ffffff.legend(noiselegendarr,
                               loc='upper center',
                               ncol=4,
                               bbox_to_anchor=(0.435, 0.85),
                               fontsize=20,
                               borderaxespad=0.,
                               shadow=False)
        ax.set_xticks(X111 + (len(meankeyarr) - 1) * width / 2)
        xlab = []
        for f in farr:
            # print(f)
            # xlab.append("\\ref{fn:%s}"%(f))
            # xlab.append("\\ref{%s}"%(f))
            xlab.append("%s" % (f))
        xlab1 = np.concatenate((xlab, xlab, xlab), axis=None)
        ax.set_xticklabels(xlab1, fontsize=20)
        ax.set_xlabel("Test functions", fontsize=22)
        ax.set_ylabel("$\\log_{10}$ [CPU time (sec)]", fontsize=22)
        ax.label_outer()

    plt.gca().yaxis.set_major_formatter(
        mtick.FuncFormatter(lambda x, _: x - baseline))

    ffffff.savefig("../../log/cputime2.png",
                   bbox_extra_artists=(l1, ),
                   bbox_inches='tight')

    plt.clf()
    plt.close('all')

    # CPU time plot for paper
    import matplotlib as mpl
    # mpl.use('pgf')
    # pgf_with_custom_preamble = {
    #     "text.usetex": True,    # use inline math for ticks
    #     "pgf.rcfonts": False,   # don't setup fonts from rc parameters
    #     "pgf.preamble": [
    #         "\\usepackage{amsmath}",         # load additional packages
    #     ]
    # }
    # mpl.rcParams.update(pgf_with_custom_preamble)

    totalrow = 1
    totalcol = 1
    meankeyarr1 = ['papp', 'rapp', 'rapprd', 'rappsip', 'rfittime']
    sdkeyarr1 = ['pappsd', 'rappsd', 'rapprdsd', 'rappsipsd', 'rmstimesd']
    # color1 = ['#900C3F','#C70039','#FF5733','#FFC300','pink']
    color1 = ["m", "c", "g", "b"]
    # legendarr1 = ['Polynomial Approximation ','Algorithm \\ref{ALG:MVVandQR} without degree reduction','Algorithm \\ref{ALG:MVVandQR} with degree reduction' ,'Algorithm \\ref{A:Polyak}: fit time','Algorithm \\ref{A:Polyak}: multistart time']
    legendarr1 = [
        '$p(x)$', '$r_1(x)$', '$r_2(x)$',
        '$r_3(x)\\mathrm{:\\ multistart\\ time}$',
        '$r_3(x)\\mathrm{:\\ fit\\ time}$'
    ]
    import matplotlib.pyplot as plt
    from matplotlib.ticker import ScalarFormatter
    mpl.rc('text', usetex=True)
    mpl.rc('font', family='serif', size=12)
    mpl.rc('font', weight='bold')
    mpl.rcParams['text.latex.preamble'] = [r'\usepackage{sfmath} \boldmath']
    # mpl.style.use("ggplot")

    mpl.rc('font', family='serif')

    ffffff = plt.figure(0, figsize=(15, 10))
    X111 = np.arange(len(farr) * len(noisearr))
    axarray = []
    width = 0.2
    ecolor = 'black'
    plt.rc('ytick', labelsize=20)
    plt.rc('xtick', labelsize=20)
    for snum, sample in enumerate(allsamples):
        mean = {}
        for type in meankeyarr1:
            mean[type] = []
        sd = {}
        for type in sdkeyarr1:
            sd[type] = []
        for nnum, noise in enumerate(noisearr):
            for fname in farr:
                for type in meankeyarr1:
                    mean[type].append(results[sample][fname][noise][type])
                    # mean[type].append(np.ma.log10(results[sample][fname][noise][type]))
                for type in sdkeyarr1:
                    # print(results[sample][fname][noise][type])
                    sd[type].append(results[sample][fname][noise][type])
                    # sd[type].append(np.ma.log10(results[sample][fname][noise][type]))

        if (len(axarray) > 0):
            ax = plt.subplot2grid((totalrow, totalcol), (snum, 0),
                                  sharex=axarray[0],
                                  sharey=axarray[0])
            axarray.append(ax)
        else:
            ax = plt.subplot2grid((totalrow, totalcol), (snum, 0))
            axarray.append(ax)
        # ax.xaxis.set_major_locator(MaxNLocator(integer=True))
        # ax.yaxis.set_major_locator(MaxNLocator(integer=True))
        # ax.set_xlim(-.3,14.7)
        # ax.spines['top'].set_visible(False)
        # ax.spines['right'].set_visible(False)
        # plt.axvspan(-.3, 3.7, alpha=0.5, color='pink')
        # plt.axvspan(3.7, 9.7, alpha=0.5, color='lightgrey')
        # plt.axvspan(9.7, 14.7, alpha=0.5, color='cyan')
        alignarr = [0.5, 0.5, 0.5, 0.2]
        for typenum, type in enumerate(meankeyarr1):
            sdkey = sdkeyarr1[typenum]
            # print(mean[type])
            if (sample == 'sg'):
                ax.bar(X111 + typenum * width,
                       np.array(mean[type]),
                       width,
                       color=color1[typenum],
                       capsize=3,
                       label=legendarr1[typenum])
            else:
                ax.bar(X111 + typenum * width,
                       np.array(mean[type]),
                       width,
                       color=color1[typenum],
                       alpha=alignarr[typenum],
                       align='center',
                       ecolor=ecolor,
                       capsize=3,
                       label=legendarr1[typenum])
                ax.vlines(X111 + typenum * width, np.array(mean[type]),
                          np.array(mean[type]) + np.array(sd[sdkey]))
            # The 5th series (fit time) is drawn stacked at the 4th
            # series' x-offset, then the loop stops.
            if (typenum == 3):
                newtn = typenum + 1
                newtype = meankeyarr1[newtn]
                newsdkey = sdkeyarr1[newtn]
                ax.bar(X111 + typenum * width,
                       np.array(mean[newtype]),
                       width,
                       color=color1[typenum],
                       alpha=0.5,
                       align='center',
                       ecolor=ecolor,
                       capsize=3,
                       label=legendarr1[newtn])
                ax.vlines(X111 + typenum * width, np.array(mean[type]),
                          np.array(mean[type]) + np.array(sd[newsdkey]))
                break

        if (snum == 0):
            l1 = ax.legend(loc='upper left',
                           ncol=1,
                           fontsize=20,
                           framealpha=1,
                           shadow=True,
                           frameon=False)
            l1.get_frame().set_facecolor('white')
            noiselegendarr = [
                '$\\epsilon=0$', '$\\epsilon=10^{-6}$', '$\\epsilon=10^{-2}$'
            ]
            # l2 = ffffff.legend(noiselegendarr,loc='upper center', ncol=4,bbox_to_anchor=(0.435, 0.85), fontsize = 20,borderaxespad=0.,shadow=False)
        ax.set_xticks(X111 + (len(meankeyarr) - 1) * width / 2)
        ax.set_yscale("log")
        xlab = []
        for f in farr:
            # print(f)
            xlab.append("\\ref{fn:%s}" % (f))
            # xlab.append("\\ref{%s}"%(f))
            # xlab.append("%s"%(f))
        # xlab1 = np.concatenate((xlab,xlab,xlab),axis=None)
        xlab11 = ['$A.1.4$', '$A.1.7$', '$A.1.15$', '$A.1.16$', '$A.1.17$']
        ax.set_xticklabels(xlab11, fontsize=20)

        plt.rc('ytick', labelsize=20)
        plt.rc('xtick', labelsize=20)
        plt.tick_params(labelsize=20)
        # ax.set_xlabel("Test functions",fontsize=22)
        ax.set_ylabel("$\\mathrm{CPU\\ time\\ (sec)}$", fontsize=20)
        # for axis in [ax.xaxis, ax.yaxis]:
        #     axis.set_major_formatter(ScalarFormatter())

        ax.label_outer()

    # plt.gca().yaxis.set_major_formatter(mtick.FuncFormatter(lambda x,_: x-baseline))

    # ffffff.savefig("../../log/cputimeplot.pgf", bbox_extra_artists=(l1,), bbox_inches='tight')
    # ffffff.savefig("../../log/cputimeplot.png", bbox_extra_artists=(l1,), bbox_inches='tight')
    ffffff.savefig("../../log/cputimeplot.pdf",
                   bbox_extra_artists=(l1, ),
                   bbox_inches='tight')

    plt.clf()
    plt.close('all')

    exit(1)

    # Iteration plot
    import matplotlib as mpl
    mpl.rc('text', usetex=True)
    mpl.rc('font', family='serif', size=12)
    mpl.rc('font', weight='bold')
    mpl.rcParams['text.latex.preamble'] = [r'\usepackage{sfmath} \boldmath']
    # mpl.style.use("ggplot")
    # mpl.use('pgf')
    # pgf_with_custom_preamble = {
    #     "text.usetex": True,    # use inline math for ticks
    #     "pgf.rcfonts": False,   # don't setup fonts from rc parameters
    #     "pgf.preamble": [
    #         "\\usepackage{amsmath}",         # load additional packages
    #     ]
    # }
    # mpl.rcParams.update(pgf_with_custom_preamble)
    color = ['#FFC300', '#FF5733', '#900C3F']
    X111 = np.arange(len(farr))
    ffffff = plt.figure(0, figsize=(15, 10))
    # ffffff = plt.figure(0)
    plt.rc('ytick', labelsize=20)
    plt.rc('xtick', labelsize=20)
    totalrow = 1
    totalcol = len(noisearr)
    baseline = 0
    width = 0.4
    axarray = []
    legendarr = [
        '$\\mathrm{Latin\\ Hypercube\\ Sampling\\ (LHS)}$',
        '$\\mathrm{decoupled\\ Latin\\ Hypercube\\ Design\\ (d-LHD)}$'
    ]
    for nnum, noise in enumerate(noisearr):
        mean = {}
        sd = {}
        for snum, sample in enumerate(allsamples):
            mean[sample] = []
            sd[sample] = []
            for fname in farr:
                # mean[sample].append(results[sample][fname][noise]['rnoiters'])
                # sd[sample].append(results[sample][fname][noise]['rnoiterssd'])
                mean[sample].append(np.average(dumpr[sample][fname]))
                sd[sample].append(np.std(dumpr[sample][fname]))
        if (len(axarray) > 0):
            ax = plt.subplot2grid((totalrow, totalcol), (0, nnum),
                                  sharex=axarray[0],
                                  sharey=axarray[0])
            axarray.append(ax)
        else:
            ax = plt.subplot2grid((totalrow, totalcol), (0, nnum))
            axarray.append(ax)

        for snum, sample in enumerate(allsamples):
            # print(mean[type])
            if (sample == 'sg'):
                ax.bar(X111 + snum * width,
                       np.array(mean[sample]),
                       width,
                       color=color[snum],
                       capsize=3)
            else:
                ax.bar(X111 + snum * width,
                       np.array(mean[sample]),
                       width,
                       color=color[snum],
                       align='center',
                       ecolor=ecolor,
                       capsize=3,
                       label=legendarr[snum])

                ax.vlines(X111 + snum * width,
                          np.array(mean[sample]),
                          np.array(mean[sample]) + np.array(sd[sample]),
                          label=None)
        ax.set_xticks(X111 + (len(allsamples) - 1) * width / 2)

        xlab = []
        for f in farr:
            # print(f)
            xlab.append("\\ref{fn:%s}" % (f))
            # xlab.append("\\ref{%s}"%(f))
            # xlab.append("%s"%(f))
        xlab = ['$A.1.4$', '$A.1.7$', '$A.1.15$', '$A.1.16$', '$A.1.17$']
        ax.set_xticklabels(xlab, fontsize=24)

        plt.tick_params(labelsize=24)
        # ax.set_xlabel("Test functions",fontsize=22)
        # ax.set_ylabel("$\\log_{10}$ [Number of iterations]",fontsize=40)
        ax.set_ylabel("$\\mathrm{Number\\ of\\ iterations}$", fontsize=28)
        ax.label_outer()
    l1 = ax.legend(loc='upper left', ncol=1, fontsize=24, frameon=False)
    # plt.gca().yaxis.set_major_formatter(mtick.FuncFormatter(lambda x,_: x-baseline))
    plt.tight_layout()
    # plt.savefig("../../log/iterations.png")
    # ffffff.savefig("../../log/iterations.png", bbox_extra_artists=(l1,), bbox_inches='tight')
    ffffff.savefig("../../log/iterations.pdf",
                   bbox_extra_artists=(l1, ),
                   bbox_inches='tight')
    plt.clf()
    plt.close('all')

    exit(1)
# Example #4
# 0
def generatebenchmarkdata(m, n):
    """
    Generate benchmark training data for every test function and sampling
    scheme, repeated over five seeded experiments and four noise levels.

    For each function from getfarr(), npoints = 2 * numCoeffsRapp(dim, [m, n])
    points are drawn with each scheme in `samplearr`, evaluated via getData
    (with multiplicative noise per getnoiseinfo), and written as CSV to
    results/<exp>/benchmarkdata/<fname><noisestr>_<sample>.txt.

    Parameters
    ----------
    m, n : int (or str convertible to int)
        Numerator and denominator degrees of the rational approximation;
        together with the dimension they fix the sample-size budget.
    """
    # One fixed seed per repeated experiment so runs are reproducible.
    seedarr = [54321, 456789, 9876512, 7919820, 10397531]
    folder = "results"
    # Sampling schemes: Monte Carlo, Latin hypercube, Sobol, sparse grid,
    # and the split-LHS design (interior points + per-face points).
    samplearr = ["mc", "lhs", "so", "sg", "splitlhs"]
    # samplearr = ["lhs","sg","splitlhs"]
    # samplearr = ["lhs","splitlhs"]
    # samplearr = ["splitlhs"]
    from apprentice import tools
    from pyDOE import lhs
    import apprentice
    ts = 2  # oversampling factor: npoints = ts * (number of coefficients)
    farr = getfarr()
    for fname in farr:
        dim = getdim(fname)
        minarr, maxarr = getbox(fname)
        npoints = ts * tools.numCoeffsRapp(dim, [int(m), int(n)])
        print(npoints)
        for sample in samplearr:
            for numex, ex in enumerate(
                ["exp1", "exp2", "exp3", "exp4", "exp5"]):
                seed = seedarr[numex]
                np.random.seed(seed)
                if (sample == "mc"):
                    # Uniform random points, one column per dimension,
                    # scaled into [minarr[d], maxarr[d]].
                    Xperdim = ()
                    for d in range(dim):
                        Xperdim = Xperdim + (np.random.rand(npoints, ) *
                                             (maxarr[d] - minarr[d]) +
                                             minarr[d], )
                    X = np.column_stack(Xperdim)
                    # Overwrite the first 2**dim points with the box corners
                    # (binary string of d selects min/max per coordinate).
                    # NOTE(review): assumes npoints >= 2**dim.
                    formatStr = "{0:0%db}" % (dim)
                    for d in range(2**dim):
                        binArr = [int(x) for x in formatStr.format(d)[0:]]
                        val = []
                        for i in range(dim):
                            if (binArr[i] == 0):
                                val.append(minarr[i])
                            else:
                                val.append(maxarr[i])
                        X[d] = val
                elif (sample == "sg"):
                    from dolo.numeric.interpolation.smolyak import SmolyakGrid
                    # Grow the Smolyak level until the grid has at least
                    # npoints nodes (grid size is fixed by the level).
                    s = 0
                    l = 1
                    while (s < npoints):
                        sg = SmolyakGrid(a=minarr, b=maxarr, l=l)
                        s = sg.grid.shape[0]
                        l += 1
                    X = sg.grid
                elif (sample == "so"):
                    # Sobol low-discrepancy sequence, rescaled to the box.
                    X = my_i4_sobol_generate(dim, npoints, seed)
                    s = apprentice.Scaler(np.array(X, dtype=np.float64),
                                          a=minarr,
                                          b=maxarr)
                    X = s.scaledPoints
                elif (sample == "lhs"):
                    # Maximin Latin hypercube, rescaled to the box.
                    X = lhs(dim, samples=npoints, criterion='maximin')
                    s = apprentice.Scaler(np.array(X, dtype=np.float64),
                                          a=minarr,
                                          b=maxarr)
                    X = s.scaledPoints
                elif (sample == "splitlhs"):
                    # Split-LHS: separate LHS designs for the box interior
                    # and for a thin slab (width epsarr) on each face.
                    epsarr = []
                    for d in range(dim):
                        #epsarr.append((maxarr[d] - minarr[d])/10)
                        epsarr.append(10**-6)

                    # Face budget mirrors splitlhsnpoints(): twice the
                    # coefficient count of the (dim-1)-dimensional problem.
                    facepoints = int(
                        2 *
                        tools.numCoeffsRapp(dim - 1, [int(m), int(n)]))
                    insidepoints = int(npoints - facepoints)
                    Xmain = np.empty([0, dim])
                    # Generate inside points
                    minarrinside = []
                    maxarrinside = []
                    for d in range(dim):
                        minarrinside.append(minarr[d] + epsarr[d])
                        maxarrinside.append(maxarr[d] - epsarr[d])
                    X = lhs(dim, samples=insidepoints, criterion='maximin')
                    s = apprentice.Scaler(np.array(X, dtype=np.float64),
                                          a=minarrinside,
                                          b=maxarrinside)
                    X = s.scaledPoints
                    Xmain = np.vstack((Xmain, X))

                    #Generate face points
                    perfacepoints = int(np.ceil(facepoints / (2 * dim)))
                    index = 0
                    for d in range(dim):
                        for e in [minarr[d], maxarr[d]]:
                            # Distinct reseed per face for reproducibility.
                            index += 1
                            np.random.seed(seed + index * 100)
                            X = lhs(dim,
                                    samples=perfacepoints,
                                    criterion='maximin')
                            # Collapse coordinate d to a thin slab at the
                            # face; other coordinates span the full box.
                            minarrface = np.empty(shape=dim, dtype=np.float64)
                            maxarrface = np.empty(shape=dim, dtype=np.float64)
                            for p in range(dim):
                                if (p == d):
                                    if e == maxarr[d]:
                                        minarrface[p] = e - epsarr[d]
                                        maxarrface[p] = e
                                    else:
                                        minarrface[p] = e
                                        maxarrface[p] = e + epsarr[d]
                                else:
                                    minarrface[p] = minarr[p]
                                    maxarrface[p] = maxarr[p]
                            s = apprentice.Scaler(np.array(X,
                                                           dtype=np.float64),
                                                  a=minarrface,
                                                  b=maxarrface)
                            X = s.scaledPoints
                            Xmain = np.vstack((Xmain, X))
                    # Deduplicate (unique also sorts rows lexicographically).
                    Xmain = np.unique(Xmain, axis=0)
                    X = Xmain
                    # As in the "mc" branch, pin the first 2**dim points to
                    # the box corners.
                    formatStr = "{0:0%db}" % (dim)
                    for d in range(2**dim):
                        binArr = [int(x) for x in formatStr.format(d)[0:]]
                        val = []
                        for i in range(dim):
                            if (binArr[i] == 0):
                                val.append(minarr[i])
                            else:
                                val.append(maxarr[i])
                        X[d] = val

                if not os.path.exists(folder + "/" + ex + '/benchmarkdata'):
                    os.makedirs(folder + "/" + ex + '/benchmarkdata',
                                exist_ok=True)
                for noise in ["0", "10-2", "10-4", "10-6"]:
                    noisestr, noisepct = getnoiseinfo(noise)

                    Y = getData(X, fn=fname, noisepct=noisepct, seed=seed)

                    outfile = "%s/%s/benchmarkdata/%s%s_%s.txt" % (
                        folder, ex, fname, noisestr, sample)
                    print(outfile)
                    np.savetxt(outfile, np.hstack((X, Y.T)), delimiter=",")
                # Sparse-grid points are deterministic, so one experiment
                # suffices; skip the remaining seeds.
                if (sample == "sg"):
                    break
示例#5
0
def generatespecialdata():
    """
    Generate 2D Sobol-sampled data for f8, f9 and f12 at several
    multiplicative-noise levels and write each set to a text file.

    A single Sobol design X (2 * numCoeffsRapp(2, [5, 5]) points, scaled to
    [-1, 1]^2) and a single standard-normal noise vector are reused across
    all function/noise pairs, so the output files differ only in the noise
    amplitude: Y_train = Y * (1 + noisepct * N(0, 1)).

    Output files: <outfolder>/<fname><noisestr>_so.txt (CSV: X columns
    followed by the noisy Y column).
    """
    from apprentice import tools
    import apprentice

    m = 5
    n = 5
    dim = 2
    farr = ["f8", "f9", "f12"]
    noisearr = ["0", "10-12", "10-10", "10-8", "10-6", "10-4"]
    ts = 2  # oversampling factor relative to the coefficient count
    npoints = ts * tools.numCoeffsRapp(dim, [int(m), int(n)])

    seed = 54321
    sample = "so"
    # Sobol low-discrepancy design, rescaled to [-1, 1]^2.
    X = my_i4_sobol_generate(dim, npoints, seed)
    s = apprentice.Scaler(np.array(X, dtype=np.float64), a=[-1, -1], b=[1, 1])
    X = s.scaledPoints
    lennn = npoints

    # One fixed noise realization shared by all functions and noise levels.
    stdnormalnoise = np.zeros(shape=(lennn), dtype=np.float64)
    for i in range(lennn):
        stdnormalnoise[i] = np.random.normal(0, 1)

    # Noise label -> multiplicative amplitude ("0" maps to 0 via .get()).
    noisepctmap = {
        "10-4": 10**-4,
        "10-6": 10**-6,
        "10-8": 10**-8,
        "10-10": 10**-10,
        "10-12": 10**-12,
    }
    for fname in farr:
        # NOTE(review): box is looked up but unused here -- the design is
        # always scaled to [-1, 1]^2 above; kept for parity with callers.
        minarr, maxarr = getbox(fname)

        for noise in noisearr:
            noisestr = "" if noise == "0" else "_noisepct" + noise
            noisepct = noisepctmap.get(noise, 0)
            print(noisepct)
            # Noise-free evaluation; noise is applied multiplicatively below.
            Y = getData(X, fn=fname, noisepct=0, seed=seed)
            Y_train = np.atleast_2d(
                np.array(Y) * (1 + noisepct * stdnormalnoise))
            # NOTE(review): hard-coded, machine-specific output directory --
            # consider promoting this to a parameter.
            outfolder = "/Users/mkrishnamoorthy/Desktop/Data"
            outfile = "%s/%s%s_%s.txt" % (outfolder, fname, noisestr, sample)
            print(outfile)
            np.savetxt(outfile, np.hstack((X, Y_train.T)), delimiter=",")
示例#6
0
def analyzefacevsinner():
    """
    Plot RA-SIP iteration counts (log10) for f18 and f20 versus the
    face-to-interior point ratio of the split-LHS sampling design.

    Reads the experiment index from f18_f20_facevsinner/data.json (written
    by runfacevsinner), locates the per-experiment result files, averages
    log10(#iterations) over the five experiments for each face-point
    fraction, and writes an errorbar plot to ../../log/facevsinnter.pdf.
    Exits with status 1 if a referenced result file is missing.
    """
    import json

    folder = "f18_f20_facevsinner"
    m = 5
    n = 5
    dim = 4
    tstimes = 2
    from apprentice import tools
    # Total sample budget for a degree-(m, n) rational approximation.
    npoints = tstimes * tools.numCoeffsRapp(dim, [m, n])
    facespctarr = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]

    with open(folder + "/data.json", 'r') as fn:
        data = json.load(fn)

    X = []                # face/interior point ratio per face percentage
    Y18m, Y18s = [], []   # mean / std of log10(iterations) for f18
    Y20m, Y20s = [], []   # mean / std of log10(iterations) for f20
    for facepct in facespctarr:
        facepoints = int(np.ceil(npoints * facepct))
        insidepoints = int(npoints - facepoints)
        X.append(facepoints / insidepoints)

        f18iter = []
        f20iter = []
        for ex in ["exp1", "exp2", "exp3", "exp4", "exp5"]:
            # Locate the result files recorded for this configuration.
            f18file = ""
            f20file = ""
            for d in data['info']:
                if (d['exp'] == ex and d['facepoints'] == facepoints
                        and d['insidepoints'] == insidepoints):
                    if d['fname'] == 'f18':
                        f18file = d['outfile']
                    elif d['fname'] == 'f20':
                        f20file = d['outfile']
            if not os.path.exists(f18file):
                print("f18 file not found: %s" % (f18file))
                exit(1)
            if not os.path.exists(f20file):
                print("f20 file not found: %s" % (f20file))
                exit(1)
            with open(f18file, 'r') as fn:
                datastore = json.load(fn)
            f18iter.append(np.log10(len(datastore['iterationinfo'])))
            with open(f20file, 'r') as fn:
                datastore = json.load(fn)
            f20iter.append(np.log10(len(datastore['iterationinfo'])))

        Y18m.append(np.average(f18iter))
        Y18s.append(np.std(f18iter))
        Y20m.append(np.average(f20iter))
        Y20s.append(np.std(f20iter))

    print(X)
    print(Y18m)
    print(Y20m)
    import matplotlib.pyplot as plt
    plt.figure(0, figsize=(15, 10))
    # The smallest face fraction is skipped, as in the original analysis.
    plt.plot(X[1:], Y18m[1:], label='\\ref{fn:f18}')
    plt.errorbar(X[1:], Y18m[1:], yerr=Y18s[1:], fmt='-o')
    plt.plot(X[1:], Y20m[1:], label='\\ref{fn:f20}')
    plt.errorbar(X[1:], Y20m[1:], yerr=Y20s[1:], fmt='-o')
    plt.tick_params(labelsize=28)
    plt.savefig("../../log/facevsinnter.pdf", bbox_inches='tight')
    plt.close('all')  # release the figure (was previously leaked)
示例#7
0
def runfacevsinner():
    """
    Launch RA-SIP fitting runs for f18 and f20 over a sweep of split-LHS
    face-point fractions, one run per (experiment seed, face fraction,
    function) combination.

    For each face fraction in facespctarr the total point budget
    (2 * numCoeffsRapp(4, [5, 5])) is split into interior LHS points and
    per-face LHS points, the design is evaluated noise-free, written to
    f18_f20_facevsinner/<exp>/benchmarkdata/, and a background
    `runrappsip.py` job is started via os.system/nohup. The locations of
    all generated files are recorded in f18_f20_facevsinner/data.json for
    analyzefacevsinner() to consume.
    """
    def getData(X_train, fn, noisepct, seed):
        """
        Evaluate test function `fn` ("f18" or "f20") on X_train and apply
        multiplicative Gaussian noise of relative amplitude `noisepct`.

        TODO use eval or something to make this less noisy
        """
        from apprentice import testData
        if fn == "f18":
            Y_train = [testData.f18(x) for x in X_train]
        elif fn == "f20":
            Y_train = [testData.f20(x) for x in X_train]
        else:
            raise Exception("function {} not implemented, exiting".format(fn))
        np.random.seed(seed)

        # One standard-normal draw per sample for the multiplicative noise.
        stdnormalnoise = np.zeros(shape=(len(Y_train)), dtype=np.float64)
        for i in range(len(Y_train)):
            stdnormalnoise[i] = np.random.normal(0, 1)
        # return Y_train
        return np.atleast_2d(
            np.array(Y_train) * (1 + noisepct * stdnormalnoise))

    def getbox(f):
        """Return (min, max) box bounds per dimension for function `f`."""
        minbox = []
        maxbox = []
        if (f == "f18"):
            minbox = [-0.95, -0.95, -0.95, -0.95]
            maxbox = [0.95, 0.95, 0.95, 0.95]
        elif (f == "f20"):
            minbox = [10**-6, 10**-6, 10**-6, 10**-6]
            maxbox = [4 * np.pi, 4 * np.pi, 4 * np.pi, 4 * np.pi]
        else:
            # Fallback 2D box; only f18/f20 are used below.
            minbox = [-1, -1]
            maxbox = [1, 1]
        return minbox, maxbox

    from apprentice import tools
    data = {'info': []}  # index of all launched runs, dumped to data.json
    import apprentice
    dim = 4
    # One fixed seed per repeated experiment so runs are reproducible.
    seedarr = [54321, 456789, 9876512, 7919820, 10397531]

    m = 5
    n = 5
    tstimes = 2  # oversampling factor relative to the coefficient count
    ts = "2x"
    # fname = "f20-"+str(dim)+"D_ts"+ts
    fname = "f20-" + str(dim) + "D"
    from apprentice import tools
    from pyDOE import lhs
    npoints = tstimes * tools.numCoeffsRapp(dim, [m, n])
    print(npoints)
    # Thickness of the slab placed on each face of the box.
    epsarr = []
    for d in range(dim):
        #epsarr.append((maxarr[d] - minarr[d])/10)
        epsarr.append(10**-6)

    # Fractions of the total budget assigned to the faces.
    facespctarr = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
    folder = "f18_f20_facevsinner"
    for numex, ex in enumerate(["exp1", "exp2", "exp3", "exp4", "exp5"]):
        seed = seedarr[numex]
        for facenum, facepct in enumerate(facespctarr):
            facepoints = int(np.ceil(npoints * facepct))
            insidepoints = int(npoints - facepoints)
            # print(insidepoints)
            # Generate inside points
            for fname in ['f18', 'f20']:
                Xmain = np.empty([0, dim])
                minarrinside = []
                maxarrinside = []
                minarr, maxarr = getbox(fname)
                if (insidepoints > 1):
                    # LHS design over the box shrunk by epsarr on each side.
                    for d in range(dim):
                        minarrinside.append(minarr[d] + epsarr[d])
                        maxarrinside.append(maxarr[d] - epsarr[d])
                    X = lhs(dim, samples=insidepoints, criterion='maximin')
                    s = apprentice.Scaler(np.array(X, dtype=np.float64),
                                          a=minarrinside,
                                          b=maxarrinside)
                    X = s.scaledPoints
                    Xmain = np.vstack((Xmain, X))

                #Generate face points
                perfacepoints = int(np.ceil(facepoints / (2 * dim)))
                if (perfacepoints > 1):
                    index = 0
                    for d in range(dim):
                        for e in [minarr[d], maxarr[d]]:
                            # Distinct reseed per face for reproducibility.
                            index += 1
                            np.random.seed(seed + index * 100)
                            X = lhs(dim,
                                    samples=perfacepoints,
                                    criterion='maximin')
                            # Collapse coordinate d to a thin slab at the
                            # face; other coordinates span the full box.
                            minarrface = np.empty(shape=dim, dtype=np.float64)
                            maxarrface = np.empty(shape=dim, dtype=np.float64)
                            for p in range(dim):
                                if (p == d):
                                    if e == maxarr[d]:
                                        minarrface[p] = e - epsarr[d]
                                        maxarrface[p] = e
                                    else:
                                        minarrface[p] = e
                                        maxarrface[p] = e + epsarr[d]
                                else:
                                    minarrface[p] = minarr[p]
                                    maxarrface[p] = maxarr[p]
                            s = apprentice.Scaler(np.array(X,
                                                           dtype=np.float64),
                                                  a=minarrface,
                                                  b=maxarrface)
                            X = s.scaledPoints
                            Xmain = np.vstack((Xmain, X))
                # Deduplicate (unique also sorts rows lexicographically).
                Xmain = np.unique(Xmain, axis=0)
                X = Xmain
                # formatStr = "{0:0%db}"%(dim)
                # for d in range(2**dim):
                #     binArr = [int(x) for x in formatStr.format(d)[0:]]
                #     val = []
                #     for i in range(dim):
                #         if(binArr[i] == 0):
                #             val.append(minarr[i])
                #         else:
                #             val.append(maxarr[i])
                #     X[d] = val
                if not os.path.exists(folder + "/" + ex + '/benchmarkdata'):
                    os.makedirs(folder + "/" + ex + '/benchmarkdata',
                                exist_ok=True)
                noise = "0"  # noise-free data only for this study
                noisestr, noisepct = getnoiseinfo(noise)

                Y = getData(X, fn=fname, noisepct=noisepct, seed=seed)
                # Input data file, tagged with face/inside point counts.
                infile = "%s/%s/benchmarkdata/%s%s_splitlhs_f%d_i%d.txt" % (
                    folder, ex, fname, noisestr, facepoints, insidepoints)
                print(infile)
                np.savetxt(infile, np.hstack((X, Y.T)), delimiter=",")

                folderplus = "%s/%s/%s%s_splitlhs" % (folder, ex, fname,
                                                      noisestr)
                fndesc = "%s%s_splitlhs_f%d_i%d" % (fname, noisestr,
                                                    facepoints, insidepoints)
                if not os.path.exists(folderplus + "/outrasip"):
                    os.makedirs(folderplus + "/outrasip", exist_ok=True)
                if not os.path.exists(folderplus + "/log/consolelograsip"):
                    os.makedirs(folderplus + "/log/consolelograsip",
                                exist_ok=True)
                # NOTE(review): rebinds the outer ints m, n to strings for
                # path building; harmless on later iterations since
                # str(str(x)) == str(x), but fragile.
                m = str(m)
                n = str(n)
                consolelog = folderplus + "/log/consolelograsip/" + fndesc + "_p" + m + "_q" + n + "_ts2x.log"
                outfile = folderplus + "/outrasip/" + fndesc + "_p" + m + "_q" + n + "_ts2x.json"
                data['info'].append({
                    'exp': ex,
                    'fname': fname,
                    'outfile': outfile,
                    'facepoints': facepoints,
                    'insidepoints': insidepoints
                })
                penaltyparam = 0
                # Launch the fit as a detached background job.
                cmd = 'nohup python runrappsip.py %s %s %s %s Cp %f %s %s >%s 2>&1 &' % (
                    infile, fndesc, m, n, penaltyparam, folderplus, outfile,
                    consolelog)
                # print(cmd)
                # exit(1)
                os.system(cmd)

    # Persist the run index so analyzefacevsinner() can find the outputs.
    import json
    with open(folder + "/data.json", "w") as f:
        json.dump(data, f, indent=4, sort_keys=True)