Example #1
def main():
    scraper = Scraper(website_page=website_page, headers=headers)

    # Get data from the website.
    scraper.get_record_player_data()

    # Save data to database.
    data = scraper.record_players_data
    for row in data:
        sq.insert_record_player(row)

    # Draw plot.
    pl.draw()

    # Close database.
    sq.conn.close()
Example #2
    def test_draw(self):
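        # Smoke test: build a closed loop from four straights and four
        # 90-degree arcs, then check the figure renders to PNG without error.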
        line = Segment(length=100.0)
        arc = Segment(arc=math.radians(90), radius=50.0)

        track = Track([line, arc] * 4, width=20)
        fig = draw(track)
        with tempfile.TemporaryFile() as f:
            fig.savefig(f, format='png')
Example #3
def main(argv):
    global appInfo, config_file, groundTruth_profile, knob_samples, knobs, mode, model

    # parse the argument
    parser = declareParser()
    options, args = parser.parse_args()
    # insert to global variables
    parseCMD(options)

    prepare()

    if mode == "machine":
        machineTrainer = MachineTrainer()
        machineTrainer.train()
        return

    if mode == "finalize":  # TODO: need to update appInfo-related code
        # parse the run_config
        with open(run_config_path) as config_json:
            config = json.load(config_json)
            methods_path = config['appMet']
            cost_rsdg = Classes.pieceRSDG()
            cost_rsdg.fromFile(config['cost_rsdg'])
            mv_rsdgs = []
            preferences = config['preferences']
            desc = config['desc']
            lvl = config['seglvl']
            for rsdg_path in config['mv_rsdgs']:
                rsdg = Classes.pieceRSDG()
                rsdg.fromFile(rsdg_path)
                mv_rsdgs.append(rsdg)
        module = imp.load_source("", methods_path)
        appMethod = module.appMethods("", obj_path)

        knobs, groundTruth_profile, knob_samples = genTrainingSet(
            appInfo.DESC, GRANULARITY)
        xml = xmlgen.genxml(appInfo.APP_NAME, "", "", True, appInfo.DESC, True)
        factfile, mvfactfile = genFactWithRSDG(appInfo.APP_NAME,
                                               groundTruth_profile, cost_rsdg,
                                               mv_rsdgs, appMethod, preferences)
        readFact(factfile, knobs, groundTruth_profile)
        readFact(mvfactfile, knobs, groundTruth_profile, False)
        groundTruth_profile.printProfile("./outputs/" + appInfo.APP_NAME +
                                         "gen.profile")
        cost_rsdg, mv_rsdgs, cost_path, mv_paths, seglvl = constructRSDG(
            groundTruth_profile, knob_samples, THRESHOLD, knobs, True, model,
            None, lvl)
        finalized_xml = completeXML(appInfo.APP_NAME, xml, cost_rsdg,
                                    mv_rsdgs[0], model, True)
        # append the xml path to the file
        config['finalized_rsdg'] = os.path.abspath(finalized_xml)
        with open(run_config_path, 'w') as updated_config:
            json.dump(config, updated_config, indent=2, sort_keys=True)
        return

    if mode == "optimality":
        # parse the run_config
        with open(run_config_path) as config_json:
            config = json.load(config_json)
            methods_path = config['appMet']
            cost_rsdg = Classes.pieceRSDG()
            cost_rsdg.fromFile(config['cost_rsdg'])
            mv_rsdgs = []
            preferences = config['preferences']
            desc = config['desc']
            lvl = config['seglvl']
            for rsdg_path in config['mv_rsdgs']:
                rsdg = Classes.pieceRSDG()
                rsdg.fromFile(rsdg_path)
                mv_rsdgs.append(rsdg)
        module = imp.load_source("", methods_path)
        appMethod = module.appMethods("", obj_path)

        appname, knobs, groundTruth_profile, knob_samples = genTrainingSet(
            desc, GRANULARITY)
        appname = appname[:-1]
        xml = xmlgen.genxml(appname, "", "", True, desc, True)
        max_cost = appMethod.max_cost
        min_cost = appMethod.min_cost
        step_size = (max_cost - min_cost) / 20.0
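        # sweep the budget range in 5% steps (19 interior points)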
        default_optimals = []
        # first get all the pareto-optimal points in default settings
        preferences = [1.0] * len(preferences)
        factfile, mvfactfile = genFactWithRSDG(appname, groundTruth_profile,
                                               cost_rsdg, mv_rsdgs, appMethod,
                                               preferences)
        readFact(factfile, knobs, groundTruth_profile)
        readFact(mvfactfile, knobs, groundTruth_profile, False)
        for percentage in range(1, 20):
            budget = (min_cost + float(percentage) * step_size)
            default_optimals.append(
                findOptimal(groundTruth_profile, budget)[0])
        # now iterate through all possible preferences
        mv_under_budget = {}
        for metric_idx, submetric in enumerate(preferences):
            for pref in range(1, 10, 1):
                # setup new preferences
                new_pref = []
                for i in range(0, len(preferences)):
                    if i == metric_idx:
                        new_pref.append(1.0 + float(pref))
                    else:
                        new_pref.append(1.0)
                factfile, mvfactfile = genFactWithRSDG(appname,
                                                       groundTruth_profile,
                                                       cost_rsdg, mv_rsdgs,
                                                       appMethod, new_pref)
                readFact(factfile, knobs, groundTruth_profile)
                readFact(mvfactfile, knobs, groundTruth_profile, False)
                # calculate the new optimal configs
                mv_loss = []
                for percentage in range(1, 20):
                    budget = (min_cost + float(percentage) * step_size)
                    real_optimal = findOptimal(groundTruth_profile, budget)[1]
                    default_optimal = groundTruth_profile.getMV(
                        default_optimals[percentage - 1])[-1]
                    mv_loss.append(default_optimal / real_optimal)
                # get the preference in string
                pref_string = "-".join(str(x) for x in new_pref)
                mv_under_budget[pref_string] = mv_loss
        # report only the worst case across all preference settings
        worst_cases = []
        for cur_budget in range(0, 19):
            worst_cases.append(
                min(values[cur_budget] for values in mv_under_budget.values()))
        return

    if mode == "standard":
        # initialize the app summary info
        appInfo = AppSummary(config_file)
        appInfo.printSummary()
        appname = appInfo.APP_NAME
        # ######################STAGE-1########################
        # generate initial training set

        knobs, groundTruth_profile, knob_samples, bb_profile = genTrainingSet(
            appInfo.DESC, GRANULARITY)
        fulltraining_size = len(groundTruth_profile.configurations)
        bb_size = len(bb_profile.configurations)
        # generate the structural RSDG files (XML)
        xml = xmlgen.genxml(appInfo.APP_NAME, "", "", True, appInfo.DESC)
        if stage == 1:
            return

        # ######################STAGE-2########################
        # train the application

        # load user-supplied methods
        time_record = genFact(appInfo, groundTruth_profile, bb_profile,
                              NUM_OF_FIXED_ENV)
        # ######################STAGE-3########################
        # third stage: Modeling, use the specific modeling method to construct
        # the RSDG
        readFact(appInfo.FILE_PATHS['COST_FILE_PATH'], knobs,
                 groundTruth_profile)
        readFact(appInfo.FILE_PATHS['COST_FILE_PATH'], knobs, bb_profile)
        if appInfo.TRAINING_CFG['withQoS']:
            readFact(appInfo.FILE_PATHS['MV_FILE_PATH'], knobs,
                     groundTruth_profile, False)
            readFact(appInfo.FILE_PATHS['MV_FILE_PATH'], knobs, bb_profile,
                     False)
        groundTruth_profile.printProfile(appInfo.FILE_PATHS['PROFILE_PATH'])
        # if it's just model validation
        if appInfo.TRAINING_CFG['validate']:
            rs_configs = representset.getConfigurationsFromFile(
                appInfo.VALIDATE_RS_PATH, knobs)
            representset.validateRS(groundTruth_profile, rs_configs)
            return
        # construct the cost rsdg iteratively given a threshold
        if model != "offline":
            cost_rsdg, mv_rsdgs, cost_path, mv_paths, seglvl, training_time, rsp_size = constructRSDG(
                groundTruth_profile, knob_samples, THRESHOLD, knobs, True,
                model, time_record)
        #cost_bb_rsdg, mv_bb_rsdgs, cost_bb_path, mv_bb_paths, seglvl_bb, training_time_full, rsp_bb_size = constructRSDG(
        #    bb_profile,
        #    knob_samples,
        #    THRESHOLD,
        #    knobs,
        #    True,
        #    model,
        #    time_record,
        #    KDG=False)
        #training_time.update(training_time_full)

        # Generate the representative set
        rss_size = 0
        rss_bb_size = 0
        rs = []
        rs_bb = []
        if appInfo.TRAINING_CFG['calRS'] or RSRUN:
            rs, mean = representset.genContRS(groundTruth_profile,
                                              RS_THRESHOLD)
            #rs_bb, mean_bb = representset.genContRS(bb_profile,
            #                                        RS_THRESHOLD,
            #                                        KDG=False)
            # generate the rsdg recovered by rs
            subprofile, partitions = groundTruth_profile.genRSSubset(rs)
            costrsdg_rs, mvrsdgs_rs, costpath_rs, mvpaths_rs = populate(
                subprofile, partitions, model, KDG=False, RS=True)
            rss_size = len(rs)
            rss_bb_size = len(rs_bb)
            with open("./outputs/" + appname + '/' + appname + ".rs",
                      'w') as result:
                for config in rs:
                    result.write(config.printSelf(",") + "\n")
                result.write(str(mean))
            with open("./outputs/" + appname + '/' + appname + "_bb.rs",
                      'w') as result:
                for config in rs_bb:
                    result.write(config.printSelf(",") + "\n")
                result.write(str(mean))
            # calculate training time for RS
            if time_record is not None:
                total_time = 0.0
                total_time_bb = 0.0
                for config in rs:
                    if config.printSelf("-") in time_record:
                        total_time += time_record[config.printSelf("-")]
                for config in rs_bb:
                    if config.printSelf("-") in time_record:
                        total_time_bb += time_record[config.printSelf("-")]
                training_time['RS'] = total_time
                training_time['RS_bb'] = total_time_bb
        # write training time to file
        if time_record is not None and model != "offline":
            with open('./outputs/' + appname + '/' + 'time_compare.txt',
                      'w') as file:
                file.write(json.dumps(training_time, indent=2, sort_keys=True))
        if PLOT:
            draw("outputs/modelValid.csv")
        if model != "offline":
            with open("./outputs/" + appname + '/training_size.txt',
                      'w') as ts:
                ts.write("full:" + str(bb_size) + "\n")
                ts.write("kdg:" + str(fulltraining_size) + "\n")
                ts.write("rsp:" + str(rsp_size) + "\n")
                ts.write("rss:" + str(rss_size) + "\n")
            #ts.write("rsp_bb:" + str(rsp_bb_size) + "\n")
            #ts.write("rss_bb:" + str(rss_bb_size) + "\n")

        # ######################STAGE-4########################
        # fourth stage: generate the final RSDG in XML format
        if model != "offline":
            if not RSRUN:
                default_xml_path = completeXML(appInfo.APP_NAME, xml,
                                               cost_rsdg, mv_rsdgs[-1], model)
            else:
                default_xml_path = completeXML(appInfo.APP_NAME, xml,
                                               costrsdg_rs, mvrsdgs_rs[-1],
                                               model)
                cost_path = costpath_rs
                mv_paths = mvpaths_rs

        # cleaning
        try:
            os.system("rm *.log")
        except OSError:
            pass

        # write the generated RSDG back to desc file
        if True:  # always write the run config
            with open(
                    appInfo.OUTPUT_DIR_PREFIX + appInfo.APP_NAME +
                    "_run.config", 'w') as runFile:
                run_config = OrderedDict()
                # for missions
                run_config['basic'] = {}
                basic_config = run_config['basic']
                basic_config['app_name'] = appInfo.APP_NAME
                basic_config['cost_path'] = os.path.abspath(
                    appInfo.FILE_PATHS['COST_FILE_PATH'])
                basic_config['mv_path'] = os.path.abspath(
                    appInfo.FILE_PATHS['MV_FILE_PATH'])
                basic_config['defaultXML'] = (
                    "outputs/" + appInfo.APP_NAME + '-default.xml'
                    if model == "offline" else os.path.abspath(default_xml_path))
                # some extra
                run_config['mission'] = {}
                mission_config = run_config['mission']
                mission_config['budget'] = 0.0
                mission_config['UNIT_PER_CHECK'] = 0
                mission_config['OFFLINE_SEARCH'] = False
                mission_config['REMOTE'] = False
                mission_config['GUROBI'] = True
                mission_config['CONT'] = True
                mission_config['RAPID_M'] = False
                mission_config['RUSH_TO_END'] = False
                mission_config['MISSION_LOG'] = True

                # for custom qos
                run_config['cost_rsdg'] = (
                    "" if model == "offline" else os.path.abspath(cost_path))
                run_config['mv_rsdgs'] = (
                    [] if model == "offline"
                    else [os.path.abspath(x) for x in mv_paths[0:-1]])
                run_config['mv_default_rsdg'] = (
                    [] if model == "offline" else os.path.abspath(mv_paths[-1]))
                run_config['appMet'] = os.path.abspath(appInfo.METHODS_PATH)
                run_config['rapidScript'] = os.path.abspath('./rapid.py')
                run_config['seglvl'] = 0 if model == "offline" else seglvl
                run_config['desc'] = os.path.abspath(appInfo.DESC)
                run_config['preferences'] = (
                    [] if model == "offline" else [1.0 for _ in mv_paths[0:-1]])
                json.dump(run_config, runFile, indent=2)

        if stage == 4:
            return

        module = imp.load_source("", appInfo.METHODS_PATH)
        appMethods = module.appMethods(appInfo.APP_NAME, appInfo.OBJ_PATH)
        # update the min/max value
        updateAppMinMax(appInfo, appMethods)
        # set the run config
        appMethods.setRunConfigFile(appInfo.OUTPUT_DIR_PREFIX +
                                    appInfo.APP_NAME + "_run.config")

        if appInfo.TRAINING_CFG['qosRun']:
            if model == 'offline':
                genOfflineFact(appInfo.APP_NAME)
            report = appMethods.qosRun(OFFLINE=model == "offline")
            output_name = './outputs/' + appname + "/qos_report_" + appInfo.APP_NAME + "_" + model + ".csv"
            columns = [
                'Percentage', 'MV', 'Augmented_MV', 'Budget', 'Exec_Time'
            ]
            with open(output_name, 'w') as output:
                writer = csv.DictWriter(output, fieldnames=columns)
                writer.writeheader()
                for data in report:
                    writer.writerow(data)

        if appInfo.TRAINING_CFG['overheadRun'] and model == 'piecewise':
            for budget in OVERHEAD_RUN_BUDGETS:
                report = appMethods.overheadMeasure(budget)
                output_name = './outputs/' + appname + "/overhead_report_" + appInfo.APP_NAME + "_" + str(
                    budget) + ".csv"
                columns = [
                    'Unit', 'MV', 'Augmented_MV', 'Budget', 'Exec_Time',
                    'OverBudget', 'RC_TIME', 'RC_NUM', 'RC_by_budget',
                    'SUCCESS', 'overhead_pctg'
                ]
                with open(output_name, 'w') as output:
                    writer = csv.DictWriter(output, fieldnames=columns)
                    writer.writeheader()
                    for data in report:
                        writer.writerow(data)

    # ######SOME EXTRA MODES#############

    if mode == "genrs":
        genRS(fact, rs == "set", targetMax, targetMean)
        return 0

    if mode == "consrsdg":  # construct RSDG based on observation of RS
        populateRSDG(observed, fact, False, "linear", remote)
        return 0

    if mode == "conscontrsdg":  # construct RSDG based on observation of RS
        # get an observed file by randomly select some observation
        populateRSDG(observed, fact, True, model, remote)
        return 0
    return 0
Example #4
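# Kernelized soft-margin SVM trained with coordinate-wise gradient updates on
# the dual variables: the alphas live in a dense list (for ordering) and a
# sparse dict (for fast kernel sums); the bias is the midpoint of z_max/z_min.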
def main(C=1.0, max_iter=100, tol=0.001, show_plot=False):
     
    alpha_sparse = {} # current sparse alpha list
    alpha_s_prim = {} # previous sparse alpha list
     
    print >> sys.stderr, "loading examples..."
    height_e, width_e, cs,es = read_examples(sys.stdin)
     
    print >> sys.stderr, "example matrix:", height_e, ",", width_e
    print >> sys.stderr, "kernelizing..."
    q = compute_Q(es,height_e)
     
    alpha_s = init_alpha(height_e)
    bias = 0
    # stochastic gradient descent
    for i in range(max_iter):
        print >> sys.stderr, "\nnew iteration:", i + 1
        gamma = 1
         
        # sort alpha list in reversed order
        alpha_s.sort(None, None, True)
        print >> sys.stderr, alpha_s[0:30]
        print >> sys.stderr, 'sparsity: ', len(alpha_sparse),':',height_e
         
        alpha_s_prim = alpha_sparse.copy()
         
        z_max = float("-infinity")
        z_min = float("infinity")
         
        for id in range(len(alpha_s)):
            # update from the largest alpha
            alpha = alpha_s[id][0]
            j = alpha_s[id][1]
            t = 0.0
             
            for k in alpha_sparse.keys():
                t += cs[k]* alpha_sparse[k] * ret_Q(q,j,k)
            # check z_max and z_min for bias computation
            if cs[j]>0:
                if t < z_min:
                    z_min = t
            else:
                if t > z_max:
                    z_max = t
                     
            learning_rate = gamma * (1/ret_Q(q,j,j))
            delta = learning_rate * ( 1 - t * cs[j] )
             
            # check for soft-margin
            alpha += delta
            if alpha < 0:
                alpha = 0.0
            if alpha > C:
                alpha = C
             
            # update the dense alpha list
            alpha_s[id] = alpha,j
             
            # do update for sparse alpha list
            if math.fabs(alpha - 0.0) >= 1e-10:
                alpha_sparse[j]=alpha
            else:
                if j in alpha_sparse:
                    del alpha_sparse[j]
        # get bias
        bias = (z_max+z_min)/2.0
 
        # check for tolerance
        tol1 = tolerance(alpha_sparse, alpha_s_prim, float(height_e))
             
        print >> sys.stderr, "tolerance:", tol1
        if tol1 < tol:
            print >> sys.stderr, "\nfinished in", i + 1, "iterations"
            break
     
    svm_res = {'sv_s': [], 'id_s': [], 'alpha_s': []}
    # support vectors
    for id, alpha in alpha_sparse.items():
        svm_res['sv_s'].append(es[id])
        svm_res['id_s'].append(id)
        svm_res['alpha_s'].append(cs[id]*alpha)
    svm_res['bias'] = bias
    # plot graph if needed
    if show_plot:
        plot.draw(cs, es, svm_res)
    return svm_res
Example #5
def test_grid(image=lum_img):
    # Alternative test images: random_image, graded_image, random_row,
    # light_image, dark_image.
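    # Drive one photoreceptor per pixel with a constant current, couple each
    # photoreceptor to a ganglion cell, step the network, then normalize and
    # draw the recorded ganglion activity next to the input image.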
    height = len(image)
    width = len(image[0])

    neuron_factory = NeuronFactory()
    photoreceptor_grid = neuron_factory.create_neuron_grid(width, height,
                        neuron_type=NeuronTypes.PHOTORECEPTOR, record=False)
    ganglion_grid = neuron_factory.create_neuron_grid(width, height, record=True)

    neuron_data = []

    # Create drivers and connect ganglion cells.
    for i in xrange(height):
        #print(image[i][:width])
        for j in xrange(width):
            neuron_factory.create_synapse(photoreceptor_grid[i][j], ganglion_grid[i][j],
                strength=100)
            neuron_factory.register_driver(
                photoreceptor_grid[i][j],
                ConstantDriver(current=-image[i][j]*255, delay=10))
            #print(image[i][j]*255)

    for _ in xrange(args.iterations):
        neuron_factory.step()

    '''
    photo_activity = []
    for row in photoreceptor_grid:
        d = [neuron.get_record()[-1] for neuron in row]
        print(d)
        photo_activity.append(d)
        #print(d)
    '''

    ganglion_activity = []
    for row in ganglion_grid:
        #for neuron in row:
        #    plot([("ganglion", neuron.get_record())])
        d = [neuron.get_record(spikes=True) for neuron in row]
        #print([x for x in row[0].get_record()])
        ganglion_activity.append(d)

        #for neuron in row:
        #    ganglion_activity.append(neuron.get_record())

    maximum = max(max(row) for row in ganglion_activity)
    minimum = min(min(row) for row in ganglion_activity)
    for row in xrange(len(ganglion_activity)):
        print(ganglion_activity[row])
        for col in xrange(len(ganglion_activity[row])):
            ganglion_activity[row][col] = \
                (float(ganglion_activity[row][col]) - minimum) / maximum

    save(ganglion_activity)

    #draw((image, photo_activity, ganglion_activity), ("Input", "Photoreceptors", "Ganglion"))
    draw((image, ganglion_activity), ("Input", "Ganglion"))
    #draw((image, photo_activity), ("Input", "Photoreceptors"))
    #draw((image,), ("Input",))

Example #6
def estimate_price(km, theta0, theta1):
    return theta0 + theta1 * km

def usage():
    print("Usage: " + sys.argv[0] + " [-b]")
    sys.exit(2)

if len(sys.argv) != 1 and (len(sys.argv) != 2 or sys.argv[1] != "-b"):
    usage()
bonus = len(sys.argv) == 2
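# prompt until the user enters a numeric mileage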
loop = True
while loop:
    try:
        km = float(input("km: "))
        loop = False
    except ValueError:
        print("km must be a number")
theta0, theta1 = file.read_theta()
data = file.read_data()
min_km = min(data, key=lambda t: t[0])[0]
max_km = max(data, key=lambda t: t[0])[0]
km = train.scale_km(km, min_km, max_km)
print("estimated price: %.2f" % estimate_price(km, theta0, theta1))
if bonus:
    scaled_data = train.scale_data(data)
    p = precision.r_squared(scaled_data, theta0, theta1)
    print("precision: %.2f%%" % (p * 100))
    plot.draw(data, theta0, theta1)
Example #7
def onDragCompletion(self):
    print("START: (%d, %d), END: (%d, %d)" %
          (self.START_NODE[0], self.START_NODE[1], self.END_NODE[0],
           self.END_NODE[1]))
    setRect(self.START_NODE, self.END_NODE, self.mask)
    plot.draw()
Example #8
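# Python 3 variant of the SVM trainer from Example #4: the same dual
# coordinate updates, with diagnostics written via print(..., file=sys.stderr).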
def main(C=1.0, max_iter=100, tol=0.001, show_plot=False):
    
    alpha_sparse = {} # current sparse alpha list
    alpha_s_prim = {} # previous sparse alpha list
    
    print("loading examples...", file=sys.stderr)
    height_e, width_e, cs,es = read_examples(sys.stdin)
    
    print("example matrix:", height_e, ",", width_e, file=sys.stderr)
    print("kernelizing...", file=sys.stderr)
    q = compute_Q(es,height_e)
    
    alpha_s = init_alpha(height_e)
    bias = 0
    # stochastic gradient descent
    for i in range(max_iter):
        print("\nnew iteration:", i + 1, file=sys.stderr)
        gamma = 1
        
        # sort the alpha list in descending order
        alpha_s.sort(reverse=True)
        print(alpha_s[0:30], file=sys.stderr)
        print('sparsity:', len(alpha_sparse), ':', height_e, file=sys.stderr)
        
        alpha_s_prim = alpha_sparse.copy()
        
        z_max = float("-infinity")
        z_min = float("infinity")
        
        for id in range(len(alpha_s)):
            # update from the largest alpha
            alpha = alpha_s[id][0]
            j = alpha_s[id][1]
            t = 0.0
            
            for k in alpha_sparse.keys():
                t += cs[k]* alpha_sparse[k] * ret_Q(q,j,k)
            # check z_max and z_min for bias computation
            if cs[j]>0:
                if t < z_min:
                    z_min = t
            else:
                if t > z_max:
                    z_max = t
                    
            learning_rate = gamma * (1/ret_Q(q,j,j))
            delta = learning_rate * ( 1 - t * cs[j] )
            
            # check for soft-margin
            alpha += delta
            if alpha < 0:
                alpha = 0.0 
            if alpha > C:
                alpha = C
            
            # update the dense alpha list
            alpha_s[id] = alpha,j
            
            # do update for sparse alpha list
            if math.fabs(alpha - 0.0) >= 1e-10:
                alpha_sparse[j]=alpha
            else:
                if j in alpha_sparse:
                    del alpha_sparse[j]
        # get bias
        bias = (z_max+z_min)/2.0

        # check for tolerance
        tol1 = tolerance(alpha_sparse, alpha_s_prim, float(height_e))
            
        print("tolerance:", tol1, file=sys.stderr)
        if tol1 < tol:
            print("\nfinished in", i + 1, "iterations", file=sys.stderr)
            break
    
    svm_res = {'sv_s': [], 'id_s': [], 'alpha_s': []}
    # support vectors
    for id, alpha in alpha_sparse.items():
        svm_res['sv_s'].append(es[id])
        svm_res['id_s'].append(id)
        svm_res['alpha_s'].append(cs[id]*alpha)
    svm_res['bias'] = bias
    # plot graph if needed
    if show_plot:
        plot.draw(cs, es, svm_res)
    return svm_res
Example #9
                    del alpha_sparse[j]
        # get bias
        bias = (z_max+z_min)/2.0
 
        # check for tolerance
        tol1 = tolerance(alpha_sparse, alpha_s_prim, float(height_e))
             
        print >> sys.stderr, "tolerance:", tol1
        if tol1 < tol:
            print >> sys.stderr, "\nfinished in", i + 1, "iterations"
            break
     
    svm_res = {'sv_s': [], 'id_s': [], 'alpha_s': []}
    # support vectors
    for id, alpha in alpha_sparse.items():
        svm_res['sv_s'].append(es[id])
        svm_res['id_s'].append(id)
        svm_res['alpha_s'].append(cs[id]*alpha)
    svm_res['bias'] = bias
 
    # plot graph if needed
    if show_plot:
        plot.draw(cs, es, svm_res)
    return svm_res
 
if __name__ == "__main__":
    t = main()
    print 'support vectors:', t['sv_s']
    print 'example IDs:', t['id_s']
    print 'lagrange multipliers:',t['alpha_s']
    print 'bias:', t['bias']
Example #10
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt

import settings
import fin_conf
import plot

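# Build the sampling grid, hook the interactive IO manager up to the plot's
# figure, then draw and enter the Matplotlib event loop.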
x = np.linspace(0.0, settings.CANVAS_SIZE_X, settings.CXN)
y = np.linspace(0.0, settings.CANVAS_SIZE_Y, settings.CYN)

iom = fin_conf.IO_Manager()
iom.connect(plot.fig)

plot.draw()

plt.show()
Example #11
import parser
import calcmass
import plot

if __name__ == "__main__":
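    # read the input four-vectors, compute the masses, then draw the result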
    fourv, mass, graph = parser.io()
    calcmass.calc(fourv, mass)
    plot.draw(mass, graph)
Example #12
from copy import deepcopy
from plot import draw
from prgc import intersect_polygons
from helpers import *

# Load some sample polygons
dcel, polygon1, polygon2 = load_from_file("inputs/square_tri.txt")

p1_points = face_to_point_list(polygon1)
p2_points = face_to_point_list(polygon2)

# Intersect them
result = intersect_polygons(dcel, polygon1, polygon2)
result_points = [face_to_point_list(poly) for poly in result]

# Show the result
draw(p1_points, p2_points, result_points)