Example 1
    def forward(self, in_batch, training=False):
        if training:  # If this is the training phase, do no processing
            return training

        out, _ = utils.reduce(in_batch)
        out = np.argmax(out, axis=1)
        return out
Example 2
    def local_search(self, x_initial, f_initial):
        # Initialise lists to store perturbed values and their fitnesses
        x_list, f_list = [], []
        # Iterate through each dimension of x
        for d in range(self.N_DIMS):
            # Try positive and negative perturbations
            for sign in [+1.0, -1.0]:
                # Make copy of current location and add perturbation
                xp = x_initial.copy()
                xp[d] += sign * self.delta
                # Make sure the perturbed x-value is within limits
                xp[xp > self.X_MAX] = self.X_MAX
                xp[xp < self.X_MIN] = self.X_MIN
                # Check if x is tabu
                if not self.x_in_stm(xp):
                    # Store and evaluate
                    x_list.append(xp)
                    f_list.append(self.evaluate_objective(xp))

        # If there are any non-tabu moves available:
        if len(x_list) > 0:
            # Choose best allowed move:
            f_best = min(f_list)
            x_best = x_list[f_list.index(f_best)]
            # Record new base-point:
            self.new_base_point(x_best, f_best)
        # All moves are tabu; just return initial point
        else:
            x_best, f_best = x_initial, f_initial

        return x_best, f_best
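The helper x_in_stm is not shown in this snippet; a minimal sketch of one plausible implementation, assuming the short-term memory self.stm holds recently visited points as NumPy arrays:

import numpy as np

def x_in_stm(self, x):
    # A candidate move is tabu if it coincides with a point in the STM
    return any(np.allclose(x, s) for s in self.stm)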
Example 3
    def get_go_terms(self, aspect=None):
        # Union of the GO term IDs annotated to each chain (locus) of this PDB entry
        return utils.reduce(lambda x, y: x | y, [
            set(
                map(
                    lambda e: e["GO_ID"],
                    db.goa_pdb.find({
                        "PDB_ID": self.pdb,
                        "Chain": locus.chain
                    }))) for locus in self.loci
        ], set())
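Here utils.reduce appears to follow the functools.reduce signature (function, iterable, initializer); a minimal standalone sketch of the same set-union fold:

from functools import reduce

go_id_sets = [{"GO:0003677"}, {"GO:0003677", "GO:0005634"}]  # hypothetical data
merged = reduce(lambda x, y: x | y, go_id_sets, set())
# merged == {"GO:0003677", "GO:0005634"}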
Example 4
import numpy as np
from sklearn.preprocessing import StandardScaler
# config, utils, do_POD, compute_features, compute_targets and
# compute_error_POD_sq are project-local helpers defined elsewhere.


def main():

    utils.set_up_data_directories()

    snapshots = {}
    parameters = {}
    for dataset in config.datasets:
        # shape: N_h x N
        # i.e. #DOFs x #snapshots
        snapshots[dataset] = utils.load_snapshots(dataset)
        parameters[dataset] = utils.load_parameters(dataset)

    for component in config.components:
        assert config.datasets[0] == 'train', 'The first dataset must be train'
        print(f'\nComputing targets for component {component}')

        for dataset in config.datasets:
            # Snapshot matrix, non-centered
            S_n = utils.reduce(snapshots[dataset], component)

            if dataset == 'train':
                # Compute and store ..
                # .. mean and POD
                S_mean = np.mean(S_n, axis=1)
                S = np.array([col - S_mean for col in S_n.T]).T
                V, D = do_POD(S)
                utils.save_POD(V, D, S_mean, component)
                # .. scaler
                scaler = StandardScaler()
                scaler.fit(parameters[dataset])
                utils.save_scaler(scaler)
            else:
                # Compute centered snapshot matrix
                S = np.array([col - S_mean for col in S_n.T]).T

            # Now V, D, S_mean and scaler are available

            # Compute and store ..
            # .. features
            features = compute_features(scaler, parameters[dataset])
            utils.save_features(dataset, features)
            # .. targets
            targets = compute_targets(S, V, D)
            utils.save_targets(dataset, component, targets)
            # .. projection error
            err_POD_sq = compute_error_POD_sq(S, V, D)
            utils.save_error_POD_sq(dataset, component, err_POD_sq)
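do_POD is not shown in this snippet; a minimal sketch, assuming POD is computed via a thin SVD of the centered snapshot matrix, so that V holds the spatial modes and D the singular values (matching how V, D and S_mean are recombined downstream):

import numpy as np

def do_POD(S):
    # Thin SVD of the centered snapshots: S = V @ np.diag(D) @ Wt
    V, D, Wt = np.linalg.svd(S, full_matrices=False)
    return V, D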
Example 5
    def reduceCoefficients(self):
        allTerms = self.num.terms + self.den.terms
        coefficients = [term if utils.isnumeric(term) else term.coefficient
                        for term in allTerms]

        def gcd_help(x, y):
            x, y = map(abs, (x, y))
            if y > x:
                return gcd_help(y, x)
            if y == 0:
                return x
            return gcd_help(y, x % y)

        gcd = reduce(gcd_help, coefficients)
        # Divide every term in place. Rebinding the loop variable (term /= gcd)
        # would not update the underlying lists, so index into them directly.
        for terms in (self.num.terms, self.den.terms):
            for i, term in enumerate(terms):
                if utils.isnumeric(term):
                    terms[i] = term / gcd
                else:
                    term.set_coefficient(term.coefficient / gcd)
        return self
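On Python 3 the bare reduce above needs from functools import reduce; for integer coefficients the same fold can use the standard library's math.gcd:

from functools import reduce
from math import gcd

coefficients = [12, 18, -24]  # hypothetical integer coefficients
g = reduce(gcd, (abs(c) for c in coefficients))
# g == 6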
Example 7
    def new_base_point(self, x, f=None):
        # Store location
        self.x_list.append(x)
        # If the fitness hasn't already been evaluated, then do so:
        if f is None: f = self.evaluate_objective(x)
        # Add to STM
        self.stm.append(x)
        # If the STM has exceeded max length, remove earliest items:
        while len(self.stm) > self.N_STM:
            self.stm.pop(0)
        # Check if x is in MTM:
        if not self.x_in_mtm(x):
            self.mtm_x.append(x)
            self.mtm_f.append(f)
            # If the MTM has exceeded max length, remove least fit items:
            while len(self.mtm_x) > self.N_MTM:
                i_worst = self.mtm_f.index(max(self.mtm_f))
                self.mtm_x.pop(i_worst)
                self.mtm_f.pop(i_worst)

        return f
Example 8
    V = utils.load_POD_V(component)
    S_mean = utils.load_POD_S_mean(component)
    D, denom_sq = utils.load_POD_D_and_denom_sq(component)

    model_constructor = utils.models[model_key]
    model = model_constructor()
    model.load(utils.model_dir, component)
    outputs = model.evaluate(features)

    for i, idx in enumerate(idxs):
        ## Infer reduced coefficients
        y_s = outputs[i]
        ## Load truth
        snapshot = utils.load_snapshot(dataset, idx)
        ## Mask other components
        solution_TRUE = utils.reduce(snapshot, component)
        ## Project coefficients back to full solution space
        solution_PRED = S_mean + np.matmul(V, np.matmul(np.diag(D), y_s))
        solution_PROJ = S_mean + np.matmul(
            V, np.matmul(V.T, solution_TRUE - S_mean))

        eps_pod = (((solution_TRUE - solution_PROJ)**2).sum() / denom_sq)**.5
        eps_reg = (((solution_PRED - solution_PROJ)**2).sum() / denom_sq)**.5
        eps_podreg = (((solution_TRUE - solution_PRED)**2).sum() /
                      denom_sq)**.5
        # Sanity check: eps_pod**2 + eps_reg**2 should equal eps_podreg**2
        # print(f'Sanity check for {idx}: {eps_pod}^2 + {eps_reg}^2 = '
        #       f'{eps_pod**2 + eps_reg**2} vs {eps_podreg**2} = {eps_podreg}^2')

        if idx in predictions:
            predictions[idx] += utils.expand(solution_PRED, component)
            projections[idx] += utils.expand(solution_PROJ, component)
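The commented-out sanity check above rests on orthogonality: solution_TRUE - solution_PROJ is orthogonal to the subspace spanned by V, while solution_PRED - solution_PROJ lies inside it, so the squared errors satisfy eps_pod**2 + eps_reg**2 == eps_podreg**2. A minimal numeric sketch with a hypothetical orthonormal V:

import numpy as np

rng = np.random.default_rng(0)
V, _ = np.linalg.qr(rng.standard_normal((10, 3)))  # orthonormal columns
x_true = rng.standard_normal(10)
x_pred = V @ rng.standard_normal(3)   # any point in span(V)
x_proj = V @ (V.T @ x_true)           # projection of x_true onto span(V)

lhs = ((x_true - x_proj)**2).sum() + ((x_pred - x_proj)**2).sum()
rhs = ((x_true - x_pred)**2).sum()
assert np.isclose(lhs, rhs)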
Example 9
dep['weeknum'] = pd.to_datetime(dep.index)
# Series.dt.weekofyear is deprecated; use ISO calendar weeks instead
dep['weeknum'] = dep.weeknum.dt.isocalendar().week

wk = dep.resample('W').mean()
wd = dep.groupby(dep.weekday).mean()  # assumes a 'weekday' column set earlier

# save the mean and stdv for reconstruction
std_wk = wk.std(axis=0)
med_wk = wk.mean(axis=0)
std_wd = wd.std(axis=0)
med_wd = wd.mean(axis=0)

# --- low-frequency trends:
#     smooth out the normalized station-average
#     by one of 2 Fourier transform methods
rwk = utils.reduce(data=wk)
weeknum_trend = rwk.mean(axis=1)

# method 1: by clearing high-frequency modes
wnts_1 = utils.smooth_method_1(lca=weeknum_trend, fs=4)

# method 2: by taking the strongest mode and then reconstructing back
wnts_2 = utils.smooth_method_2(lca=weeknum_trend, n_harm=4)
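Neither smoothing helper is shown; a minimal sketch of method 1, assuming it low-pass filters by zeroing all but the first fs modes of a real FFT:

import numpy as np

def smooth_method_1(lca, fs):
    spec = np.fft.rfft(np.asarray(lca, dtype=float))
    spec[fs:] = 0  # clear the high-frequency modes
    return np.fft.irfft(spec, n=len(lca))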

fig = plt.figure(figsize=(12, 6))
for r in range(rwk.shape[1]):
    plt.scatter(x=np.arange(weeknum_trend.size),
                y=rwk.values[:, r],
                marker='o',
                color='k',
                s=30)
Example 10
def main(args):
    logger.debug('Connecting to %s:%d', args.hostname, args.port)
    sock = socket(AF_INET, SOCK_STREAM)
    sock.connect((args.hostname, args.port))

    logger.debug('Connected to %s:%d', args.hostname, args.port)
    json_msg = {"task": "register", "id": args.port}

    sock.send(json.dumps(json_msg).encode('UTF-8'))
    backup = None
    val = None  # last reduce result; referenced again when handling 'die'

    while True:

        req = sock.recv(8000000)  # test for different size values
        timeout = 0

        if not req:
            time.sleep(4)

            if backup is not None:

                while True:
                    try:
                        if timeout >= 60:  # try to connect to backup
                            logger.info("Timed out - giving up connection..")
                            sock.close()
                            return
                        logger.debug('Connecting to %s:%d', backup[0],
                                     backup[1])
                        sock = socket(AF_INET, SOCK_STREAM)
                        sock.connect((backup[0], backup[1]))

                        logger.debug('Connected to %s', (backup[0], backup[1]))
                        json_msg = {"task": "register", "id": backup[1]}

                        sock.send(json.dumps(json_msg).encode('UTF-8'))
                        backup = None  # if we get here it means we already connected to the backup
                        break
                    except OSError:
                        logger.info(
                            "Coordinator died, attempting to reconnect")
                        time.sleep(1)
                        timeout = timeout + 1

            else:
                while True:
                    try:
                        if timeout >= 60:  # try and connect to home addr
                            logger.info("Timed out - giving up connection..")
                            sock.close()
                            return
                        logger.debug('Connecting to %s:%d', args.hostname,
                                     args.port)
                        sock = socket(AF_INET, SOCK_STREAM)
                        sock.connect((args.hostname, args.port))

                        logger.debug('Connected to %s',
                                     (args.hostname, args.port))
                        json_msg = {"task": "register", "id": 8765}

                        sock.send(json.dumps(json_msg).encode('UTF-8'))
                        backup = None  # reconnected to the original coordinator
                        break
                    except OSError:
                        logger.info(
                            "Coordinator died, attempting to reconnect")
                        time.sleep(1)
                        timeout = timeout + 1

        else:
            decoded = req.decode()
            str_req = json.loads(decoded)

            if str_req['task'] == 'map_request':
                logger.debug('Received a map_request, sending a map reply')
                blob = str_req['blob']
                pairs = []  # avoid shadowing the built-in map()

                for w in blob:
                    if w:
                        pairs.append((w, 1))

                j = {"task": "map_reply", "value": pairs}
                json_msg = json.dumps(j).encode('latin-1')

                sock.send(json_msg)

            elif str_req['task'] == 'reduce_request':
                logger.debug(
                    'Received a reduce_request sending a reduce reply')
                val = reduce(str_req['value'][0], str_req['value'][1])
                j = {"task": "reduce_reply", "value": val}
                json_msg = json.dumps(j).encode('latin-1')

                sock.send(json_msg)

            elif str_req['task'] == 'backup_update':
                backup = str_req['c']
                logger.info("Acknowledged the backup address")

            elif str_req['task'] == 'die':
                j = {"task": "die", "value": val}
                json_msg = json.dumps(j).encode('latin-1')

                sock.send(json_msg)
                sys.exit()

            else:
                logger.debug('Error: Unrecognized operation')
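The reduce() invoked for 'reduce_request' is not shown; a plausible sketch, assuming it merges two lists of (word, count) pairs produced by the map step:

from collections import Counter

def reduce(left, right):
    # Hypothetical reducer: sum the count of each word across both lists
    counts = Counter()
    for word, n in list(left) + list(right):
        counts[word] += n
    return sorted(counts.items())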
Example 11
Y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0)

# Fit the model
all_mean_fit_time, all_grid_search, test_score, best_parameters = fit_model(
    X, Y, parameters, 'All')
print("All - Best Test Score:", test_score)
print("All - Best Parameters:", best_parameters)

# Create datapoints plots
make_plots(X, Y, all_grid_search, 'All')

# Create scores heatmap
make_score_heatmap(parameters, param1, param2, all_grid_search, 'All')

# Get the reduced dataset
Xred, Yred, rtime = reduce(X, Y, Kneighbors, Kestimate)

# Fit the model on the reduced dataset
reduced_mean_fit_time, reduced_grid_search, test_score, best_parameters = fit_model(
    Xred, Yred, parameters, 'Reduced')
print("Reduced - Best Test Score:", test_score)
print("Reduced - Best Parameters:", best_parameters)
print("Reduce Time(sec):", rtime)

# Create datapoints plots
make_plots(Xred, Yred, reduced_grid_search, 'Reduced')

# Create scores heatmap
make_score_heatmap(parameters, param1, param2, reduced_grid_search, 'Reduced')

# Create boxplots of the training times