def setInitState(self, initState=None):
    """Store the initial game state and replace the algorithm *name* in
    ``self.algo`` with a constructed ``algorithms.Algorithms`` instance.

    ``self.algo`` is expected to hold one of the strings ``"dqn"``,
    ``"semi-grad-sarsa"``, ``"closest-greedy"`` or ``"random"`` on entry.
    """
    self.gameState = initState
    # refresh self.state from the new game state before sizing the algorithm
    self.stateToFeatures()

    num_actions = self.numAngles * self.numForces
    if self.algo == "dqn":
        # dqn does not take the angle/force breakdown, only the flat action count
        self.algo = algorithms.Algorithms(
            "dqn", self.state.size, num_actions, self.state)
    elif self.algo in ("semi-grad-sarsa", "closest-greedy", "random"):
        # the remaining algorithms share an identical construction signature
        self.algo = algorithms.Algorithms(
            self.algo, self.state.size, num_actions, self.state,
            numAngles=self.numAngles, numForces=self.numForces)
def TestCorrelation(item_user_matrix, user):
    """Recommend items for *user* based on item correlation.

    Gathers the correlated items for every item the user has rated,
    de-duplicated in first-seen order, then filters out the items the
    user has already rated.
    """
    algo = algorithms.Algorithms()
    algo.setMatrix(item_user_matrix)

    # items this user has rated: non-null entries of the user's row
    user_row = item_user_matrix.iloc[item_user_matrix.index.get_loc(user), :]
    rated_items = user_row.index[user_row.notnull()]

    recommendations = []
    for rated_item in rated_items:
        # keep only first occurrences to stay unique while preserving order
        for candidate in algo.calculateCorrelation(rated_item):
            if candidate not in recommendations:
                recommendations.append(candidate)

    # drop anything the user has already seen
    return [item for item in recommendations if item not in rated_items]
def UpdateVideoItem(self, item): """ Accepts an item. It returns an updated item. """ #logFile.debug('starting UpdateVideoItem for %s (%s)',item.name, self.channelName) item.thumb = self.CacheThumb(item.thumbUrl) # open the url to read the media url data = uriHandler.Open(item.url, pb=False) # create the algorithm helper algHelper = algorithms.Algorithms() results = common.DoRegexFindAll(self.mediaUrlRegex, data) megavideoResults = common.DoRegexFindAll( '<param name="movie" value="([^"]+)"></param>', data) veohResults = common.DoRegexFindAll( 'src="http://www.veoh.com/[^?]+\?permalinkId=([^&]+)', data) googleResults = common.DoRegexFindAll( '(http://video.google.com/googleplayer.swf\?docId=[^"]+)"', data) # First give it a try using the default regex if len(results) > 0: item.mediaurl = results[-1] logFile.debug("MediaUrl found: %s", item.mediaurl) item.complete = True # If there were no results, try megavideo.com elif len(megavideoResults) > 0: url = megavideoResults[-1] url = algHelper.DecodeItemUrl(url) data = uriHandler.Open(url, pb=True) item.mediaurl = algHelper.ExtractMediaUrl(url, data) # then try veoh elif len(veohResults) > 0: url = "http://www.veoh.com/videos/%s?cmpTag" % veohResults[-1] url = algHelper.DecodeItemUrl(url) data = uriHandler.Open(url, pb=True) item.mediaurl = algHelper.ExtractMediaUrl(url, data) # then google elif len(googleResults) > 0: url = googleResults[-1] url = algHelper.DecodeItemUrl(url) data = uriHandler.Open(url, pb=True) item.mediaurl = algHelper.ExtractMediaUrl(url, data) # If all else fails, return an error else: item.mediaurl = "" logFile.error("MediaUrl not found in url: %s", item.url) item.complete = False logFile.debug("%s was updated with mediaurl: %s", item.name, item.mediaurl) return item
def get_keyword():
    """Run keyword extraction on a crawled item posted as JSON.

    Expects a JSON body with ``title``, ``description`` and
    ``uniqueCrawledId``. Aborts with 400 when none of the three is
    supplied; otherwise returns the serialized algorithm result.
    """
    # Use .get() instead of [] so a missing key yields the intended 400
    # response rather than an unhandled KeyError (HTTP 500).
    payload = request.json or {}
    query_str = payload.get('title')
    data_id = payload.get('description')
    crawledId = payload.get('uniqueCrawledId')

    if not query_str and not data_id and not crawledId:
        return abort(
            400,
            errors="You need to send parameter text with query Id or requestId")

    return json.dumps(algo.Algorithms().initialize(query_str, data_id, crawledId))
def test():
    """Debug endpoint: run the algorithm on query-string parameters.

    Requires ``text``; ``amazonDataId`` defaults to 111111 and
    ``uniqueCrawledId`` to 0 when absent. ``type`` is 'D' only when
    explicitly requested, otherwise 'T'.
    """
    query_str = request.args.get('text')
    if not query_str:
        return json.dumps({"error": "text parameter is missing."})

    # Single lookup per parameter; `or` keeps the original falsy-fallback
    # semantics. 0 replaces the confusing `000000` literal (same value).
    data_id = request.args.get('amazonDataId') or 111111
    crawledId = request.args.get('uniqueCrawledId') or 0

    # Renamed from `type` to avoid shadowing the builtin; the extra
    # truthiness check was redundant since `== 'D'` already implies it.
    algo_type = 'D' if request.args.get('type') == 'D' else 'T'

    return json.dumps(algo.Algorithms().process_algorithm(
        query_str, data_id, crawledId, 0, algo_type))
def __init__(self, nodeDict, adjMatrix, instanceName, solution):
    """Build the problem instance: store the graph, enumerate the
    upper-triangle edges with their weights, and attach the bounds and
    algorithms helpers."""
    self.nodeDict = nodeDict
    self.adjMatrix = adjMatrix
    self.counts = len(nodeDict)
    self.instanceName = instanceName
    self.solution = solution

    # every unordered node pair (i < j) mapped to its adjacency weight
    self.edgeDict = {
        (i, j): self.adjMatrix[i, j]
        for i in range(self.counts)
        for j in range(i + 1, self.counts)
    }

    self.Bounds = bounds.Bounds(self.nodeDict, self.adjMatrix)
    self.solutions = algorithms.Algorithms(
        self.nodeDict, self.adjMatrix, self.counts, self.edgeDict)
def CenteredCosineSimilarity(item_user_matrix, user):
    """Recommend items for *user* via centered cosine similarity.

    Gathers the similar items returned for every item the user has
    rated, de-duplicated in first-seen order, then filters out the
    items the user has already rated.
    """
    # initialise the similarity engine with the full ratings matrix
    algo = algorithms.Algorithms()
    algo.setMatrix(item_user_matrix)

    # items this user has rated: non-null entries in the user's column
    user_column = item_user_matrix[user]
    rated_items = user_column[user_column.notnull()].index

    recommendations = []
    for rated_item in rated_items:
        similar = algo.calculateCosineSimilarity(rated_item)
        # keep only first occurrences to stay unique while preserving order
        for candidate in similar[0]:
            if candidate not in recommendations:
                recommendations.append(candidate)

    # drop anything the user has already seen
    return [item for item in recommendations if item not in rated_items]
print("Digits dataset")
print("************")

# Loading the sklearn handwritten-digits dataset
digits = datasets.load_digits()
x = digits.data
y = digits.target

# Standardizing data (zero mean, unit variance per feature)
sc_x = StandardScaler()
x_std = sc_x.fit_transform(x)

# Splitting data 70/30 with a fixed seed for reproducibility
x_std_tr, x_std_ts, y_tr, y_ts = train_test_split(x_std, y, test_size=0.3, random_state=1)

algorithm = algorithms.Algorithms(n_components=2, kernel="rbf", gamma=15, c=100, seed=1, x_train=x_std_tr, y_train=y_tr, x_test=x_std_ts)

# ## Regular Logistic Regression (baseline, full dimensionality) ##
orig_logreg_y_ts_pred = algorithm.run_logisticreg()
accuracy = np.sum(orig_logreg_y_ts_pred == y_ts) / len(y_ts)
print("LogReg accuracy before dim reduction: " + str(accuracy))

# Compare logistic regression after each dimensionality-reduction method
for name in ["PCA", "LDA", "Kernel_PCA"]:
    print("\n\n" + name)
    # Reducing the dimensions via dynamic dispatch (e.g. "run_kernel_pca")
    x_tr_reduc, x_ts_reduc = algorithm.call("run_" + name.lower())
    # Logistic Regression on reduced dimensions
    # NOTE(review): attributes are x_tr/x_ts here but the constructor took
    # x_train/x_test — confirm the Algorithms class expects both spellings.
    algorithm.x_tr, algorithm.x_ts = x_tr_reduc, x_ts_reduc
    logreg_y_ts_pred = algorithm.run_logisticreg()