Example 1
def predict_rotten_tomatoes(movie):
    link = ''
    name_split = movie.name.split()
    for t in name_split:
        word = []
        word_as_list = list(t)
        # Keep only the alphanumeric characters of the word
        for c in word_as_list:
            if c.isalnum():
                c = unidecode(c)
                c = c.lower()
                word.append(c)
        # Convert the character list back to a string
        word_as_string = ''.join(word)
        # Rotten Tomatoes URLs are in the format: https://www.rottentomatoes.com/m/word1_word2_word3
        # Each word in the title is separated by an underscore
        # No underscore at the end of URL
        link = ''.join([
            link, word_as_string, '_'
        ]) if len(word) > 0 else ''.join([link, word_as_string])
    new_link = urllib.parse.urljoin('https://www.rottentomatoes.com/m/',
                                    link[:-1])
    if not utils.check_link(new_link):
        link = ''.join([link[:-1], '_', movie.year])
        new_link = urllib.parse.urljoin('https://www.rottentomatoes.com/m/',
                                        link)

    Printer.print_minus(''.join([
        "MISSING ROTTEN TOMATOES: ", movie.name, ", Predicted Link: ", new_link
    ]))

    return new_link
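
A minimal standalone sketch of the same slug-building idea (rotten_tomatoes_slug is a hypothetical helper for illustration; the Movie object, the check_link fallback and the Printer call are left out):

from unidecode import unidecode

def rotten_tomatoes_slug(title):
    # transliterate, lowercase and keep only the alphanumeric characters of each word
    words = [''.join(unidecode(c).lower() for c in w if c.isalnum())
             for w in title.split()]
    # join the non-empty words with underscores, matching the URL format described above
    return '_'.join(w for w in words if w)

# rotten_tomatoes_slug('Spider-Man: Far From Home') -> 'spiderman_far_from_home'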
Example 2
def use(user, _):
    """Use an Antidote."""
    if 'Poison' in user.stats['Effects']:
        user.stats['Effects'].remove('Poison')
        Printer.print_ui('  {} is no longer poisoned.'.format(user.name))
    else:
        Printer.print_ui('  It\'s ineffective!')
Example 3
def parse(file):
    films = {}
    with open(file) as json_file:
        data = json.load(json_file)
        for film in data['films']:
            movie_title = film['Name']
            movie_wiki_link = film['Link']
            movie = Movie(movie_wiki_link, movie_title)
            films[movie_title] = movie

            Printer.print_minus(''.join(
                ['PARSING: ', str(len(films)), ". ", movie_title]))

            if film['IMDB'] is not None:
                imdb_link = film['IMDB']['Link']
                movie.imdb = IMDB(imdb_link, movie_title)

            if film['Rotten Tomatoes'] is not None:
                rt_link = film['Rotten Tomatoes']['Link']
                movie.rotten_tomatoes = RottenTomatoes(rt_link, movie_title)

            if film['Metacritic'] is not None:
                meta_link = film['Metacritic']['Link']
                movie.metacritic = Metacritic(meta_link, movie_title)

            if film['Box Office Mojo'] is not None:
                bom_link = film['Box Office Mojo']['Link']
                movie.box_office_mojo = BoxOfficeMojo(bom_link)

    return films
Example 4
    def get_frame_covisibles(self, frame):
        points = frame.get_matched_good_points()
        #keyframes = self.get_local_keyframes()
        #assert len(points) > 0
        if len(points) == 0:
            Printer.red('get_frame_covisibles - frame without points')

        # for all map points in the frame, check in which other keyframes they are seen
        # increase counter for those keyframes
        viewing_keyframes = [
            kf for p in points for kf in p.keyframes() if not kf.is_bad
        ]  # if kf in keyframes]
        viewing_keyframes = Counter(viewing_keyframes)
        kf_ref = viewing_keyframes.most_common(1)[0][0]
        #local_keyframes = viewing_keyframes.keys()

        # include also some not-already-included keyframes that are neighbors to already-included keyframes
        for kf in list(viewing_keyframes.keys()):
            second_neighbors = kf.get_best_covisible_keyframes(
                Parameters.kNumBestCovisibilityKeyFrames)
            viewing_keyframes.update(second_neighbors)
            children = kf.get_children()
            viewing_keyframes.update(children)
            if len(viewing_keyframes) >= Parameters.kMaxNumOfKeyframesInLocalMap:
                break

        local_keyframes_counts = viewing_keyframes.most_common(
            Parameters.kMaxNumOfKeyframesInLocalMap)
        local_points = set()
        local_keyframes = []
        for kf, c in local_keyframes_counts:
            local_points.update(kf.get_matched_points())
            local_keyframes.append(kf)
        return kf_ref, local_keyframes, local_points
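
The reference keyframe above is simply the most common element of a collections.Counter; a toy sketch with placeholder names (real map points and keyframes are objects, not strings) shows the idea:

from collections import Counter

# each inner list holds the keyframes observing one map point (placeholder names)
observations_per_point = [['kf1', 'kf2'], ['kf1', 'kf3'], ['kf1', 'kf2']]
viewing_keyframes = Counter(kf for obs in observations_per_point for kf in obs)
kf_ref, count = viewing_keyframes.most_common(1)[0]   # ('kf1', 3): the keyframe sharing the most points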
Example 5
 def locally_optimize(self,
                      kf_ref,
                      verbose=False,
                      rounds=10,
                      abort_flag=g2o.Flag()):
     keyframes, points, ref_keyframes = self.local_map.update(kf_ref)
     print('local optimization window: ',
           sorted([kf.id for kf in keyframes]))
     print('                     refs: ',
           sorted([kf.id for kf in ref_keyframes]))
     print('                   #points: ', len(points))
     #print('                   points: ', sorted([p.id for p in points]))
     #err = optimizer_g2o.optimize(frames, points, None, False, verbose, rounds)
     err, ratio_bad_observations = optimizer_g2o.local_bundle_adjustment(
         keyframes,
         points,
         ref_keyframes,
         False,
         verbose,
         rounds,
         abort_flag=abort_flag,
         map_lock=self.update_lock)
     Printer.green('local optimization - perc bad observations: %.2f %%' %
                   (ratio_bad_observations * 100))
     return err
Example 6
    def fetch_and_prepare(self):
        """ Uses AMICleaner to retrieve candidates AMI, map and reduce """

        cleaner = AMICleaner()

        mapped_amis = cleaner.map_candidates(
            mapping_strategy=self.mapping_strategy)

        if not mapped_amis:
            return None

        candidates = []
        report = dict()

        for group_name, amis in mapped_amis.iteritems():
            group_name = group_name or ""

            if not group_name:
                report["no-tags (excluded)"] = amis
            else:
                reduced = cleaner.reduce_candidates(amis, self.keep_previous)
                if reduced:
                    report[group_name] = reduced
                    candidates.extend(reduced)

        Printer.print_report(report, self.full_report)

        return candidates
Example 7
def parse_tsv(file):
    films = {}
    with open(file) as tsv_file:
        data = csv.reader(tsv_file, delimiter="\t", quotechar='"')

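        # skip the TSV header row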
        next(data)

        for film in data:
            movie_title = film[1]
            movie_wiki_link = film[2]
            movie = Movie(movie_wiki_link, movie_title)
            films[movie_title] = movie

            Printer.print_minus(''.join(
                ['PARSING: ', str(len(films)), ". ", movie_title]))
            imdb_link = film[3]
            movie.imdb = IMDB(imdb_link, movie_title)

            rt_link = film[5]
            movie.rotten_tomatoes = RottenTomatoes(rt_link, movie_title)

            meta_link = film[6]
            movie.metacritic = Metacritic(meta_link, movie_title)

            bom_link = film[7]
            movie.box_office_mojo = BoxOfficeMojo(bom_link)

    return films
Example 8
def predict_metacritic(movie):
    link = ''
    name_split = movie.name.split()
    for t in name_split:
        word = []
        word_as_list = list(t)
        # Keep only the alphanumeric characters of the word
        for c in word_as_list:
            if c.isalnum():
                c = unidecode(c)
                c = c.lower()
                word.append(c)
        # Convert the character list back to a string
        word_as_string = ''.join(word)
        # Metacritic URLs are in the format: https://www.metacritic.com/movie/word1-word2-word3/
        # Each word in the title is separated by a dash
        # No dash at the end of URL
        link = ''.join([
            link, word_as_string, '-'
        ]) if len(word) > 0 else ''.join([link, word_as_string])
    new_link = urllib.parse.urljoin('http://www.metacritic.com/movie/',
                                    link[:-1])

    Printer.print_minus(''.join(
        ["MISSING METACRITIC: ", movie.name, ", Predicted Link: ", new_link]))

    return new_link
Example 9
 def __init__(
         self,
         num_features=kMinNumFeatureDefault,
         num_levels=3,  # number of pyramid levels for detector  
         scale_factor=1.2,  # detection scale factor (if it can be set, otherwise it is automatically computed) 
         detector_type=FeatureDetectorTypes.FAST,
         descriptor_type=FeatureDescriptorTypes.NONE,
         match_ratio_test=kRatioTest,
         tracker_type=FeatureTrackerTypes.LK):
     super().__init__(num_features=num_features,
                      num_levels=num_levels,
                      scale_factor=scale_factor,
                      detector_type=detector_type,
                      descriptor_type=descriptor_type,
                      tracker_type=tracker_type)
     self.feature_manager = feature_manager_factory(
         num_features=num_features,
         num_levels=num_levels,
         scale_factor=scale_factor,
         detector_type=detector_type,
         descriptor_type=descriptor_type)
     #if num_levels < 3:
     #    Printer.green('LkFeatureTracker: forcing at least 3 levels on LK pyr optic flow')
     #    num_levels = 3
     optic_flow_num_levels = max(kLkPyrOpticFlowNumLevelsMin, num_levels)
     Printer.green('LkFeatureTracker: num levels on LK pyr optic flow: ',
                   optic_flow_num_levels)
     # we use LK pyr optic flow for matching
     self.lk_params = dict(winSize=(21, 21),
                           maxLevel=optic_flow_num_levels,
                           criteria=(cv2.TERM_CRITERIA_EPS
                                     | cv2.TERM_CRITERIA_COUNT, 30, 0.01))
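
The dictionary above holds the standard keyword arguments of OpenCV's pyramidal Lucas-Kanade routine; a hedged sketch of how such parameters are typically consumed (the frames and keypoints below are placeholders, and the tracker's actual track method is not shown here):

import cv2
import numpy as np

prev_img = np.zeros((480, 640), dtype=np.uint8)   # previous grayscale frame (placeholder)
cur_img = np.zeros((480, 640), dtype=np.uint8)    # current grayscale frame (placeholder)
prev_pts = np.array([[[100.0, 120.0]], [[200.0, 50.0]]], dtype=np.float32)  # Nx1x2 keypoints

lk_params = dict(winSize=(21, 21), maxLevel=3,
                 criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 30, 0.01))
cur_pts, status, err = cv2.calcOpticalFlowPyrLK(prev_img, cur_img, prev_pts, None, **lk_params)
good_cur = cur_pts[status.ravel() == 1]   # keep only the points that were tracked successfully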
Example 10
def scrape(year):
    Printer.print_equal('RETRIEVING NEW DATA')
    if year == '2019':
        movies = {}
        req = requests.get('https://en.wikipedia.org/wiki/2019_in_film').text
        soup = BeautifulSoup(req, 'html.parser')
        for a in soup.find(class_='navbox-list navbox-odd hlist').findAll(
                'a', href=True):
            split_one = a['href'].split('/')
            if len(split_one) > 2:
                split_two = split_one[2].split('_')
                if len(split_two) > 4:
                    if (split_two[0] == 'List') and (split_two[1] == 'of') and (
                            split_two[-1] == '2019'):
                        movies = {
                            **movies,
                            **scrape_wikipedia(
                                ''.join([
                                    'https://en.wikipedia.org', a['href']
                                ]), year)
                        }
        return movies
    else:
        wiki_link = ''.join(
            ['https://en.wikipedia.org/wiki/', year, '_in_film'])
        return scrape_wikipedia(wiki_link, year)
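
For clarity, the href filter above keeps only the yearly "List of ... films of 2019" index pages; a worked split on a representative link (the path is illustrative) shows which pieces the conditions inspect:

href = '/wiki/List_of_American_films_of_2019'   # illustrative navbox href
split_one = href.split('/')          # ['', 'wiki', 'List_of_American_films_of_2019']
split_two = split_one[2].split('_')  # ['List', 'of', 'American', 'films', 'of', '2019']
# split_two[0] == 'List', split_two[1] == 'of', split_two[-1] == '2019'  ->  the page gets scraped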
Example 11
 def remove_points_with_big_reproj_err(self, points):
     with self._lock:
         with self.update_lock:
             #print('map points: ', sorted([p.id for p in self.points]))
             #print('points: ', sorted([p.id for p in points]))
             culled_pt_count = 0
             for p in points:
                 # compute reprojection error
                 chi2s = []
                 for f, idx in p.observations():
                     uv = f.kpsu[idx]
                     proj, _ = f.project_map_point(p)
                     invSigma2 = Frame.feature_manager.inv_level_sigmas2[
                         f.octaves[idx]]
                     err = (proj - uv)
                     chi2s.append(np.inner(err, err) * invSigma2)
                 # cull
                 mean_chi2 = np.mean(chi2s)
                 if mean_chi2 > Parameters.kChi2Mono:  # chi-square 2 DOFs (Hartley & Zisserman, pg 119)
                     culled_pt_count += 1
                     #print('removing point: ',p.id, 'from frames: ', [f.id for f in p.keyframes])
                     self.remove_point(p)
             Printer.blue("# culled map points: ", culled_pt_count)
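
Each term appended to chi2s is the squared reprojection error weighted by the inverse variance of the keypoint's pyramid level; a minimal numpy sketch with made-up numbers (uv, proj and the sigma come from the frame and feature manager in the real code):

import numpy as np

uv = np.array([312.0, 240.5])        # measured keypoint position (made-up)
proj = np.array([313.4, 239.1])      # projected map point position (made-up)
inv_sigma2 = 1.0 / (1.2 ** 2)        # inverse variance of the keypoint's octave (made-up)
err = proj - uv
chi2 = np.inner(err, err) * inv_sigma2   # compared against Parameters.kChi2Mono above (5.991 is the usual 95% value for 2 DOFs)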
Example 12
def perform(user, _):
    """Perform Harden."""
    increase = random.randint(
        2, 3 + int(0.1 * user.stats['Special'] * user.stats['Defense']))
    user.stats['Defense'] += increase
    Printer.print_ui('  {} increases its Defense by {}.'.format(
        user.name, increase))
Example 13
def use(user, _):
    """Use an Echo Screen."""
    if 'Disable' in user.stats['Effects']:
        user.stats['Effects'].remove('Disable')
        Printer.print_ui('  {} can use special moves again.'.format(user.name))
    else:
        Printer.print_ui('  It\'s ineffective!')
Example 14
    def track_reference_frame(self, f_ref, f_cur, name=''):
        if f_ref is None:
            return
        print('>>>> tracking reference %d ...' % (f_ref.id))
        # find keypoint matches between f_cur and kf_ref
        print('matching keypoints with ', Frame.feature_matcher.type.name)
        self.timer_match.start()
        idxs_cur, idxs_ref = match_frames(f_cur, f_ref)
        self.timer_match.refresh()
        self.num_matched_kps = idxs_cur.shape[0]
        print("# keypoints matched: %d " % self.num_matched_kps)
        if kUseEssentialMatrixFitting:
            # estimate camera orientation and inlier matches by fitting an essential matrix (see the limitations above)
            idxs_ref, idxs_cur = self.estimate_pose_by_fitting_ess_mat(
                f_ref, f_cur, idxs_ref, idxs_cur)

        if kUseDynamicDesDistanceTh:
            self.descriptor_distance_sigma = self.dyn_config.update_descriptor_stat(
                f_ref, f_cur, idxs_ref, idxs_cur)

        # propagate map point matches from kf_ref to f_cur  (do not override idxs_ref, idxs_cur)
        num_found_map_pts_inter_frame, idx_ref_prop, idx_cur_prop = propagate_map_point_matches(
            f_ref,
            f_cur,
            idxs_ref,
            idxs_cur,
            max_descriptor_distance=self.descriptor_distance_sigma)
        print("# matched map points in prev frame: %d " %
              num_found_map_pts_inter_frame)

        if kDebugDrawMatches:
            img_matches = draw_feature_matches(f_ref.img,
                                               f_cur.img,
                                               f_ref.kps[idx_ref_prop],
                                               f_cur.kps[idx_cur_prop],
                                               f_ref.sizes[idx_ref_prop],
                                               f_cur.sizes[idx_cur_prop],
                                               horizontal=False)
            cv2.imshow('tracking frame (no projection) - matches', img_matches)
            cv2.waitKey(1)

        # store tracking info (for possible reuse)
        self.idxs_ref = idxs_ref
        self.idxs_cur = idxs_cur

        # f_cur pose optimization using last matches with kf_ref:
        # here, we use the first guess of the f_cur pose and the propagated map point matches from f_ref (matched keypoints)
        self.pose_optimization(f_cur, name)
        # update matched map points; discard outliers detected in last pose optimization
        num_matched_points = f_cur.clean_outlier_map_points()
        print('      # num_matched_map_points: %d' %
              (self.num_matched_map_points))
        #print('     # matched points: %d' % (num_matched_points) )
        if not self.pose_is_ok or self.num_matched_map_points < kNumMinInliersPoseOptimizationTrackFrame:
            f_cur.remove_frame_views(idxs_cur)
            f_cur.reset_points()
            Printer.red(
                'failure in tracking reference %d, # matched map points: %d' %
                (f_ref.id, self.num_matched_map_points))
            self.pose_is_ok = False
Example 15
    def prepare_candidates(self, candidates_amis=None):
        """ From an AMI list apply mapping strategy and filters """

        candidates_amis = candidates_amis or self.fetch_candidates()

        if not candidates_amis:
            return None

        c = AMICleaner()

        mapped_amis = c.map_candidates(
            candidates_amis=candidates_amis,
            mapping_strategy=self.mapping_strategy,
        )

        if not mapped_amis:
            return None

        candidates = []
        report = dict()

        for group_name, amis in mapped_amis.iteritems():
            group_name = group_name or ""

            if not group_name:
                report["no-tags (excluded)"] = amis
            else:
                reduced = c.reduce_candidates(amis, self.keep_previous)
                if reduced:
                    report[group_name] = reduced
                    candidates.extend(reduced)

        Printer.print_report(report, self.full_report)

        return candidates
Example 16
 def set_parent(self, keyframe):
     with self._lock_connections:
         if self == keyframe: 
             if __debug__:
                 Printer.orange('KeyFrameGraph.set_parent - trying to set self as parent')
             return 
         self.parent = keyframe 
         keyframe.add_child(self)
Example 17
def parse_rottentomatoes(url, url_split, movie):
    # The /m/ prefix indicates that this is a link to a movie: We're only interested in movies
    if url_split[2].startswith("com/m/"):
        movie.rotten_tomatoes = RottenTomatoes(url,
                                               movie.name,
                                               year=movie.year)

        Printer.print_minus(''.join(["FOUND ROTTEN TOMATOES: ", url]))
Example 18
 def large_window_BA(self):
     Printer.blue('@large BA')
     # large window optimization of the map
     self.kid_last_BA = self.kf_cur.kid   
     self.time_large_opt.start() 
     err = self.map.optimize(local_window=Parameters.kLargeBAWindow, abort_flag=self.opt_abort_flag)  # verbose=True)
     self.time_large_opt.refresh()
     Printer.blue('large window optimization error^2: %f, KF id: %d' % (err,self.kf_cur.kid))                                                                 
Example 19
def perform(user, other):
    """Perform Disable."""
    if random.randint(0, 99 - user.stats['Special']) > SUCCESS_RATE or \
            'Disable' in other.stats['Effects']:
        Printer.print_ui('  It\'s ineffective!')
    else:
        Printer.print_ui('  {} is now unable to perform certain moves!'.format(other.name))
        other.stats['Effects'].append('Disable')
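
The success check shared by these move functions draws an integer in [0, 99 - Special] and fails when the draw exceeds SUCCESS_RATE; with illustrative values (both numbers below are assumptions, not taken from the source) the success probability works out as follows:

# assumed values, for illustration only
SUCCESS_RATE = 40
special = 30

outcomes = (99 - special) + 1              # randint is inclusive: 70 possible draws
failures = (99 - special) - SUCCESS_RATE   # draws strictly greater than SUCCESS_RATE: 29
print('success probability: %.2f' % (1 - failures / outcomes))   # ~0.59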
Example 20
 def local_BA(self):
     # local optimization 
     self.time_local_opt.start()   
     err = self.map.locally_optimize(kf_ref=self.kf_cur, abort_flag=self.opt_abort_flag)
     self.time_local_opt.refresh()
     print("local optimization error^2:   %f" % err)       
     num_kf_ref_tracked_points = self.kf_cur.num_tracked_points(kNumMinObsForKeyFrameDefault) # number of tracked points in k_ref
     Printer.purple('KF(%d) #points: %d ' %(self.kf_cur.id, num_kf_ref_tracked_points))           
Example 21
 def compute(self, frame, kps=None, mask=None): # kps is a fake input, mask is a fake input
     with self.lock:        
         if self.frame is not frame:
             Printer.orange('WARNING: LFNET is recomputing both kps and des on last input frame', frame.shape)
             self.detectAndCompute(frame)
         return self.kps, self.des                 
Example 22
 def compute(self, img, kps, mask=None):
     Printer.orange(
         'WARNING: you are supposed to call detectAndCompute() for ORB2 instead of compute()'
     )
     Printer.orange(
         'WARNING: ORB2 is recomputing both kps and des on input frame',
         img.shape)
     return self.detectAndCompute(img)
Example 23
 def init_feature_tracker(self, tracker):
     Frame.set_tracker(tracker)  # set the static field of the class
     if kUseEssentialMatrixFitting:
         Printer.orange('forcing feature matcher ratio_test to 0.8')
         tracker.matcher.ratio_test = 0.8
     if tracker.tracker_type == FeatureTrackerTypes.LK:
         raise ValueError(
             "You cannot use the Lucas-Kanade tracker in this SLAM approach!")
Example 24
    def need_new_keyframe(self, f_cur):
        num_keyframes = self.map.num_keyframes()
        nMinObs = kNumMinObsForKeyFrameDefault
        if num_keyframes <= 2:
            nMinObs = 2  # if just two keyframes then we can have just two observations
        num_kf_ref_tracked_points = self.kf_ref.num_tracked_points(
            nMinObs)  # number of tracked points in k_ref
        num_f_cur_tracked_points = f_cur.num_matched_inlier_map_points(
        )  # number of inliers in f_cur
        Printer.purple('F(%d) #points: %d, KF(%d) #points: %d ' %
                       (f_cur.id, num_f_cur_tracked_points, self.kf_ref.id,
                        num_kf_ref_tracked_points))

        if kLogKFinfoToFile:
            self.kf_info_logger.info(
                'F(%d) #points: %d, KF(%d) #points: %d ' %
                (f_cur.id, num_f_cur_tracked_points, self.kf_ref.id,
                 num_kf_ref_tracked_points))

        self.num_kf_ref_tracked_points = num_kf_ref_tracked_points

        is_local_mapping_idle = self.local_mapping.is_idle()
        local_mapping_queue_size = self.local_mapping.queue_size()
        print('is_local_mapping_idle: ', is_local_mapping_idle,
              ', local_mapping_queue_size: ', local_mapping_queue_size)

        # condition 1: more than "max_frames_between_kfs" have passed from last keyframe insertion
        cond1 = f_cur.id >= (self.kf_last.id + self.max_frames_between_kfs)

        # condition 2: more than "min_frames_between_kfs" have passed and local mapping is idle
        cond2 = (f_cur.id >= (self.kf_last.id + self.min_frames_between_kfs)
                 and is_local_mapping_idle)
        #cond2 = (f_cur.id >= (self.kf_last.id + self.min_frames_between_kfs))

        # condition 3: few tracked features compared to reference keyframe
        cond3 = (num_f_cur_tracked_points <
                 num_kf_ref_tracked_points * Parameters.kThNewKfRefRatio) and (
                     num_f_cur_tracked_points >
                     Parameters.kNumMinPointsForNewKf)

        #print('KF conditions: %d %d %d' % (cond1, cond2, cond3) )
        ret = (cond1 or cond2) and cond3

        if ret:
            if is_local_mapping_idle:
                return True
            else:
                self.local_mapping.interrupt_optimization()
                return local_mapping_queue_size <= 3
        else:
            return False
Example 25
def perform(user, other):
    """Perform Poison."""
    if random.randint(0, 99 - user.stats['Special']) > SUCCESS_RATE or \
            'Poison' in other.stats['Effects']:
        Printer.print_ui('  It\'s ineffective!')
    else:
        Printer.print_ui('  {} is now poisoned!'.format(other.name))
        other.stats['Effects'].append('Poison')
        other.stats['Poison Strength'] = max(user.stats['Special'] * 0.5, 8)
Example 26
def perform(user, other):
    """Perform Sap."""
    if random.randint(0, 99 - user.stats['Special']) > SUCCESS_RATE:
        Printer.print_ui('  It\'s ineffective!')
    else:
        decrease = min(other.stats['Base Defense'],
                       max(1, int(0.2 * user.stats['Special'])))
        other.stats['Base Defense'] -= decrease
        Printer.print_ui('  The Defense of {} drops by {}.'.format(
            other.name, decrease))
Example 27
def perform(user, other):
    """Perform Mimic."""
    move = other.stats['Previous move']
    if move is not None:
        Printer.print_ui('  {} mimics {} using {}.'.format(user.name, other.name,
                                                           move.NAME))
        Printer.delay_ui(1)
        user.stats['Previous move'] = move
        move.perform(user, other)
    else:
        Printer.print_ui('  It\'s ineffective!')
Example 28
def write_stats(turn_number, agent_fst, agent_snd):
    """Displays both players' basic stats on the screen."""
    Printer.print_ui()
    Printer.print_ui(
        '----- TURN {} ----------------------'.format(turn_number))
    Printer.print_ui('[{}]  HP: {}  PP: {}'.format(agent_fst.name,
                                                   agent_fst.stats['HP'],
                                                   agent_fst.stats['PP']))
    Printer.print_ui('[{}]  HP: {}  PP: {}'.format(agent_snd.name,
                                                   agent_snd.stats['HP'],
                                                   agent_snd.stats['PP']))
    Printer.print_ui()
Example 29
def perform(user, other):
    """Perform Sing."""
    try:
        Printer.print_ui('  ♪ The sound of {} singing fills the area. ♫'.format(user.name))
    except UnicodeEncodeError:
        Printer.print_ui('  (la la) The sound of {} singing fills the area. (la)'.format(user.name))
    Printer.delay_ui(1)
    if random.randint(0, 99 - user.stats['Special']) > SUCCESS_RATE or \
            'Sleep' in other.stats['Effects']:
        Printer.print_ui('  It\'s ineffective!')
    else:
        Printer.print_ui('  {} is now asleep!'.format(other.name))
        other.stats['Effects'].append('Sleep')
Example 30
 def getImageColor(self, frame_id):
     try:
         img = self.getImage(frame_id)
         if img.ndim == 2:
             return cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
         else:
             return img
     except Exception:
         img = None
         #raise IOError('Cannot open dataset: ', self.name, ', path: ', self.path)
         Printer.red('Cannot open dataset: ', self.name, ', path: ',
                     self.path)
         return img