Example #1
    def get_top_pick_venues_for_city(self, city):
        # venues['response']['groups'][0]['items'][0]['venue']['stats']
        # {u'tipCount': 117, u'checkinsCount': 14673, u'usersCount': 5010}

        # venues['response']['groups'][0]['items'][0]['venue']['categories']
        # Return the cached result if we have already fetched this city.
        if (city["city"], city["country"]) in self.top_picks:
            return self.top_picks[(city["city"], city["country"])]

        results = []
        url = fs_api.url(
            "/venues/explore",
            queryDict={"near": "%s, %s" % (city["city"], city["country"]), "section": "topPicks", "limit": "100"},
        )
        try:
            venues = self.read_json_at_url(url)
        except Exception:
            # Request or JSON parsing failed; cache an empty result for this city.
            self.top_picks[(city["city"], city["country"])] = []
            return []

        if "groups" in venues["response"]:
            results = [item["venue"] for item in venues["response"]["groups"][0]["items"]]
        self.top_picks[(city["city"], city["country"])] = results
        self.save_top_picks()
        return results
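A minimal usage sketch, assuming a wrapper class (called VenueClient here purely for illustration) that exposes this method along with the top_picks cache and read_json_at_url helper:

# Hypothetical usage; the VenueClient name and its constructor are assumptions.
client = VenueClient()
city = {"city": "Austin", "country": "United States"}
venues = client.get_top_pick_venues_for_city(city)
for venue in venues[:5]:
    print("%s (%d users)" % (venue["name"], venue["stats"]["usersCount"]))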
Example #2
    def load_category_tree(self):
        # Prefer the category tree cached on disk.
        if os.path.exists(CATEGORIES_FILE):
            with open(CATEGORIES_FILE) as fin:
                self.root_categories = json.load(fin)
                self.build_category_datastructs()
            return

        # Otherwise fetch the full tree from the Foursquare API and cache it.
        categories = self.read_json_at_url(fs_api.url("/venues/categories", {}))
        self.root_categories = []
        self.recursive_process_categories(self.root_categories, categories["response"]["categories"])
        with open(CATEGORIES_FILE, "w") as fout:
            fout.write(json.dumps(self.root_categories))

        self.build_category_datastructs()
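The recursive helper called above isn't part of this excerpt. A plausible sketch of it, assuming the standard /venues/categories response where each category object carries an id, a name, and a nested "categories" list (this body is an assumption, not the project's actual code):

    def recursive_process_categories(self, out_list, api_categories):
        # Hypothetical sketch: copy the API's nested categories into our own tree.
        for cat in api_categories:
            node = {"id": cat["id"], "name": cat["name"], "children": []}
            out_list.append(node)
            # Recurse into sub-categories, if any.
            self.recursive_process_categories(node["children"], cat.get("categories", []))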
Example #3
def serialize_cities():
    # Read "<city> <country>" pairs and fetch the top arts venues for each one.
    with open('../data/small.txt', 'r') as f:
        for line in f:
            a = line.split()
            city = a[0]
            country = a[1]

            newCity = City(city, country)

            query = 'arts'
            limit = '10'

            my_url = url('/venues/explore', {'near': city + ', ' + country, 'section': query, 'limit': limit})
            result = urllib2.urlopen(my_url).read()

            newCity.activities.append(json.loads(result))
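The City class used above isn't included in this excerpt; a minimal stand-in might look like the following (an assumption, the real class likely carries more fields and persistence logic):

# Hypothetical minimal City container assumed by serialize_cities().
class City(object):
    def __init__(self, city, country):
        self.city = city
        self.country = country
        self.activities = []  # raw /venues/explore responses collected per query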
Example #4
    def get_pois(self, city, country):
        # Group the city's top-pick venues by their parent category id.
        cat_places = {}

        url = fs_api.url(queryDict={"near": "%s, %s" % (city, country), "section": "topPicks"})
        data = json.loads(urllib.urlopen(url).read())
        if "groups" not in data["response"]:
            print "failed to load for url %s" % url
            return

        items = data["response"]["groups"][0]["items"]
        for item in items:
            # Skip venues that carry no category information.
            if len(item["categories"]) == 0:
                continue

            # Find the parent category of the venue's primary category.
            parent_id = None
            for parent in item["categories"][0]["parents"]:
                if parent in self.name2category:
                    parent_id = self.name2category[parent]
                    if parent_id not in cat_places:
                        cat_places[parent_id] = []

            # Without a recognized parent, parent_id would otherwise be unbound
            # (or left over from a previous venue), so skip this one.
            if parent_id is None:
                continue

            categoryId = item["categories"][0]["id"]

            cat_places[parent_id].append(
                {
                    "name": item["name"],
                    "category": categoryId,
                    "lat": item["location"]["lat"],
                    "lng": item["location"]["lng"],
                    "stats": item["stats"],
                }
            )

        # Sort each parent category's venues by usersCount, most popular first.
        for parent_id in cat_places.iterkeys():
            cat_places[parent_id] = sorted(cat_places[parent_id], key=lambda x: x["stats"]["usersCount"], reverse=True)

        return cat_places
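A short usage sketch, again with an assumed VenueClient wrapper whose load_category_tree() has already populated name2category:

# Hypothetical usage; the class name and setup calls are assumptions.
client = VenueClient()
client.load_category_tree()  # populates name2category used by get_pois()
pois = client.get_pois("Portland", "United States")
if pois:
    for parent_id, places in pois.items():
        top = places[0]
        print("%s: %s (%d users)" % (parent_id, top["name"], top["stats"]["usersCount"]))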