import re

import requests
from bs4 import BeautifulSoup

MAP_REGEX = re.compile(r'\bmaps\.google\.com/maps\b')


def get_single_items_data(item_url):
    # Fetch the posting page and parse it.
    source_code = requests.get(item_url)
    plain_text = source_code.text
    soup = BeautifulSoup(plain_text, 'html.parser')

    # Print the posting title and info spans.
    for item_name in soup.find_all('span', {'class': ['postingtitletext', 'postinginfo']}):
        print(item_name.text)

    # Walk every link on the page, recording Google Maps locations separately.
    for link in soup.find_all('a'):
        href = link.get('href')
        if href is None:
            continue

        if MAP_REGEX.search(href) and not test_unique(href, map_list):
            # Store the map link in the database and append it to maps.txt.
            c = DBConnector()
            c.insert('maplink', maplinkcol=href)
            print('Location added: ' + href)
            with open('maps.txt', 'a') as fw:
                fw.write(href + '\n')
            map_list.append(href)

        # Remember every href seen so far, skipping duplicates.
        if not test_unique(href, link_list):
            link_list.append(href)
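
This snippet leans on names defined elsewhere in the project: the map_list and link_list accumulators, the test_unique helper, and the DBConnector class. The sketch below is one way they could look; it assumes test_unique(value, seen) simply reports whether the value was already collected and that DBConnector wraps a SQLite database, neither of which is confirmed by the original code.

import sqlite3

# Assumed module-level accumulators used by get_single_items_data.
map_list = []
link_list = []


def test_unique(value, seen):
    # Assumption: True when the value has already been collected.
    return value in seen


class DBConnector:
    # Hypothetical stand-in for the project's DBConnector, backed by SQLite.
    def __init__(self, path='scraper.db'):
        self.conn = sqlite3.connect(path)

    def insert(self, table, **columns):
        # Build an INSERT from keyword arguments, e.g. insert('maplink', maplinkcol=url).
        cols = ', '.join(columns)
        placeholders = ', '.join('?' for _ in columns)
        with self.conn:
            cur = self.conn.execute(
                'INSERT INTO {} ({}) VALUES ({})'.format(table, cols, placeholders),
                tuple(columns.values()))
        return cur.lastrowid

    def getCount(self, table):
        # Simple row-count helper (assumed interface).
        return self.conn.execute('SELECT COUNT(*) FROM {}'.format(table)).fetchone()[0]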
Example #2
    def buyGame(self):
        # Record that the current user owns the current game.
        c = DBConnector()

        # getUser()/getGame() return result sets; the first column of the
        # first row is used as the id.
        u = self.user.getUser()
        userid = u[0][0]

        g = self.game.getGame()
        gameid = g[0][0]

        # Insert the (user, game) pair into the usergames join table.
        result = c.insert('usergames', idusers=userid, idgames=gameid)
        return result
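
The buyGame method assumes a usergames join table linking a user id to a game id, but the table itself is never shown. The short sketch below spells out one plausible schema and the plain-SQL equivalent of the c.insert call, using sqlite3 with in-memory storage purely for illustration; the idusergames key column is an assumption.

import sqlite3

conn = sqlite3.connect(':memory:')

# Assumed schema for the usergames join table referenced by buyGame.
conn.execute(
    'CREATE TABLE usergames ('
    'idusergames INTEGER PRIMARY KEY AUTOINCREMENT, '
    'idusers INTEGER NOT NULL, '
    'idgames INTEGER NOT NULL)')

# Plain-SQL equivalent of c.insert('usergames', idusers=userid, idgames=gameid).
conn.execute('INSERT INTO usergames (idusers, idgames) VALUES (?, ?)', (1, 7))
conn.commit()

print(conn.execute('SELECT * FROM usergames').fetchall())  # [(1, 1, 7)]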