Example #1
0
    def GET(self):
        """Look up a tracked product by URL.

        Returns a serialized dict with the product's name/url, a best-effort
        'region' from entity analysis, its past and present prices, and
        price/variance trend data. Unknown URLs yield a dict without the
        name/region keys instead of crashing.
        """
        data = web.input()
        url = data['url']
        ret = {}
        if url:
            rows = list(db.select('product', what='name,url',
                                  where='url=$url', vars={'url': url}))
            if rows:
                row = rows[0]
                ret['name'], ret['url'] = row['name'], row['url']

                # Run the entity analysis only when a name was actually found;
                # previously ret['name'] was read unconditionally and raised
                # KeyError for URLs not present in the product table.
                # Double the single quotes so a name like "men's watch" cannot
                # terminate (or inject into) the YQL string literal.
                safe_name = ret['name'].replace("'", "''")
                query = ("select * from contentanalysis.analyze "
                         "where text='%s'" % safe_name)
                anal = yqlquery(query)
                try:
                    entities = anal['query']['results']['entities']['entity']
                    for entity in entities:
                        if entity['types']['type'].get('region'):
                            ret['region'] = entity['types']['type']['region']
                            break
                except (KeyError, TypeError, ValueError):
                    # Analysis payload missing or malformed — degrade gracefully.
                    ret['region'] = 'unknown'

            f = flipkart()
            past_prices = fetch_prices(url)
            cur_price = f.get_price(url)
            ret['price'] = {'past': past_prices, 'present': cur_price}

            # List comprehension instead of map(): map() is lazy on Python 3
            # and would not serialize as a plain list.
            # Assumes past_prices rows are (timestamp, price) pairs — TODO confirm
            # against fetch_prices.
            prices = [p[1] for p in past_prices]
            price_trend = order(prices)
            variances = monthly_variance(past_prices)
            variance_trend = order(variances)
            ret['trend'] = {'price': [price_trend, prices],
                            'variance': [variance_trend, variances]}

        return serialize(ret)
Example #2
0
def start(filename):
    """Rebuild the review store from the URL-mapping CSV *filename*.

    Each row from links() is expected to carry (skuid, flipkart_url,
    snapdeal_url) — TODO confirm against the CSV layout. Reviews are scraped
    from both sites and inserted, all attributed to a single fixed user.
    """
    user_id = 1  # every scraped review is stored under this user
    clear(user_id)

    def _store_reviews(revs, skuid):
        # Each rev appears to be (product_id, review_text, score); insert one
        # row per review. Shared by both scrapers to avoid the duplicated
        # loop bodies the original carried.
        for rev in revs:
            insert(rev[0], skuid, rev[1], rev[2], user_id)

    for link in links(filename):
        _store_reviews(flipkart(link[1]), link[0])
        _store_reviews(snapdeal(link[2]), link[0])
        # add only upto a page to prevent from going over budget
        urls = urls + eggUrls(page_soup, url)[:25]

    if 'amazon' in url:
        print('Creating URLs for amazon')
        urls = urls + zonUrls(page_soup, url)

    if 'flipkart' in url:
        print('Creating URLs for flipkart')
        # add only upto a page to prevent from going over budget
        urls = urls + kartUrls(page_soup, url)[:10]

# Walk every collected URL, parse its page, and hand the soup to each
# site-specific extractor whose marker appears in the URL (a URL matching
# several markers is processed by all of them, as before).
_EXTRACTORS = (('newegg', newegg), ('amazon', amazon), ('flipkart', flipkart))

for i, url in enumerate(urls):
    page_soup = parse(url)

    print('%d Page' % (i + 1))

    for marker, extract in _EXTRACTORS:
        if marker in url:
            extract(page_soup)
Example #4
0
 def GET(self):
     """Search endpoint: run the ?query= term against flipkart and return
     the serialized result set (empty query string by default)."""
     params = web.input(query='')
     store = flipkart()
     results = store.search(params['query'])
     return serialize(results)
Example #5
0
        "]:::::::::::::::::::::::::::::::::::::: Instruction  ::::::::::::::::::::::::::::::::::::::["
    )
    print(
        "]------------------------------------------------------------------------------------------["
    )
    print(
        "]::: Plesae follow all the further in instruction for smooth use of the program            ["
    )
    print(
        "]::: Now you have to enter the search item and then we ll pass it for further process      ["
    )
    print(
        "]::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::["
    )
    print(
        "                                                                                            "
    )


if __name__ == "__main__":
    #this is the main area for the excution
    display1()
    flip = f.flipkart()
    st = None
    st = flip.main()
    if st != None and len(st) != 0:
        p = pd.person(st)
    else:
        print(
            "<<< Sorry there is no item available in fllipkart and thank you for using >>>"
        )
Example #6
0
# 'http://www.flipkart.com/item/MOBE87BTKJY8QYZH'
# 'http://www.snapdeal.com/product/page/639082261977'

from snapdealstuff import snapdeal
from flipkart import flipkart
from reviews import review
from csvthingy import links

# Scrape and dump review tuples for every mapping row in the CSV.
# Each link row appears to be (skuid, flipkart_url, snapdeal_url) — TODO
# confirm against the CSV. print(rev) replaces the Python-2-only
# `print rev` statement: identical output for a single argument on
# Python 2, and valid on Python 3 like the rest of this file.
for link in links('renka_product_url_fk_snapdeal.csv'):
    for rev in flipkart(link[1]):
        print(rev)
    for rev in snapdeal(link[2]):
        print(rev)