    def select_size_and_add_to_cart(self, size):
        # Open the size selector on the product page.
        sizeBox = self.findElement(IDMODE.CLASS, 'exp-pdp-size-and-quantity-container')
        select2 = sizeBox.find_element_by_tag_name('a')
        select2.click()
        time.sleep(2)

        # Pick the list entry whose text matches the requested size.
        lis = sizeBox.find_elements_by_tag_name('li')
        for li in lis:
            if li.text.strip() == size:
                li.click()
                break

        time.sleep(2)
        # Click the add-to-cart button inside the save container.
        button = self.findElement(IDMODE.CLASS, 'exp-pdp-save-container')
        button.find_element_by_tag_name('button').click()

        time.sleep(5)
        # If the cart header reports "Out of Stock", the add failed.
        try:
            span = self.driver.find_element_by_class_name("smart-cart-header")
            if "Out of Stock" in span.text:
                logger.info("Out of Stock")
                return False
        except Exception:
            # Header not present: assume the item was added successfully.
            pass

        return True
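
    # A minimal sketch (an assumption, not part of the original class) showing how
    # the fixed time.sleep() pauses above could be replaced with an explicit
    # Selenium wait, so the flow continues as soon as the size dropdown trigger
    # is actually clickable.
    def wait_for_size_dropdown(self, timeout=10):
        from selenium.webdriver.common.by import By
        from selenium.webdriver.support import expected_conditions as EC
        from selenium.webdriver.support.ui import WebDriverWait

        # Wait up to `timeout` seconds for the <a> that opens the size list.
        return WebDriverWait(self.driver, timeout).until(
            EC.element_to_be_clickable(
                (By.CSS_SELECTOR, '.exp-pdp-size-and-quantity-container a')))
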
    def scrape_all_results(self):
        i = 1
        while True:
            # scrape results on a page
            self.waitUntilElementIsPresent(IDMODE.XPATH, "//table[@class='list-font-head']")
            self.scrape_results_on_page()

            # go to next page
            nextButtonTable = self.driver.find_element_by_xpath("//table[@class='list-font-head']")
            if ">>" in nextButtonTable.text:
                logger.info("Page " + str(i))
                nextButton = nextButtonTable.find_elements_by_tag_name("td")[2]
                nextButton.find_element_by_tag_name("a").click()
                i += 1
            else:
                break


def doScrape():
    browser = SiteEvents()

    logger.info("Opening Browser...")
    browser.navigate(config.baseURL)
    logger.info("...Browser Opened")

    logger.info("Entering Parameters...")
    browser.fill_search_parameters()
    logger.info("...Parameters Entered")

    logger.info("Scraping Results...")
    browser.scrape_all_results()
    logger.info("... Results Scraped")

    if len(config.Results) == 0:
        logger.info("No Results")
        return "NO_RESULTS"

    logger.info("Writing CSV...")
    inputContent.write_csv_results()
    logger.info("...CSV Written")

    return 1


def doScrape(Con, q):
    g = None  # ensure `g` exists for the cleanup path in the outer except
    try:
        g = Gmail()
        ################ LOGIN #####################################
        q.put(('Logging In', 5), )
        logger.info("Logging In")
        try:
            g.login(Con.Username, Con.Password)
        except AuthenticationError:
            logger.exception(sys.exc_info())
            q.put(('Login Failed', 100), )
            return "AUTH_ERROR"
        ############################################################

        ################ GET LABEL MAILBOX #########################
        mailbox = None
        q.put(('Getting Mailbox', 10), )
        logger.info("Getting Mailbox")
        try:
            if Con.Label.lower() == 'inbox':
                mailbox = g.inbox()
            else:
                mailbox = g.mailbox(Con.Label)
        except Exception:
            logger.exception(sys.exc_info())
            q.put(('Problem in fetching Gmail Label', 100), )
            return "LABEL_FETCH_ERROR"
        if not mailbox:
            q.put(('Gmail Label Not Found', 100), )
            return "LABEL_NOT_FOUND"
        ############################################################

        ################ GET EMAILS ################################
        mails = None
        q.put(('Searching For Emails', 15), )
        logger.info("Searching Emails")
        try:
            afterDate = Con.FromDate - timedelta(days=1)
            beforeDate = Con.ToDate + timedelta(days=1)
            mails = mailbox.mail(
                subject='Fiverr: Congrats! You have a new order',
                after=afterDate,
                before=beforeDate)
            mails.extend(
                mailbox.mail(subject='just ordered an Extra',
                             after=afterDate,
                             before=beforeDate))
            # mails = mailbox.mail(after=Con.FromDate, before=Con.ToDate)
        except Exception:
            logger.exception(sys.exc_info())
            q.put(('Problem in searching for emails', 100), )
            return "EMAILS_FETCH_ERROR"
        if len(mails) == 0:
            q.put(('No Emails Found with search criteria', 100), )
            return "NO_EMAIL_FOUND"
        ############################################################

        ################ FETCH EMAILS ##############################
        q.put(('Fetching Emails', 20), )
        logger.info("Scraping Order Data From Emails")
        Con.Orders = []
        logger.info("Num of Emails found: " + str(len(mails)))
        try:
            for idx, mail in enumerate(mails, start=1):
                msg = "Fetching Email " + str(idx) + ' of ' + str(len(mails))
                # Progress runs from 20% to 80% while emails are fetched.
                per = 20 + int(float(idx) * 100.0 * 0.6 / float(len(mails)))
                q.put((msg, per), )
                #logger.info(msg)
                mail.fetch()
                gmailEvents.extract_orders_from_email(mail, Con)
        except Exception:
            logger.exception(sys.exc_info())
            q.put(('Problem in fetching emails', 100), )
            return "EMAIL_FETCH_ERROR"
        ############################################################

        # return 'SUCCESS'

        ################ CALCULATE TOTAL AMOUNT ####################
        q.put(('Calculating Total and Revenue', 85), )
        logger.info("Calculating Total Amount")
        gmailEvents.calculate_total_amount(Con)
        ############################################################

        ################ GENERATE XLS ##############################
        q.put(('Generating XLS', 90), )
        logger.info("Generating XLS")
        gmailEvents.generate_xls(Con)
        ############################################################

        q.put(('Logging Out of Gmail', 95), )
        g.logout()
        q.put(('SUCCESS', 100), )
        return 'SUCCESS'
    except Exception:
        # Any unexpected failure: close the Gmail session if one was opened.
        if g:
            g.logout()
        logger.exception(sys.exc_info())
        q.put(('Error Occurred', 100), )
        return "ERROR"

def get_desired_url():
    # Poll for a tweet mentioning the configured keyword as 'available'/'arrived'
    # and return the first URL in its text. NOTE: the lines that rebuild `soup`
    # each pass (fetching/parsing the tweet page) are missing from this snippet.
    while True:
        contents = soup.find_all("div", class_="content")
        for content in contents:
            tweetText = content.p.text
            if config.Keyword in tweetText and ('available' in tweetText or 'arrived' in tweetText):
                urls = re.findall(r"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+",
                                  tweetText)
                return urls[0]
        time.sleep(4)

readProperties()

try:
    nike = NikeEvents(config.ProxyIP)

    logger.info("Opening Nike Site...")
    nike.navigate("http://store.nike.com/us/en_us/?l=shop%2Clogin")
    logger.info("...Opened Nike Site")
    logger.info(nike.driver.title)
    logger.info(nike.driver.current_url)

    logger.info("Logging in to Nike...")
    nike.loginToNike(config.Email, config.Password)

    time.sleep(10)
    logger.info("...Logged in to Nike")
    logger.info(nike.driver.title)
    logger.info(nike.driver.current_url)

    logger.info("Waiting for tweet...")
    shoeURL = get_desired_url()