Example #1
0
def main():
    """Run the Reddit scrapper in a simple polling loop.

    Expects exactly one command-line argument: a Unix timestamp that is
    forwarded to every ``make_call``.  Scrapes each subreddit known to
    the scrapper, sleeps two minutes, and repeats forever.
    """
    if len(sys.argv) == 2:
        user_timestamp = int(sys.argv[1])

        scrapperObj = scrapper.Scrapper()

        print("Running scrapper...")
        # Crude but working periodic refresh: poll every subreddit,
        # then wait 120 seconds before the next round.
        while True:
            for sub in scrapperObj.subreddits:
                scrapperObj.make_call(sub, user_timestamp)
            time.sleep(120)
    else:
        print("Usage: python main.py TIMESTAMP")
Example #2
0
    def test_make_call(self):
        """make_call should return True when the subreddit feed is mocked."""
        test_json = [{
            "id": "1234123",
            "title": "This is a test title",
            "selftext": "Test is a test for testing tests.",
            "num_comments": 45,
            "created": 1495490048,
            "subreddit": "test",
            "comments": [
                "this is the first comment", "this is the second commment",
                "this is the third comment"
            ]
        }]

        scrapper_call = scrapper.Scrapper()
        mock_subreddit = mock.MagicMock()
        # mocking the submissions part of the subreddits call.
        mock_subreddit.submissions.return_value = test_json
        # Assign the mock directly.  The original chained assignment
        # (`x = mock.MagicMock().side_effect = mock_subreddit`) discarded
        # the intermediate MagicMock and had exactly this net effect.
        scrapper_call.reddit.subreddit = mock_subreddit
        result = scrapper_call.make_call("python", 1495490048)

        self.assertEqual(result, True)
Example #3
0
 def clickedButton(self):
     """Look up the typed Codechef handle and fill the stats labels.

     On any scrape failure (bad handle, network error, parse error) the
     labels are filled with "NULL" placeholders and the button shows an
     error message.
     """
     print("Entered")
     handle = self.name_input.text()
     print(handle)
     try:
         s = scrapper.Scrapper(handle)
         # Renamed from `list` so the builtin is not shadowed.
         solved_problems = s.problemsolved()
         for item in solved_problems:
             self.cb.addItem(item)
         self.rating.setText("Codechef Rating ::   " + s.overAllRating())
         self.countryRank.setText("Country Rank ::  " + s.countryRank())
         self.globalRank.setText("Global Rank ::   " + s.globalRank())
         self.ratingStars.setText("Rating Stars ::   " + s.ratingStars())
         self.highestRating.setText("Highest Rating ::   " +
                                    s.highestRating())
         self.contestAttended.setText("Contest Attended ::   " +
                                      str(s.contestAttended()))
     except Exception:
         # Narrowed from a bare `except:` so Ctrl-C / SystemExit still work.
         pri = "NULL"
         self.button.setText(":( Invalid Input")
         self.rating.setText("Codechef Rating ::   " + pri)
         self.countryRank.setText("Country Rank ::  " + pri)
         self.globalRank.setText("Global Rank ::   " + pri)
         self.ratingStars.setText("Rating Stars ::   " + pri)
         self.highestRating.setText("Highest Rating ::   " + pri)
         self.contestAttended.setText("Contest Attended ::   " + pri)
def scrape(url, n=0):
    """Scrape a meetup page: events, attendees, members and organisers.

    Returns the populated Scrapper instance.
    """
    meetup = scr.Scrapper(url)
    meetup.event_urls()
    # Each stage loads one dataset and reports completion.
    stages = (
        (lambda: meetup.event_info(n), 'Events data load'),
        (meetup.attendees_info, 'Attendees data load'),
        (meetup.members_info, 'Members data load'),
        (meetup.organisers_info, 'Organisers data load'),
    )
    for run_stage, message in stages:
        run_stage()
        print(message)
    return meetup
Example #5
0
def main():
    """Scrape the meetup search page and persist the results to the DB."""
    url = 'https://www.meetup.com/find/?keywords=data%20science'
    meetup = scrapper.Scrapper(url)

    # Collect every dataset the scrapper exposes, in dependency order.
    meetup.event_urls()
    meetup.event_info()
    meetup.attendees_info()
    meetup.members_info()
    meetup.organisers_info()

    # Push each collected dataframe into its table.
    db = Database()
    db.populate_tables_organizers(meetup.organisers_df)
    db.populate_tables_members(meetup.members_df)
    db.populate_tables_events(meetup.event_df)
    db.populate_tables_attendee(meetup.attendees_df)
Example #6
0
    def getResultsAndAddToTable(self):
        """Fetch the amortisation table from the scrapper and display it.

        The original inner loop iterated `enumerate(range(len(i)))`, which
        yields two identical indices, and re-ran invariant widget calls on
        every row; those are hoisted here.
        """
        scrap = scrapper.Scrapper(self.kt, self.ktu, self.v, self.f, self.s)
        self.gbSonuclar.setVisible(True)
        # Interest-rate label does not depend on the row; compute once.
        faiz_text = _translate("Form", "Faiz Oranı      : {}".format(str(self.txtFaiz.text())), None)
        # Column order of the result table.
        columns = ('Sira', 'Taksit', 'Anapara', 'Faiz', 'Bakiye', 'KKDF', 'BSMV')
        for table in scrap.getTable():
            self.tableResult.setRowCount(len(table))
            for row, entry in enumerate(table):
                self.lblTaksit.setText(_translate("Form", "Taksit Tutarı : {}".format(entry['Taksit']), None))
                self.lblFaiz.setText(faiz_text)
                for col, key in enumerate(columns):
                    self.tableResult.setItem(row, col, QtGui.QTableWidgetItem(str(entry[key])))
Example #7
0
    def test_stage_no_kwd(self):
        """stage_one succeeds with mocked Submissions/Comments collections."""
        submissions_json = [{
            "id": "1234123",
            "title": "This is a test title",
            "selftext": "Test is a test for testing tests.",
            "num_comments": 45,
            "created": 1495490048,
            "subreddit": "test",
            "comments": [
                "this is the first comment", "this is the second commment",
                "this is the third comment"
            ]
        }]
        comments_json = [{
            "id": "1234123",
            "body": "Test",
            "sub_id": "Test",
            "subreddit": "python",
            "created": "123"
        }]

        scrapper_call = scrapper.Scrapper()
        mock_comments = mock.MagicMock()
        mock_comments.find.return_value = comments_json
        mock_submissions = mock.MagicMock()
        mock_submissions.find.return_value = submissions_json
        # Assign the mocks directly.  The original chained assignments
        # (`x = mock.MagicMock().side_effect = m`) discarded a throwaway
        # MagicMock and ended up doing exactly this.
        scrapper_call.db_conn.Submissions = mock_submissions
        scrapper_call.db_conn.Comments = mock_comments
        result = scrapper_call.stage_one("subreddit", 1245522425, 1244546225)

        self.assertTrue(result)
Example #8
0
 def __init__(self):
     """Wire up the scrapper and the local SQLite store of TFT data."""
     # Scrapper instance used for all remote data fetches.
     self.scrapper = scrapper.Scrapper()
     # Data version as reported by the scrapper (fetched eagerly here).
     self.version = self.scrapper.get_version()
     # SQLite database file in the working directory; cursor kept for reuse.
     self.con = sqlite3.connect("./DataTFT")
     self.cur = self.con.cursor()
Example #9
0
def schedule():
    """Render the schedule page for the team picked in the submitted form."""
    selected_team = request.form.get('team_select')
    schedule_items = scrapper.Scrapper().scrape(selected_team)
    return render_template('schedule.html', content=schedule_items)
Example #10
0
import scrapper as sc
import predictions as pre
import statistics as st

# Module-level singletons shared by the interactive console loop below.
default = True  # keeps the command loop running; presumably cleared by /end — handler not visible in this excerpt
s = sc.Scrapper()  # standings scrapper
p = pre.Prediction()  # prediction management
stats = st.Statistics()  # statistics calculations


def commands():
    """Return the multi-line help text listing every console command."""
    # (command, description) pairs; formatting below aligns all arrows.
    entries = [
        ("/createLinks", "Creating links for drivers standings and constructor standings"),
        ("/editLinks", "Editing the links for driver standings and constructor standings"),
        ("/viewStandings", "View the current standings for driver standings and constructor standings"),
        ("/makeNewPrediction", "Add a new prediction of how the drivers and teams will rank"),
        ("/addAnotherPrediction", "Adding another prediction of how the drivers and teams will rank"),
        ("/viewPrediction", "Viewing a prediction"),
        ("/deletePrediction", "Delete a prediction"),
        ("/showZeros", "Show the number of teams/drivers who have not gained points"),
        ("/calculateScore", "Calculate the inaccuracy score for the predictions"),
        ("/end", "Ends the program"),
    ]
    # Width 21 pads every command so each "-->" lands in the same column.
    return "\n".join("{:<21} --> {}".format(cmd, desc) for cmd, desc in entries)


# Interactive command loop; runs until `default` is cleared.
# NOTE(review): only two commands are dispatched here — the remaining menu
# entries (including the /end exit path) appear truncated from this excerpt.
while default:
    response = input("What would you like to access: ")
    if response == "/createLinks":
        s.create_links()
        print("Links created")
    elif response == "/commands":
        print(commands())
Example #11
0
 def Command1(self):
     """Kick off a scrape for the handle currently typed in input1."""
     handle = self.input1.get()
     scrapper.Scrapper(handle)
 def __init__(self):
     """Build the configuration parser and hand it to the scrapper."""
     self.config_parser = config_parser.Configuration_Parser()
     # The scrapper is configured from the parser constructed above.
     self.scrapper = scrapper.Scrapper(self.config_parser)
Example #13
0
''' Main Module for Project '''

import sys
import scrapper
import deployment
import report_builder

if __name__ == "__main__":
    # Optional "-d" flag deploys before scraping.
    if len(sys.argv) > 1 and sys.argv[1] == '-d':
        deployment.deploy()

    # Distinct names so the `scrapper` / `report_builder` modules are not
    # shadowed by their own instances (the original rebound both names).
    scrapper_obj = scrapper.Scrapper()
    builder = report_builder.ReportBuilder()

    builder.print_reports(scrapper_obj.scrape())
Example #14
0
import scrapper
import driver_config


def addNewLine(string):
    """Return *string* formatted with a trailing newline appended."""
    return f"{string}\n"


if __name__ == '__main__':

    # Smoke checks against moat.com driven through the configured webdriver;
    # each check's textual result is appended to the output file, one per line.
    url = "https://moat.com/"
    obj = scrapper.Scrapper(driver_config.DRIVER_PATH)

    with open(driver_config.OUTPUT_PATH, "w") as file_write:

        # 1. Verify the search bar autocomplete drop down text.
        obj.open_website(url)
        autocomplete_result = obj.verify_autocomplete("Saturn")
        file_write.write(addNewLine(autocomplete_result))

        # 2. Verify the creative count on the search results page is
        # correct for these 3 search terms: Saturn, Saturday’s Market,
        # and Krux.
        # The site is reopened before each term to reset page state.
        products_list = ["Saturn", "Saturday’s Market", "Krux"]
        for product in products_list:
            obj.open_website(url)
            creative_check_result = obj.verify_creative(product)
            file_write.write(addNewLine(creative_check_result))

        # 3. Verify the “Random Brand” link on the search results
        # page is random.
        # NOTE(review): the code for check 3 appears truncated in this excerpt.
Example #15
0
import requests
import random
import time
import scrapper

if __name__ == '__main__':
    # File holding the API key used by the scrapper.
    path_to_key = 'secret.txt'
    # Distinct name avoids rebinding (shadowing) the imported `scrapper`
    # module, as the original assignment did.
    scrapper_obj = scrapper.Scrapper(path_to_key)
    scrapper_obj.request_match_list('11121212122')
Example #16
0
 def setUp(self):
     """Create a fresh Scrapper (interval taken from settings) per test."""
     self.scrapper = scrapper.Scrapper(interval=settings.interval)
Example #17
0
 def test_stage_one_false(self):
     """stage_one rejects a non-numeric timestamp and a non-ASCII name."""
     subject = scrapper.Scrapper()
     # Timestamp argument that is not an integer.
     self.assertFalse(
         subject.stage_one("subreddit", "string", 123442.123123))
     # Subreddit name containing non-ASCII characters.
     self.assertFalse(
         subject.stage_one("subr¤©«±¶µeddit", 1245522425, 1244546225))
Example #18
0
 def test_make_call_non_ascii(self):
     """A non-ASCII subreddit name must make make_call return False."""
     scraper_obj = scrapper.Scrapper()
     self.assertEqual(scraper_obj.make_call("¤©«±¶µ", 1495490048), False)