Example 1
import socket

def server_program():
    scrape = Scrape()  # Scrape is provided by the surrounding project
    scrape.data_list()

    host = socket.gethostname()  # get the hostname
    port = 5000  # use a non-privileged port (above 1024)

    server_socket = socket.socket()  # get instance
    server_socket.bind((host, port))  # bind host address and port together

    # backlog: how many queued connections the server will allow
    server_socket.listen(1024)
    conn, address = server_socket.accept()  # accept new connection
    print("Connection from: " + str(address))

    while True:
        # receive the data stream; a single recv() returns at most 1024 bytes
        data = conn.recv(1024).decode()
        if not data:
            # an empty read means the client closed the connection
            break
        print("from connected user: " + str(data))
        data = input(' -> ')
        conn.send(data.encode())  # send the reply to the client

    conn.close()  # close the connection
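A minimal client counterpart (not part of the original example) might look like the sketch below; the hostname and port are assumed to match the server above, and the loop mirrors the server's recv/send exchange:

import socket

def client_program():
    host = socket.gethostname()  # assumes client and server run on the same machine
    port = 5000                  # must match the server's port

    client_socket = socket.socket()
    client_socket.connect((host, port))

    message = input(" -> ")
    while message.strip():
        client_socket.send(message.encode())
        reply = client_socket.recv(1024).decode()
        print("Received from server: " + reply)
        message = input(" -> ")

    client_socket.close()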
Example 2
import json

from bs4 import BeautifulSoup
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait


def generate():
    print("Generating options")
    s = Scrape()  # Scrape is provided by the surrounding project
    s.initializeDriver()
    print("Driver Initialized")
    # wait up to s.timeout seconds for the page to load
    try:
        WebDriverWait(s.driver, s.timeout).until(
            EC.visibility_of_element_located(
                (By.ID, 'competitors-quote-sectors')))
        soup = BeautifulSoup(s.driver.page_source, 'lxml')
        # accessing the dropdown
        options = soup.find(id='competitors-quote-sectors').find_all('option')
        s.end()

        quoteSectors = {}
        for option in options:
            quoteSectors[option.text] = option['value']

        with open('resources/quote_sectors.json', 'w') as f:
            json.dump(quoteSectors, f)  # write the mapping directly; no round-trip needed
            print(
                "Options successfully generated in resources/quote_sectors.json"
            )

    except TimeoutException:
        print(
            'Timed out waiting for options to load. Check your Internet Connection.'
        )
        s.end()
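Once the file has been written, the mapping can be reloaded elsewhere; a minimal sketch:

import json

with open('resources/quote_sectors.json') as f:
    quote_sectors = json.load(f)
print(quote_sectors.get('Internet - Services'))  # e.g. '-ITSE'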
Example 3
    def drop_down(self):
        self.window.geometry("500x500")
        self.window.title("Choose Country to Graph ..")
        Label(self.window, text="").pack()  # spacer
        Label(self.window, text="Select Country").pack()
        Label(self.window, text="").pack()  # spacer
        scrape = Scrape()
        country_list = scrape.data_list()
        print(country_list)
        clicked = StringVar()
        clicked.set(country_list[0])  # default to the first country in the list
        drop = OptionMenu(self.window, clicked, *country_list, command=self.selecter)
        drop.pack()
        self.window.mainloop()
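OptionMenu passes the chosen value to its command callback, so the selecter method referenced above (not shown in the example) receives the selected country. A minimal hypothetical sketch of such a handler:

    def selecter(self, selection):
        # tkinter invokes this with the chosen country as its only argument
        print("Country selected:", selection)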
Example 4
    def Connect(self):
        print("server is started and listening on port >>>>>>>>", self.port)
        # backlog of 5: how many queued connections the server will allow
        self.server_socket.listen(5)
        conn, address = self.server_socket.accept()
        print("Connection from: " + str(address))

        while True:
            print("Looping while loop")
            data = conn.recv(1024).decode()
            if not data:
                # if data is not received break
                break
            print("Country Selected : ", data)
            scrape = Scrape()
            scrape.data_list()
            business = Business()
            country_data = business.countryData(data)
            print(type(country_data))
            msg = pickle.dumps(country_data)
            print("done pickling")
            conn.sendall(msg)  # sendall ensures the whole pickle is transmitted
            print("DATA SENT TO THE CLIENT.... ")
Example 5
import os
import unittest

# Scrape and generate are assumed to be imported from the project under test


class MyTest(unittest.TestCase):

    def setUp(self):
        self.s = Scrape()

    def test_generate_options(self):
        generate()
        self.assertTrue(os.path.exists('resources/quote_sectors.json'))

    def test_get_dict_quoteSectors(self):
        d = {'SIC-7370 Services-Computer Programming, Data Processi': '-737A', 
             'Internet - Services': '-ITSE', 
             'Indices Nasdaq 100': '-INO', 
             'Indices S&P 100': '-ISO', 
             'Indices S&P 500': '-ISFI', 
             'Indices S&P 500 Telcomm': '-SAPL', 
             'Indices Nasdaq Composite': '-NASC', 
             'Indices Russell 1000': '-RUSO', 
             'Indices Russell 3000': '-RUSH'}
        self.assertEqual(self.s.get_dict_quoteSectors(), d)

    def test_infer(self):
        self.s.infer('resources/data.txt')
        self.assertTrue(os.path.exists('resources/Symbols.json'))
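The suite can be run with the standard library test runner from the project root (the resource paths in the tests are relative), for example:

python -m unittest discover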
Example 6
from flask import jsonify, request


def get_data():
    """
    Example:
    /scrape/?number=100&from=01/02/2020&to=10/02/2020
    :return: json scraped data
    """
    item_number = request.args.get("number")
    from_date = request.args.get("from")
    to_date = request.args.get("to")

    scrape = Scrape(numbers_per_page=item_number,
                    from_date=from_date,
                    to_date=to_date)
    scrape.startSelenium()
    # without clearing, the API would keep the previous result and return it again
    scrape.clear()

    scrape.parseData()
    scrape.clean_data()
    scrape.delete_duplicates()
    scraped_list = scrape.get_data()
    result = scraped_list.copy()
    scraped_list.clear()
    scrape.quit()
    return jsonify(result)
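Assuming the view is mounted at /scrape/ as its docstring suggests (the host and port below are assumptions), the endpoint can be exercised with the requests library:

import requests

resp = requests.get(
    "http://localhost:5000/scrape/",
    params={"number": "100", "from": "01/02/2020", "to": "10/02/2020"},
)
print(resp.json())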
Example 7
    def setUp(self):
        self.s = Scrape()
Example 8
import pickle
import socket

port = 50000                  # reserve a port for your service; every new transfer needs a new port, or you must wait
s = socket.socket()           # create a socket object
host = socket.gethostname()   # get the local machine name
s.bind((host, port))          # bind to the port
s.listen(5)                   # now wait for a client connection

print('Server listening....')

while True:
    conn, addr = s.accept()   # establish a connection with a client
    print('Got connection from', addr)
    data = conn.recv(1024).decode()
    print('Server received', repr(data))
    scrape = Scrape()         # Scrape and Business come from the surrounding project
    scrape.data_list()
    business = Business()
    country_data = business.countryData(data)
    payload = pickle.dumps(country_data)

    # send the pickled payload to the client in 1024-byte chunks
    # (the original loop read from an undefined file handle left over
    # from a commented-out file-transfer example)
    for i in range(0, len(payload), 1024):
        chunk = payload[i:i + 1024]
        conn.send(chunk)
        print('Sent ', repr(chunk))
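Because the client cannot tell where the pickled payload ends, a common refinement (not in the original code) is to length-prefix each message; a sketch:

import struct

def send_msg(conn, payload):
    # prefix the payload with its 4-byte big-endian length
    conn.sendall(struct.pack('>I', len(payload)) + payload)

def recv_msg(conn):
    # read the 4-byte length header (assumed to arrive in one read),
    # then keep reading until exactly that many bytes have arrived
    length = struct.unpack('>I', conn.recv(4))[0]
    chunks = []
    while length > 0:
        chunk = conn.recv(min(length, 4096))
        if not chunk:
            raise ConnectionError("socket closed mid-message")
        chunks.append(chunk)
        length -= len(chunk)
    return b''.join(chunks)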
Example 9
from main import Article, Scrape
import constants

s = Scrape(constants.Username, constants.Password)
article_list = s.query('sopa', location=constants.Geo, graph=False)
# each Article exposes get_url, get_title, get_desc and get_date
for a in article_list:
    a.printArticle()
Example 10
from main import Article, Scrape
import constants

s = Scrape(constants.Username, constants.Password)
article_list = s.query('sopa')