def save_Preset(self):
    # A preset needs a name; bail out early instead of saving a blank one.
    if len(self.Dialog_Window.window.Name_Input.text()) == 0:
        QMessageBox.about(self, "eMag Price Tracker",
                          "You must choose a name for the preset!")
        return
    presets = self.get_Presets()
    if str(self.Dialog_Window.window.Name_Input.text()) in presets:
        QMessageBox.about(self, "eMag Price Tracker",
                          "The preset already exists!")
    else:
        db_manager = DatabaseManager('Database.db')
        if db_manager.check_database():
            cursor = db_manager.conn.cursor()
            cursor.execute("""create table if not exists Products(
                                link text,
                                preset text,
                                previous_price integer
                              )""")
            # Save every tracked link under the preset name; previous_price
            # starts at 0 and is filled in on the first price check.
            for i in range(self.window.Tracker_List.count()):
                cursor.execute(
                    """INSERT INTO Products VALUES(?,?,?)""",
                    (str(self.window.Tracker_List.item(i).text()),
                     str(self.Dialog_Window.window.Name_Input.text()), 0))
            cursor.execute("SELECT * FROM Products")
            print(cursor.fetchall())
            db_manager.conn.commit()
            db_manager.close_connection()
            self.window.Tracker_List.clear()
    self.Dialog_Window.hide()
def get_Presets(self):
    db_manager = DatabaseManager('Database.db')
    db_manager.check_database()
    cursor = db_manager.conn.cursor()
    cursor.execute("SELECT DISTINCT(preset) FROM Products")
    # avoid shadowing the built-in `list`
    presets = [row[0] for row in cursor.fetchall()]
    db_manager.close_connection()
    return presets
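# The DatabaseManager wrapper used throughout these snippets isn't shown.
# Below is a minimal sketch of what it could look like, assuming plain
# sqlite3 underneath; the real class may do more (paths, migrations, etc.).

import sqlite3


class DatabaseManager:
    """Sketch of the sqlite3 wrapper assumed by the calls above."""

    def __init__(self, db_name='Database.db', memory_db=False):
        # memory_db=True gives tests a throwaway in-memory database
        self.db_name = ':memory:' if memory_db else db_name
        self.conn = None

    def check_database(self):
        # open the connection lazily and report whether it is usable
        try:
            if self.conn is None:
                self.conn = sqlite3.connect(self.db_name)
            return True
        except sqlite3.Error:
            return False

    def close_connection(self):
        if self.conn is not None:
            self.conn.close()
            self.conn = None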
def check_price(self, products, Notif_List, email):
    header = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/83.0.4103.116 Safari/537.36"
    }
    # initialize the prices (a stored price of 0 means "never checked")
    previous_prices = [x[1] for x in products]
    links = [x[0] for x in products]
    if 0 in previous_prices:
        # initialize_prices returns the freshly fetched prices, so the
        # local list doesn't keep the 0 sentinel
        previous_prices = self.initialize_prices(products)
    # check the prices every 300 seconds
    db_manager = DatabaseManager('Database.db')
    db_manager.check_database()
    cursor = db_manager.conn.cursor()
    while True:
        for i in range(len(links)):
            page = requests.get(links[i], headers=header)
            main = BeautifulSoup(page.content, 'html.parser')
            price = main.find(class_="product-new-price").get_text()
            name = main.find(class_="page-title").get_text().strip()
            price = price.strip()
            price = self.number(price[:-6])  # drop the currency suffix
            if price < previous_prices[i]:
                aux = previous_prices[i] - price
                print_message = ("Price has dropped by {0} lei "
                                 "for product {1}".format(aux, name))
                previous_prices[i] = price
                subject = ("Price was decreased by {0} lei "
                           "for product {1}".format(aux, name))
                body = "Check the link {}".format(links[i])
                message = f"Subject: {subject}\n\n{body}"
                self.send_mail(message, email)
                cursor.execute(
                    "UPDATE Products SET previous_price =? WHERE link=?",
                    (previous_prices[i], links[i]))
                Notif_List.addItem(QListWidgetItem(print_message))
                db_manager.conn.commit()
            elif price > previous_prices[i]:
                # the stored price is left untouched on an increase, so
                # future drops are measured against the lowest price seen
                print_message = ("Price was increased by {0} lei "
                                 "for product {1}".format(
                                     price - previous_prices[i], name))
                Notif_List.addItem(QListWidgetItem(print_message))
            else:
                print_message = "Price is the same for product {0}".format(
                    name)
                Notif_List.addItem(QListWidgetItem(print_message))
            print(print_message)
        time.sleep(300)
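# self.number(...) is referenced but not defined in these excerpts. A
# plausible stand-in, assuming eMag renders prices like "1.234" with "."
# as the thousands separator (the helper name is taken from the calls above):

def number(self, text):
    # keep digits only, so "1.234" becomes 1234; returns 0 for empty input
    digits = ''.join(ch for ch in text if ch.isdigit())
    return int(digits) if digits else 0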
def load_Preset(self):
    self.window.Notif_List.clear()
    # currentItem() is None when nothing is selected, so guard before
    # calling .text() on it
    item = self.Select_Window.window.List.currentItem()
    if item is None:
        return
    preset_name = item.text()
    db_manager = DatabaseManager('Database.db')
    db_manager.check_database()
    cursor = db_manager.conn.cursor()
    cursor.execute(
        "SELECT link,previous_price FROM Products WHERE preset=?",
        (preset_name, ))
    self.Select_Window.hide()
    self.window.Status_Label.setText(str(preset_name) + " loaded!")
    # run the price checker off the UI thread so the window stays responsive
    self.start_thread(cursor.fetchall(), self.window.Notif_List, self.email)
    self.preset_is_loaded = True
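# start_thread is referenced but not shown. One way to keep the UI
# responsive is a daemon thread running check_price; this is a sketch under
# that assumption, not necessarily the app's actual implementation.

import threading


def start_thread(self, products, Notif_List, email):
    worker = threading.Thread(target=self.check_price,
                              args=(products, Notif_List, email),
                              daemon=True)
    worker.start()

# Note: check_price touches Qt widgets directly; updating them from a worker
# thread is not strictly safe, so a signal/slot bridge would be more robust.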
def __init__(self):
    self.db = DatabaseManager.DatabaseManager()
    # run blocking work on a shared pool instead of the event loop
    self.threadPool = ThreadPoolExecutor(10)
    asyncio.get_event_loop().set_default_executor(self.threadPool)
    self.users = Users.User()
    self.chats = Chats.Chats()
    self.private_key: RsaKey = None
    self.public_key: RsaKey = None
    self.load_key()
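# load_key is not shown. Given the RsaKey annotations, a PyCryptodome-based
# sketch follows; the key file name and on-disk format are assumptions.

from pathlib import Path

from Crypto.PublicKey import RSA


def load_key(self):
    key_file = Path('server_key.pem')  # hypothetical location
    if key_file.exists():
        self.private_key = RSA.import_key(key_file.read_bytes())
    else:
        # first run: generate a key pair and persist the private half
        self.private_key = RSA.generate(2048)
        key_file.write_bytes(self.private_key.export_key())
    self.public_key = self.private_key.publickey()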
def delete_Preset(self):
    # guard against the delete button being pressed with no selection
    item = self.Select_Window2.window.List.currentItem()
    if item is None:
        return
    preset_name = item.text()
    db_manager = DatabaseManager('Database.db')
    db_manager.check_database()
    cursor = db_manager.conn.cursor()
    cursor.execute("DELETE FROM Products WHERE preset=?", (preset_name, ))
    cursor.execute("SELECT * FROM Products")
    print(cursor.fetchall())
    db_manager.conn.commit()
    db_manager.close_connection()
    self.Select_Window2.hide()
def initialize_prices(self, products):
    # fetch the current price for every product and persist it
    previous_prices = [x[1] for x in products]
    links = [x[0] for x in products]
    header = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/83.0.4103.116 Safari/537.36"
    }
    for i in range(len(links)):
        page = requests.get(str(links[i]), headers=header)
        main = BeautifulSoup(page.content, 'html.parser')
        price = main.find(class_="product-new-price").get_text().strip()
        previous_prices[i] = self.number(price[:-6])  # drop currency suffix
    db_manager = DatabaseManager('Database.db')
    db_manager.check_database()
    cursor = db_manager.conn.cursor()
    for i in range(len(previous_prices)):
        cursor.execute(
            "UPDATE Products SET previous_price =? WHERE link=?",
            (previous_prices[i], links[i]))
    db_manager.conn.commit()
    db_manager.close_connection()
    # return the fresh prices so callers (check_price) can reuse them
    return previous_prices
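# check_price calls self.send_mail, which isn't shown. A minimal smtplib
# sketch, assuming an SSL SMTP endpoint; the server, port, and credential
# handling here are placeholders, not the app's real configuration.

import smtplib
import ssl


def send_mail(self, message, email):
    context = ssl.create_default_context()
    with smtplib.SMTP_SSL('smtp.gmail.com', 465, context=context) as server:
        server.login(email, 'app-password')  # placeholder credential
        server.sendmail(email, email, message)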
class Crawler(QObject, AbstractCrawler):
    db = None
    """ @type: DatabaseManager """
    rest = None
    """ @type: RestCrawler """
    streaming = None
    """ @type: StreamingCrawler """
    auth = None
    """ @type: OAuthHandler """
    threadPool = []
    """ @type: Array """

    def __init__(self):
        QObject.__init__(self)
        AbstractCrawler.__init__(self)
        self.settings = QSettings("RFCode", "TwitterCrawler")
        self.db = DatabaseManager()
        self.max_id = 0

    def authInit(self):
        self.rest = RestCrawler(self.auth)
        self.streaming = StreamingCrawler(
            self.auth, headers={"User-Agent": "TwitterCrawler/1.0"})
        self.rest.restDataReady.connect(self.updateSearchStep)
        self.streaming.listener.streamingDataReady.connect(
            self.updateSearchStep)
        self.streaming.listener.streamingError.connect(self.errorHandler)

    def getAuthUrl(self):
        self.auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
        return self.auth.get_authorization_url()

    def setAuthAccess(self, verifier):
        self.auth.get_access_token(verifier)
        self.settings.setValue("auth_key", self.auth.access_token.key)
        self.settings.setValue("auth_secret", self.auth.access_token.secret)

    def login(self):
        key = self.settings.value("auth_key")
        secret = self.settings.value("auth_secret")
        if key is None or secret is None:
            return False
        if self.auth is None:
            self.auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
        self.auth.set_access_token(key, secret)
        self.authInit()
        return True

    @Slot(int)
    def errorHandler(self, code):
        print("error", code)
        if code >= 600:
            return
        self.stop()
        # if code == 420:    # rate limit error
        #     pass           # TODO
        # elif code == 600:  # streaming parsing error
        #     pass
        # elif code == 601:  # database insertion error
        #     pass

    @Slot("QVariant")
    def updateSearchStep(self, step):
        # walk backwards so items can be removed while iterating
        for i in range(len(step) - 1, -1, -1):
            dbId = self.db.addTweet(
                step[i]["userName"], step[i]["tweet"],
                step[i]["year"], step[i]["month"], step[i]["day"],
                step[i]["hour"], step[i]["minute"], step[i]["second"])
            step[i]["dbId"] = dbId
            if dbId == -1:
                print("duplicated")
                step.pop(i)
            else:
                for h in step[i]["hashtags"]:
                    self.db.addHashtag(dbId, h)
                for l in step[i]["links"]:
                    self.db.addLink(dbId, l)
                if step[i]["location"] is not False:
                    self.db.addLocation(dbId, step[i]["location"]["lat"],
                                        step[i]["location"]["lon"])
                if step[i]["id"] is not None and self.max_id < step[i]["id"]:
                    self.max_id = step[i]["id"]
        self.db.commit()

    @AbstractCrawler.crawlingAction
    @AbstractCrawler.traceHistory
    def getTweetsInsideArea(self, lat1, lon1, lat2, lon2,
                            crawler=REST_CRAWLER | STREAMING_CRAWLER,
                            **parameters):
        """Get tweets inside the given bounding box"""
        if (crawler & REST_CRAWLER) == REST_CRAWLER:
            self.threadPool.append(
                MyThread(self.rest.getTweetsInsideArea,
                         lat1, lon1, lat2, lon2, **parameters))
        if (crawler & STREAMING_CRAWLER) == STREAMING_CRAWLER:
            self.threadPool.append(
                MyThread(self.streaming.getTweetsInsideArea,
                         lat1, lon1, lat2, lon2, **parameters))

    @AbstractCrawler.crawlingAction
    @AbstractCrawler.traceHistory
    def getTweetsByContent(self, content,
                           crawler=REST_CRAWLER | STREAMING_CRAWLER,
                           **parameters):
        if (crawler & REST_CRAWLER) == REST_CRAWLER:
            self.threadPool.append(
                MyThread(self.rest.getTweetsByContent, content, **parameters))
        if (crawler & STREAMING_CRAWLER) == STREAMING_CRAWLER:
            self.threadPool.append(
                MyThread(self.streaming.getTweetsByContent, content))

    @AbstractCrawler.crawlingAction
    @AbstractCrawler.traceHistory
    def getTweetsByUser(self, username,
                        crawler=REST_CRAWLER | STREAMING_CRAWLER,
                        **parameters):
        if (crawler & REST_CRAWLER) == REST_CRAWLER:
            self.threadPool.append(
                MyThread(self.rest.getTweetsByUser, username, **parameters))
        if (crawler & STREAMING_CRAWLER) == STREAMING_CRAWLER:
            self.threadPool.append(
                MyThread(self.streaming.getTweetsByUser, username,
                         **parameters))

    def export(self, output):
        self.db.dumpDb(output)

    def stop(self):
        self.max_id = 0
        removable = []
        self.streaming.stop()
        for i in range(len(self.threadPool)):
            if not self.threadPool[i].isRunning():
                removable.append(i)
        # pop from the end so the earlier indices stay valid
        for i in reversed(removable):
            self.threadPool.pop(i)
        self.db.commit()
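# MyThread isn't shown either. It is constructed with a callable plus its
# arguments and later queried with isRunning(), which suggests a thin QThread
# wrapper along these lines (starting itself on creation is an assumption,
# since the crawler never calls start() explicitly):

from PySide.QtCore import QThread  # or whichever Qt binding the project uses


class MyThread(QThread):
    def __init__(self, target, *args, **kwargs):
        QThread.__init__(self)
        self.target = target
        self.args = args
        self.kwargs = kwargs
        self.start()

    def run(self):
        self.target(*self.args, **self.kwargs)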
def setUp(self) -> None:
    # drop any extra CLI arguments so they don't leak into the code
    # under test
    sys.argv = [sys.argv[0]]
    self.db = DatabaseManager.DatabaseManager(memory_db=True)
    self.ws = WebSocketServer.WebSocketServer()
def main(cls):
    # initialize the database, then start the WebSocket server
    db_manage = DatabaseManager.DatabaseManager()
    db_manage.db_init()
    WebSocketServer.WebSocketServer.start_server()
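# For completeness, one way this classmethod-style main could be wired up as
# the program entry point; 'Server' is a placeholder for the enclosing class,
# which isn't shown in this excerpt.

if __name__ == '__main__':
    Server.main()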