def get_unchecked_links(self):
    """Return all links stored in the database that are still unchecked.

    MongoDB is reached over the clearnet, so the direct socket state is
    restored for the query and Tor is re-enabled immediately afterwards.
    """
    self.setup_original_socket_state()
    client = mongodb.mongo()
    pending = client.get_unchecked_links()
    self.setup_tor_connection()
    return pending
def is_link_exist(self, link):
    """Return whether *link* is already present in the database.

    Drops to the direct (non-Tor) socket for the MongoDB round-trip,
    then switches back to the Tor connection before returning.
    """
    self.setup_original_socket_state()
    client = mongodb.mongo()
    found = client.is_link_exist(link)
    self.setup_tor_connection()
    return found
def parse_page(self, page, link):
    """Parse a fetched page, persist the extracted data, and return new links.

    Uses BeautifulSoup to pull the title and og:description, then regexes
    over the raw page text to collect e-mail addresses, Telegram links and
    bitcoin wallet addresses.  Outbound .onion links that point at a
    *different* site than ``link`` are collected and returned so the
    crawler can queue them.

    Parameters
    ----------
    page : str
        Raw HTML of the fetched page.
    link : str
        The .onion URL the page was fetched from.

    Returns
    -------
    list
        Outbound .onion links extracted from the page.
    """
    parser = BeautifulSoup(page, 'html.parser')
    tags = []
    links = []
    mongo_client = mongodb.mongo()

    email_pattern = re.compile(
        r"([a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+)")
    telegram_pattern = re.compile(r"(https://t+\.+me/+[a-zA-Z0-9_.+-]+)")
    bitcoin_pattern = re.compile(r"^[13][a-km-zA-HJ-NP-Z1-9]{25,34}$")
    onion_pattern = re.compile(r"[^\s]+\.onion")

    try:
        title = parser.title.string
    except AttributeError:  # page has no <title> element
        title = None
    try:
        description = parser.find("meta", property="og:description")['content']
    except (TypeError, KeyError):  # tag missing, or no 'content' attribute
        description = None

    # Hoisted out of the anchor loop: skip links to binary assets
    # (case-insensitive), and compute the source domain only once.
    bad_extensions = ('.jpg', '.png', '.pdf', '.doc')
    try:
        source_domain = onion_pattern.findall(link)[0]
    except IndexError:  # source link is not a .onion URL
        source_domain = None
    for anchor in parser.find_all('a'):
        href = anchor.get('href')
        if not href:
            continue
        try:
            extracted_domain = onion_pattern.findall(href)[0]
        except IndexError:  # anchor does not contain a .onion domain
            continue
        # Only keep .onion links that point at a different site than the
        # one this page came from.
        if (".onion" in href
                and not href.lower().endswith(bad_extensions)
                and extracted_domain != source_domain):
            links.append(href)

    try:
        tags = mongo_client.add_tags(page)
    except Exception:  # best effort -- tagging must not abort the crawl
        pass

    try:
        date = datetime.datetime.now()
        emails = email_pattern.findall(page)
        telegram_links = telegram_pattern.findall(page)
        bitcoin_wallets = bitcoin_pattern.findall(page)
        # Mongo is reached over the clearnet, so drop Tor around the insert.
        self.setup_original_socket_state()
        mongo_client.insert_link_info(link, title, description, emails,
                                      telegram_links, bitcoin_wallets,
                                      links, tags, date)
        self.setup_tor_connection()
    except Exception as ex:
        print("error inserting data to the database")
        print(ex)
    # Fix: the documented contract is to return the extracted links, but the
    # original body never returned them.
    return links
for self.i in self.out: if (self.i["view"] == "pro"): self.procount = self.procount + 1 else: self.negcount = self.negcount + 1 print("Tweets for:", self.procount) print("Tweets against:", self.negcount) if self.procount > self.negcount: print("Twitter user are in favor of:", self.input) else: print("Twitter user are not in favor of:", self.input) elif (self.userAnswer == "4"): anas.twitPollCompare() elif (self.userAnswer == "5"): anas.outOldData() else: print("Plase enter a valid input (1,2,3,4,5).") go.menu() return 0 dis = display() threading.Thread(target=dis.slider, args=("Connecting ", )).start() twit = twitterAPI() mong = mongo() anas = analyse(mong.conn()) coll = collection(twit.authentigate(False), mong.conn()) go = Main(twit.authentigate(False), mong.conn()) dis.stop() go.menu() #calls the function that gets tweets and puts them in the DB
from bson import json_util
from werkzeug.security import check_password_hash, generate_password_hash
import secrets
from datetime import timedelta

app = Flask(__name__)
# NOTE(review): hard-coded secret key -- acceptable for development only;
# rotate it via environment/config before deploying (secrets is already
# imported and secrets.token_hex() would do, but changing it here would
# invalidate existing sessions).
app.secret_key = "super secret key"

login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"

user_record = mongo()


class User():
    """Minimal flask-login user object wrapping a username."""

    def __init__(self, username):
        self.username = username

    def is_authenticated(self):
        """Every constructed user counts as authenticated."""
        return True

    def is_active(self):
        """Accounts are never deactivated."""
        return True

    def is_anonymous(self):
        """Real users are never anonymous."""
        return False