def get_chrome_saved_password(chrome_path):
    """
    Decrypts the encrypted passwords in the Chrome "Login Data" database.
    It opens the Chrome password database and runs an SQL query to get the data.
    If an error occurs, the function returns a list according to the error system
    defined in the error.py file.
    :param chrome_path: the Chrome databases path
    :return: list of passwords - [[website, username, password]...]
    :return: error number - ['err', error_number, error_info]
    """
    data_path = os.path.join(chrome_path, 'Login Data')
    if not ut.file_exists(data_path):
        return ['err', 0, data_path]
    cursor = ut.connect_to_sqlite3_db(data_path)
    select_statement1 = 'SELECT action_url, username_value, password_value FROM logins'
    data = ut.execute_sql(cursor, select_statement1)
    if len(data) > 0:
        list_of_passwords = []
        for result in data:
            try:
                # Decrypt the password with the Windows DPAPI (CryptUnprotectData)
                password = win32crypt.CryptUnprotectData(
                    result[2], None, None, None, 0)[1]
            except Exception:
                # Decryption failed - keep the entry with an empty password
                password = ""
            list_of_passwords.append((result[0], result[1], password))
        return list_of_passwords
    return ['err', 1, select_statement1]
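# Minimal usage sketch (not part of the original module): shows how a caller
# might handle the ['err', error_number, error_info] convention from error.py.
def print_chrome_passwords(chrome_path):
    """Illustrative only - prints the decrypted logins or the error triple."""
    result = get_chrome_saved_password(chrome_path)
    if result[0] == 'err':
        print('error {}: {}'.format(result[1], result[2]))
        return
    for website, username, password in result:
        print('{} | {} | {}'.format(website, username, password))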
def get_history(firefox_path):
    """
    Extracts all the history out of the Firefox history database file.
    It opens the Firefox places database and runs SQL queries to get the data.
    If an error occurs, the function returns a list according to the error system
    defined in the error.py file.
    :param firefox_path: the Firefox profiles path
    :return: list with the history dictionary - [{url_id: {'url': str, 'visit_dates': [str, ...]}, ...}]
    :return: error number - ['err', [error_number, error_info]...]
    """
    select_statement1 = "SELECT id, url, last_visit_date FROM moz_places ORDER BY id"
    select_statement2 = "SELECT place_id, visit_date FROM moz_historyvisits ORDER BY place_id"
    profiles = [i for i in os.listdir(firefox_path) if i.endswith('.default')]
    history = []
    errs = ['err']
    for i in profiles:
        sqlite_path = os.path.join(firefox_path, i, 'places.sqlite')
        if not ut.file_exists(sqlite_path):
            errs.append([8, sqlite_path])
            continue
        cursor = ut.connect_to_sqlite3_db(sqlite_path)
        results1 = ut.execute_sql(cursor, select_statement1)
        results2 = ut.execute_sql(cursor, select_statement2)
        if len(results1) > 0:
            history_dict = {}
            for row in results1:  # Url data
                if row[2] > 0:  # Only urls that were actually visited
                    inner_dict = {}
                    to_remove = []
                    for visit in results2:  # Each url visit data
                        if visit[0] == row[0]:
                            date = str(
                                datetime.fromtimestamp(
                                    visit[1] / 1000000).strftime('%Y-%m-%d %H:%M:%S'))
                            if 'url' in inner_dict:  # The url is already in the dictionary - add the visit time
                                inner_dict['visit_dates'].append(date)
                            else:  # First visit of this url - add the url and the visit time
                                inner_dict['url'] = row[1]
                                inner_dict['visit_dates'] = [date]
                            to_remove.append(visit)
                        else:
                            break
                    history_dict[row[0]] = inner_dict
                    for r in to_remove:  # To reduce runtime
                        results2.remove(r)
            history.append(history_dict)
    if len(history) > 0:
        if len(errs) > 1:
            return [history[0], errs]
        return [history[0]]
    errs.append([1, select_statement1])
    return errs
def __init__(self):
    """
    The init function that runs when the object is created.
    It determines whether it is possible to extract data from Chrome.
    """
    self.name = "chrome"
    self.chrome_path = os.path.expanduser('~') + r"\AppData\Local\Google\Chrome\User Data\Default"
    self.is_valid = ut.file_exists(self.chrome_path)
    if self.is_valid:
        self.info_bank = {}
def __init__(self):
    """
    The init function that runs when the object is created.
    It determines whether it is possible to extract data from Firefox.
    """
    self.name = "firefox"
    self.firefox_path = os.path.expanduser('~') + "\\AppData\\Roaming\\Mozilla\\Firefox\\Profiles\\"
    self.is_valid = ut.file_exists(self.firefox_path)
    if self.is_valid:
        self.info_bank = {}
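# Minimal usage sketch. The class names Chrome and Firefox are assumptions made
# for illustration - this excerpt only shows the two __init__ methods, not the
# class definitions they belong to.
def collect_valid_browsers():
    """Illustrative only - instantiate both browsers and keep the usable ones."""
    browsers = [Chrome(), Firefox()]
    return [browser for browser in browsers if browser.is_valid]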
def get_bookmarks(chrome_path):
    """
    Reads all the bookmarks from the Chrome Bookmarks file (a JSON file).
    If an error occurs, the function returns a list according to the error system
    defined in the error.py file.
    :param chrome_path: the Chrome databases path
    :return: bookmarks
    :return: error number - ['err', error_number, error_info]
    """
    data_path = os.path.join(chrome_path, 'Bookmarks')
    if not ut.file_exists(data_path):
        return ['err', 3, data_path]
    with io.open(data_path, 'r', encoding='utf-8') as input_file:
        contents = loads(input_file.read())
    # Collect the bookmarks from the bookmark bar and from the "Other bookmarks" folder
    html_for_node(contents['roots']['bookmark_bar'])
    html_for_node(contents['roots']['other'])
    return BOOKMARKS
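# html_for_node and the BOOKMARKS global are defined elsewhere in the module and
# are not shown in this excerpt. As an illustration of the JSON tree the Bookmarks
# file uses (folders carry a 'children' list, leaves have type 'url'), a recursive
# walk over a node could look roughly like this sketch:
def walk_bookmark_node(node, found):
    """Illustrative only - collect (name, url) pairs from a Chrome bookmark node."""
    if node.get('type') == 'url':
        found.append((node.get('name'), node.get('url')))
    for child in node.get('children', []):
        walk_bookmark_node(child, found)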
def get_bookmarks(firefox_path):
    """
    Extracts all the bookmarks out of the Firefox bookmarks database file.
    It opens the Firefox places database and runs SQL queries to get the data.
    If an error occurs, the function returns a list according to the error system
    defined in the error.py file.
    :param firefox_path: the Firefox profiles path
    :return: list with the bookmarks dictionary - [{url_id: {'url': str, 'date_added': str, 'date_modified': str}, ...}]
    :return: error number - ['err', [error_number, error_info]...]
    """
    select_statement1 = "SELECT id, fk, parent, dateAdded, lastModified FROM moz_bookmarks"
    select_statement2 = "SELECT id, url, visit_count FROM moz_places"
    profiles = [i for i in os.listdir(firefox_path) if i.endswith('.default')]
    bookmarks = []
    errs = ['err']
    for i in profiles:
        sqlite_path = os.path.join(firefox_path, i, 'places.sqlite')
        if not ut.file_exists(sqlite_path):
            errs.append([7, sqlite_path])
            continue
        cursor = ut.connect_to_sqlite3_db(sqlite_path)
        results1 = ut.execute_sql(cursor, select_statement1)
        results2 = ut.execute_sql(cursor, select_statement2)
        book_marks_dict = {}
        for row in results1:
            if row[1] > 0:  # Only bookmark rows that point at a url (fk > 0)
                inner_dict = {}
                fk = row[1]
                for url in results2:  # Searching for the url data from the history
                    if url[0] == fk:
                        inner_dict['url'] = url[1]
                        date = str(
                            datetime.fromtimestamp(
                                row[3] / 1000000).strftime('%Y-%m-%d %H:%M:%S'))
                        inner_dict['date_added'] = date
                        date = str(
                            datetime.fromtimestamp(
                                row[4] / 1000000).strftime('%Y-%m-%d %H:%M:%S'))
                        inner_dict['date_modified'] = date
                        results2.remove(url)  # To reduce runtime
                        book_marks_dict[fk] = inner_dict
                        break
        bookmarks.append(book_marks_dict)
    if len(bookmarks) > 0:
        if len(errs) > 1:
            return [bookmarks[0], errs]
        return [bookmarks[0]]
    errs.append([1, select_statement1])
    return errs
def get_chrome_history(chrome_path):
    """
    Organizes the Chrome history data in a dictionary (key = url id in the database).
    It opens the Chrome history database and runs SQL queries to get the data.
    If an error occurs, the function returns a list according to the error system
    defined in the error.py file.
    :param chrome_path: the Chrome databases path
    :return: history dictionary - [{url_id: {'url': str, 'visit_time': list, 'visit_duration': list}, ...}, google_searches]
    :return: error number - ['err', error_number, error_info]
    """
    history_db = os.path.join(chrome_path, 'history')
    if not ut.file_exists(history_db):  # Checking if the database file exists
        return ['err', 2, history_db]
    cursor = ut.connect_to_sqlite3_db(history_db)
    select_statement1 = "SELECT * FROM visits"
    select_statement2 = "SELECT * FROM urls"
    select_statement3 = "SELECT * FROM keyword_search_terms"
    visits = ut.execute_sql(cursor, select_statement1)
    urls = ut.execute_sql(cursor, select_statement2)
    search_terms = ut.execute_sql(cursor, select_statement3)
    the_dict = {}
    if len(urls) > 0:
        for url in urls:  # Url data
            to_remove = []
            for visit in visits:  # Each url visit data
                if url[0] == visit[1]:
                    if url[0] not in the_dict:  # First visit of this url - add it to the dictionary
                        inner_dict = {}
                        inner_dict['url'] = url[1]
                        inner_dict['visit_time'] = [str(ut.real_time_google(visit[2]))]
                        inner_dict['visit_duration'] = [str(ut.real_time_google(visit[6], True))]
                        the_dict[url[0]] = inner_dict
                    else:  # The url is already in the dictionary - add the visit time and duration
                        the_dict[url[0]]['visit_time'].append(str(ut.real_time_google(visit[2])))
                        the_dict[url[0]]['visit_duration'].append(str(ut.real_time_google(visit[6], True)))
                    to_remove.append(visit)
            for r in to_remove:  # To reduce runtime
                visits.remove(r)
        searches = {}
        for search in search_terms:  # Adding the Google searches
            if search[1] in the_dict:  # Chrome keeps the history for 90 days, but the searches for longer
                searches[search[2]] = the_dict[search[1]]
            else:  # Occurs if the search is older than 90 days
                searches[search[2]] = ""
        return [the_dict, searches]
    return ['err', 1, select_statement2]
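# ut.real_time_google is defined in the project's utils module and is not shown
# in this excerpt. As a rough sketch of the timestamp conversion it is assumed to
# perform: Chrome stores visit times as microseconds since 1601-01-01 (the WebKit
# epoch), so a stand-alone version could look like this (illustration only; the
# duration case, where the value is a plain microsecond count, is not covered):
def webkit_timestamp_to_datetime(webkit_us):
    """Illustrative only - convert a Chrome/WebKit timestamp to a datetime."""
    from datetime import datetime, timedelta
    return datetime(1601, 1, 1) + timedelta(microseconds=webkit_us)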
def get_all_cookies(chrome_path):
    """
    Decrypts all the encrypted cookies out of the Chrome Cookies file.
    It opens the Chrome cookies database and runs an SQL query to get the data.
    If an error occurs, the function returns a list according to the error system
    defined in the error.py file.
    :param chrome_path: the Chrome databases path
    :return: list of cookies - [{host: [{'name': str, 'value': str, 'expire': str, 'time_created': str}, ...], ...}]
    :return: error number - ['err', error_number, error_info]
    """
    data_path = os.path.join(chrome_path, 'Cookies')
    if not ut.file_exists(data_path):  # Checking if the database file exists
        return ['err', 4, data_path]
    cursor = ut.connect_to_sqlite3_db(data_path)
    data = ut.execute_sql(cursor, SELECT_STATEMENT1)
    x = 0
    if len(data) > 0:
        cookies = {}
        for result in data:
            try:
                # Decrypt the cookie value with the Windows DPAPI (CryptUnprotectData)
                cookie = win32crypt.CryptUnprotectData(
                    result[0], None, None, None, 0)[1]
            except Exception:
                continue
            if cookie:
                if len(result[1]) > 0:
                    if result[1][0] == '.':  # Strip the leading dot from the host
                        host = result[1][1:]
                    else:
                        host = result[1]
                else:
                    host = "no site" + str(x)
                    x += 1
                time = ut.real_time_google(result[3])
                time2 = ut.real_time_google(result[4])
                inner_dict = {
                    "name": result[2],
                    "value": cookie,
                    "expire": str(time),
                    "time_created": str(time2)
                }
                if host not in cookies:  # A site may have more than one cookie
                    cookies[host] = [inner_dict]
                else:
                    cookies[host].append(inner_dict)
        return [cookies]
    return ['err', 1, SELECT_STATEMENT1]
def get_all_cookies(firefox_path):
    """
    Extracts all the cookies out of the Firefox cookies database file.
    It opens the Firefox cookies database and runs an SQL query to get the data.
    If an error occurs, the function returns a list according to the error system
    defined in the error.py file.
    :param firefox_path: the Firefox profiles path
    :return: list of cookies - [{host: [{'name': str, 'value': str, 'creationTime': str, 'expiry': str}, ...], ...}]
    :return: error number - ['err', [error_number, error_info]...]
    """
    select_statement1 = "SELECT baseDomain, name, value, expiry, creationTime FROM moz_cookies"
    profiles = [i for i in os.listdir(firefox_path) if i.endswith('.default')]
    cookies = []
    errs = ['err']
    for i in profiles:
        sqlite_path = os.path.join(firefox_path, i, 'cookies.sqlite')
        if not ut.file_exists(sqlite_path):
            errs.append([9, sqlite_path])
            continue
        cursor = ut.connect_to_sqlite3_db(sqlite_path)
        results1 = ut.execute_sql(cursor, select_statement1)
        cookies_dict = {}
        if len(results1) > 0:
            for cookie in results1:
                creation = str(
                    datetime.fromtimestamp(
                        cookie[4] / 1000000).strftime('%Y-%m-%d %H:%M:%S'))  # creationTime is stored in microseconds
                expiry = str(
                    datetime.fromtimestamp(
                        cookie[3]).strftime('%Y-%m-%d %H:%M:%S'))  # expiry is already stored in seconds
                inner_dict = {
                    "name": cookie[1],
                    'value': cookie[2],
                    'creationTime': creation,
                    'expiry': expiry
                }
                if cookie[0] not in cookies_dict:  # A site may have more than one cookie
                    cookies_dict[cookie[0]] = [inner_dict]
                else:
                    cookies_dict[cookie[0]].append(inner_dict)
            cookies.append(cookies_dict)
    if len(cookies) > 0:
        if len(errs) > 1:
            return [cookies[0], errs]
        return [cookies[0]]
    errs.append([1, select_statement1])
    return errs
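# Minimal usage sketch (not part of the original module): unlike the Chrome
# helpers, the Firefox functions report errors as ['err', [error_number, error_info], ...],
# possibly alongside the data as [data, errs]. A caller might unpack it like this:
def print_firefox_cookie_hosts(firefox_path):
    """Illustrative only - prints the cookie hosts or the collected errors."""
    result = get_all_cookies(firefox_path)
    if result[0] == 'err':  # Only errors were returned
        for err_number, err_info in result[1:]:
            print('error {}: {}'.format(err_number, err_info))
        return
    cookies_dict = result[0]
    for host in cookies_dict:
        print('{}: {} cookies'.format(host, len(cookies_dict[host])))
    if len(result) > 1:  # Data plus a non-empty error list
        print('with errors: {}'.format(result[1][1:]))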