def read_device_activations(directory):
    """Import Android device-activation records from Google Takeout HTML files.

    Each .html file is parsed line by line: lines ending in "<br/>" are
    treated as "Key: Value" pairs, which are collected into a dict and
    turned into one registration event.
    """
    activation_directory = directory + "Android Device configuration service"
    html_paths = (os.path.join(activation_directory, entry)
                  for entry in os.listdir(activation_directory)
                  if entry.endswith(".html"))
    for path in html_paths:
        with open(path, "r") as handle:
            stripped_lines = [raw.strip() for raw in handle.readlines()]
        fields = {}
        for row in stripped_lines:
            # Only "Key: Value<br/>" rows carry data; drop the "<br/>" suffix.
            if not row.endswith("<br/>"):
                continue
            row = row[0:-5]
            parts = row.split(":")
            if len(parts) < 2:
                continue
            # Re-join the value in case it contained further colons.
            fields[parts[0]] = ":".join(parts[1:]).strip()
        time = dateutil.parser.parse(fields["Registration Time"])
        kvps = {
            "model": fields["Model"],
            "manufacturer": fields["Manufacturer"],
            "device": fields["Device"],
            "product": fields["Product"],
            "type": fields["Device Type"]
        }
        events.add(
            "Registered " + fields["Device Type"] + ": " +
            fields["Manufacturer"] + " " + fields["Model"],
            time, ["google", "device", "android"], kvps)
def import_reddit(directory="data/reddit/"):
    """Import reddit submissions and comments into the event timeline.

    Downloads the data via get_data() if the expected JSON files are missing.
    Fix: both JSON files are now read inside context managers so the file
    handles are closed deterministically (the original leaked them).
    """
    events.prepare_import(3)
    if not os.path.isfile(directory + "submissions.json") or not os.path.isfile(
            directory + "comments.json"):
        get_data(directory)
    with db.atomic():
        with open(directory + "submissions.json") as file:
            submissions = json.load(file)
        print("Importing reddit submissions...")
        for submission in submissions:
            time = datetime.datetime.fromtimestamp(submission["time"])
            events.add(
                "Posted to " + submission["subreddit"] + ": " + submission["title"],
                time,
                ["reddit", "post", submission["subreddit"]],
                # Keep every field except the timestamp as key/value pairs.
                kvps={k: submission[k] for k in submission if k != "time"})
        with open(directory + "comments.json") as file:
            comments = json.load(file)
        print("Importing reddit comments...")
        for comment in comments:
            time = datetime.datetime.fromtimestamp(comment["time"])
            events.add(
                "Commented in " + comment["subreddit"] + ": " +
                create_comment_summary(comment["message"]),
                time,
                ["reddit", "comment", comment["subreddit"]],
                kvps={k: comment[k] for k in comment if k != "time"})
def read_google_play_installs(directory):
    """Import Google Play app installations, grouping installs per device.

    Fix: Installs.json is read inside a context manager so the file handle
    is closed deterministically (the original leaked it).
    """
    with open(directory + "Google Play Store/Installs.json",
              encoding="utf8") as file:
        install_data = json.load(file)
    group_importer = importer.generic.GroupEvents()
    for install in [item["install"] for item in install_data]:
        time = dateutil.parser.parse(install["firstInstallationTime"])
        name = install["doc"]["title"]
        device = install["deviceAttribute"]["deviceDisplayName"]
        # Group by device so several installs collapse into one summary event.
        group_importer.add(time, (name, device), device)
    group_importer.create_events(
        # One event for an isolated install ...
        create_single=lambda time, data: events.add(
            "Installed " + data[0] + " on " + data[1],
            time, ["google", "googleplay", "install"],
            {"app": data[0], "device": data[1]}),
        # ... or one summary event for a batch of installs on the same device.
        create_many=lambda time, data: events.add(
            "Installed " + data[0][0] + " and " + str(len(data) - 1) +
            " other " + ("app" if len(data) == 2 else "apps") + " on " +
            data[0][1],
            time, ["google", "googleplay", "install"],
            {"apps": "\n".join([item[0] for item in data]),
             "device": data[0][1]}))
def read_friends(directory):
    """Create one timeline event per Facebook friend addition."""
    friend_data = load_to_json(directory + "friends/friends_added.json")
    for entry in friend_data["friends"]:
        when = datetime.datetime.fromtimestamp(entry["timestamp"])
        friend_name = fix_encoding(entry["name"])
        events.add("Added Facebook friend " + friend_name + ".", when,
                   ["facebook", "friend"], {"name": friend_name})
def read_youtube_uploads(directory):
    """Import uploaded YouTube videos, attaching the largest thumbnail.

    Fixes: the per-video JSON file is read inside a context manager (the
    handle was leaked), and the tallest thumbnail is selected with max()
    instead of sorting the whole key list.
    """
    video_directory = directory + "YouTube/videos/"
    for filename in [
            os.path.join(video_directory, name)
            for name in os.listdir(video_directory) if name.endswith(".json")
    ]:
        with open(filename, encoding="utf8") as file:
            data = json.load(file)
        video = data[0]
        snippet = video["snippet"]
        time = dateutil.parser.parse(snippet["publishedAt"])
        # Choose the thumbnail variant with the greatest pixel height.
        thumbnail_key = max(
            snippet["thumbnails"],
            key=lambda name: snippet["thumbnails"][name]["height"])
        thumbnail_url = snippet["thumbnails"][thumbnail_key]["url"]
        # Local filename: "<video id>.<extension>", query string stripped.
        thumbnail_filename = video_directory + video["id"] + "." + \
            thumbnail_url.split(".")[-1].split("?")[0]
        download_file(thumbnail_url, thumbnail_filename)
        kvps = {
            "description": snippet["description"],
            "channel": snippet["channelTitle"],
            "views": video["statistics"]["viewCount"],
            "likes": video["statistics"]["likeCount"],
            "title": snippet["title"],
            "url": "https://www.youtube.com/watch?v=" + video["id"],
            "visibility": video["status"]["privacyStatus"]
        }
        events.add("Uploaded video: " + snippet["title"], time,
                   ["youtube", "video"], kvps,
                   images=[(time, thumbnail_filename)])
def read_comments(directory):
    """Create timeline events from the exported Facebook comments file."""
    comment_data = load_to_json(directory + "comments/comments.json")
    for entry in comment_data["comments"]:
        when = datetime.datetime.fromtimestamp(entry["timestamp"])
        text = fix_encoding(entry["data"][0]["comment"]["comment"])
        summary = "Facebook: " + fix_encoding(entry["title"])
        events.add(summary, when, ["facebook", "comment"], {"message": text})
def read_saved_places(directory):
    """Import places saved in Google Maps as geotagged events.

    Fixes: dict.has_key() (removed in Python 3) replaced by the `in`
    operator, and the JSON file is read inside a context manager so the
    handle is closed (the original leaked it).
    """
    with open(directory + "Maps (your places)/Saved Places.json") as file:
        data = json.load(file)
    for place in data["features"]:
        properties = place["properties"]
        url = properties["Google Maps URL"]
        location = properties["Location"]
        # Coordinates appear either directly or under "Geo Coordinates";
        # default to (0, 0) when neither form is present.
        latitude = 0
        longitude = 0
        if "Latitude" in location:
            latitude = float(location["Latitude"])
            longitude = float(location["Longitude"])
        elif "Geo Coordinates" in location:
            latitude = float(location["Geo Coordinates"]["Latitude"])
            longitude = float(location["Geo Coordinates"]["Longitude"])
        name = properties["Title"]
        time = dateutil.parser.parse(properties["Published"])
        events.add("Saved place with Google Maps: " + name, time,
                   ["google", "maps", "place"],
                   {"name": name, "url": url},
                   latitude=latitude, longitude=longitude)
def read_photos(directory):
    """Create one event (with the image attached) per photo in each album."""
    photo_directory = directory + "photos/album/"
    album_files = [os.path.join(photo_directory, entry)
                   for entry in os.listdir(photo_directory)]
    for album_file in album_files:
        album = load_to_json(album_file)
        album_name = fix_encoding(album["name"])
        for photo in album["photos"]:
            path = directory + photo["uri"]
            metadata = photo["media_metadata"]["photo_metadata"]
            # Prefer the capture timestamp; fall back to modification time.
            if "taken_timestamp" in metadata:
                when = datetime.datetime.fromtimestamp(
                    metadata["taken_timestamp"])
            else:
                when = datetime.datetime.fromtimestamp(
                    metadata["modified_timestamp"])
            tags = ["facebook", "photo"]
            kvps = {}
            # Tag the event with the camera when EXIF make/model survived.
            if "camera_make" in metadata and "camera_model" in metadata:
                camera = metadata["camera_make"] + " " + metadata["camera_model"]
                tags.append(camera)
                kvps["camera"] = camera
            latitude = metadata["latitude"] if "latitude" in metadata else None
            longitude = metadata["longitude"] if "longitude" in metadata else None
            events.add("Added photo to Facebook album " + album_name + ".",
                       when, tags, kvps,
                       latitude=latitude,
                       longitude=longitude,
                       images=[(when, path)])
def new_order():
    """Flask view: render the new-order form (GET) or create an order (POST).

    Fix: the original POST validation read
        clinic_id != '0' or order_type_id != '0' or customer_id != '0'
            and session["csrf_token"] == token
    where `and` binds tighter than `or`, so the CSRF token was effectively
    never checked (any non-'0' clinic or order type satisfied the chain).
    The empty-field branch above already rejects the '0' values, so the
    elif now simply verifies the token.
    """
    order_type_list = orders.order_type_list()
    customer_list = customers.customer_list()
    clinic_list = customers.clinic_list()
    if request.method == "GET":
        # Status 0 and 1 are the roles allowed to create orders.
        if users.user_status() == 1 or users.user_status() == 0:
            return render_template('new_order.html',
                                   order_type_list=order_type_list,
                                   customer_list=customer_list,
                                   clinic_list=clinic_list)
        else:
            return render_template("error.html", message="Käyttäjän oikeudet eivät riitä tähän toimintoon.")
    if request.method == "POST":
        clinic_id = request.form["clinic_id"]
        order_type_id = request.form["order_type_id"]
        customer_id = request.form["customer_id"]
        d_date = request.form["delivery_date"]
        d_time = request.form["delivery_time"]
        delivery_date = d_date + ' ' + d_time + ':00.000000'
        token = request.form["csrf_token"]
        # '0' is the placeholder value of the dropdowns: reject empty input.
        if clinic_id == '0' or order_type_id == '0' or customer_id == '0' or d_date == '':
            flash("Täytä kaikki kentät!", "warning")
            return redirect(request.url)
        elif session["csrf_token"] == token:
            latest_id = orders.add(order_type_id, customer_id, delivery_date,
                                   clinic_id)
            if latest_id is not None:
                events.add(latest_id, users.user()[0], "Sisäänkirjaus", 0)
                flash("Tilaus lisätty! Tilauksen id on: " + str(latest_id) +
                      ". Kirjoita se lähetteeseen.", "success")
                return redirect(request.url)
            else:
                flash("Tilauksen lisääminen epäonnistui", "warning")
                return redirect(request.url)
def read_calendar(directory):
    """Import every VEVENT from each exported Google Calendar iCal file.

    Fix: calendar files are read inside context managers so the file
    handles are closed deterministically (the original leaked them).
    """
    calendar_directory = directory + "Calendar/"
    for calendar_path in [
            os.path.join(calendar_directory, name)
            for name in os.listdir(calendar_directory)
    ]:
        with open(calendar_path) as file:
            cal = icalendar.Calendar.from_ical(file.read())
        for event in cal.walk("vevent"):
            events.add("Google Calendar event: " + event.get("summary"),
                       event.get("dtstart").dt,
                       ["google", "calendar", "event"],
                       kvps={"location": event.get("location")})
def read_locations(directory):
    """Import Google Location History points as geotagged events.

    Fixes: dict.has_key() (removed in Python 3) replaced by `not in`, and
    the JSON file is read inside a context manager so the handle is closed.
    """
    with open(directory + "Location History/Location History.json") as file:
        data = json.load(file)
    for item in data["locations"]:
        # timestampMs is a millisecond epoch stored as a string.
        time = datetime.datetime.fromtimestamp(int(item["timestampMs"]) / 1000)
        if "latitudeE7" not in item:
            continue
        # Coordinates are integers scaled by 1e7.
        latitude = item["latitudeE7"] * 1e-7
        longitude = item["longitudeE7"] * 1e-7
        events.add("Google Location History", time, ["google", "location"],
                   latitude=latitude, longitude=longitude)
def log_location(message, contact):
    """Record a sent or received WhatsApp location message as a geotagged event."""
    when = datetime.datetime.fromtimestamp(message.timestamp / 1000)
    sent = message.is_sent()
    direction = "Sent" if sent else "Received"
    preposition = "to" if sent else "from"
    # Append the caption when present, otherwise just end the sentence.
    if message.media_caption is not None:
        suffix = ": " + message.media_caption.encode("utf-8")
    else:
        suffix = "."
    summary = (direction + " location " + preposition + " " +
               contact.get_display_name() + suffix)
    if message.media_name is not None:
        kvps = {"message": message.media_name.encode("utf-8")}
    else:
        kvps = {}
    events.add(summary, when, ["whatsapp", "message", "location"], kvps,
               latitude=message.latitude, longitude=message.longitude)
def import_steam(directory="data/steam/"):
    """Import Steam library additions scraped from saved purchase-history HTML.

    Fix: the HTML files are read inside context managers so the file
    handles are closed deterministically (the original leaked them).
    """
    events.prepare_import(2)
    print("Importing Steam purchases...")
    with db.atomic():
        for file_name in [os.path.join(directory, name)
                          for name in os.listdir(directory)]:
            with open(file_name) as file:
                tree = html.fromstring(file.read())
            table = tree.xpath("//*[@id=\"main_content\"]/div/div/div/div/table/tbody")
            # Skip the header row of the purchase table.
            for tr in table[0][1:]:
                time = dateutil.parser.parse(tr[0].text)
                name = tr[1].text.strip()
                # Some rows wrap the title in a child element; use its tail text.
                if len(tr[1]) == 1:
                    name = tr[1][-1].tail.strip()
                events.add("Added Steam game " + name + " to library.", time,
                           ["steam", "game"], kvps={"name": name})
def import_photos(directory="data/photos/"):
    """Import photos using EXIF capture time, camera model and GPS position.

    Fixes: Python 2-only constructs removed (`print` statements and
    dict.has_key), the bare `except:` around events.add narrowed to
    `except Exception`, and the image files are read inside context
    managers so the handles are closed.
    """
    with db.atomic():
        count = 0
        last_update = datetime.datetime.now()
        print("Importing images from " + directory + "...")
        for file in get_files(directory):
            with open(file, 'rb') as handle:
                tags = exifread.process_file(handle)
            try:
                time = datetime.datetime.strptime(
                    tags["Image DateTime"].values, "%Y:%m:%d %H:%M:%S")
            except ValueError:
                print("Bad time format: " + tags["Image DateTime"].values)
                continue
            except KeyError:
                print("Image has no EXIF data. Skipping. " + file)
                continue
            camera = "Camera"
            if "Image Model" in tags:
                camera = tags["Image Model"].values
            if "Image Make" in tags:
                camera = tags["Image Make"].values + " " + camera
            latitude = None
            longitude = None
            # Only trust GPS data when all four EXIF GPS tags are present.
            if ("GPS GPSLatitude" in tags and "GPS GPSLatitudeRef" in tags
                    and "GPS GPSLongitude" in tags
                    and "GPS GPSLongitudeRef" in tags):
                latitude = convert_to_degress(tags["GPS GPSLatitude"].values) * (
                    1 if tags["GPS GPSLatitudeRef"].values == "N" else -1)
                longitude = convert_to_degress(tags["GPS GPSLongitude"].values) * (
                    1 if tags["GPS GPSLongitudeRef"].values == "E" else -1)
            try:
                events.add("Took a picture with " + camera, time,
                           ["photo", camera], {"camera": camera},
                           latitude=latitude, longitude=longitude,
                           images=[file])
                count += 1
            except Exception:
                # Best-effort import: a single bad image must not abort the run.
                print("Skipping file " + file + ".")
            # Progress output at most every five seconds.
            if (datetime.datetime.now() - last_update).total_seconds() > 5:
                last_update = datetime.datetime.now()
                print(str(count) + " images found...")
    print("Done importing images.")
def create_conversation_event(title, message_count, time, participants, history, first):
    """Add one summarized Facebook conversation event to the timeline.

    `first` selects the "Started a conversation" wording over the
    "Exchanged N messages" wording.
    """
    kvps = {"participants": participants, "message": history}
    plural = "s" if message_count > 1 else ""
    if first:
        summary = ("Started a Facebook conversation with " + title + " (" +
                   str(message_count) + " message" + plural + ").")
    else:
        summary = ("Exchanged " + str(message_count) + " Facebook message" +
                   plural + " with " + title + ".")
    events.add(summary, time, ["facebook", "message"], kvps)
def import_csv(filename, time_index, summary, tags, kvps, skip_first_line = True, delimiter = ",", dayfirst = False):
    """Import events from a CSV file.

    `summary` and string-valued `kvps` entries are templates populated per
    row via populate_csv_string(); integer-valued `kvps` entries are taken
    as raw column indices. `time_index` selects the timestamp column.
    Fix: the CSV file is read inside a context manager so the handle is
    closed deterministically (the original leaked it).
    """
    with open(filename, encoding="utf-8") as file:
        reader = csv.reader(file, delimiter=delimiter)
        if skip_first_line:
            next(reader, None)
        with db.atomic():
            for line in reader:
                current_summary = populate_csv_string(summary, line)
                current_kvps = {key: populate_csv_string(kvps[key], line)
                                if (type(kvps[key]) is str) else line[kvps[key]]
                                for key in kvps}
                time = dateutil.parser.parse(line[time_index], dayfirst=dayfirst)
                events.add(current_summary, time, tags, current_kvps)
def create_conversation_event(contact, message_count, time, history, first):
    """Add one summarized WhatsApp conversation event to the timeline.

    `first` selects the "Started a conversation" wording over the
    "Exchanged N messages" wording.
    """
    kvps = {"message": history}
    plural = "s" if message_count > 1 else ""
    if first:
        summary = ("Started a Whatsapp conversation with " +
                   contact.get_display_name() + " (" + str(message_count) +
                   " message" + plural + ").")
    else:
        summary = ("Exchanged " + str(message_count) + " Whatsapp message" +
                   plural + " with " + contact.get_display_name() + ".")
    events.add(summary, time, ["whatsapp", "message"], kvps)
def log_image(directory, message, contact):
    """Record a WhatsApp image message as an event with the image attached.

    Silently does nothing when the media file is missing from disk.
    """
    sent = message.is_sent()
    # Sent images live in a "Sent/" subfolder of the media directory.
    filename = (directory + "Media/WhatsApp Images/" +
                ("Sent/" if sent else "") + message.media_name)
    when = datetime.datetime.fromtimestamp(message.timestamp / 1000)
    if not os.path.isfile(filename):
        return
    caption = message.media_caption
    summary = (("Sent" if sent else "Received") + " an image " +
               ("to" if sent else "from") + " " +
               contact.get_display_name() +
               (": " + caption.encode("utf-8") if caption is not None else "."))
    kvps = {"message": caption.encode("utf-8")} if caption is not None else {}
    events.add(summary, when, ["whatsapp", "message", "image"], kvps,
               "wa-" + str(message._id), images=[filename])
def import_paypal(directory="data/paypal/"):
    """Import PayPal transactions from German-locale CSV exports.

    Fixes: dict.has_key() (removed in Python 3) replaced by `in`, and the
    export files are read inside context managers so the handles are
    closed (the original leaked them).
    """
    with db.atomic():
        for file_name in [
                os.path.join(directory, name)
                for name in os.listdir(directory)
        ]:
            with open(file_name) as file:
                lines = file.read().split("\n")
            # Skip the header line and the trailing empty line.
            for line in lines[1:-1]:
                # Fields are quoted and comma-separated; strip the outer quotes
                # and split on the '","' separators.
                data = [value for value in line[1:-1].split('","')]
                time = datetime.datetime.strptime(data[0] + " " + data[1],
                                                  "%d.%m.%Y %H:%M:%S")
                name = data[3]
                # Skip nameless rows and pre-authorizations.
                if len(name) == 0 or data[4] == "Allgemeine Autorisierung":
                    continue
                currency = data[6]
                if currency in currencies:
                    currency = currencies[currency]
                # German decimal comma -> dot.
                amount = data[9].replace(",", ".")
                if len(amount) == 0:
                    continue
                amount_positive = amount[0] != "-"
                amount_absolute = amount.replace("-", "")
                hash = data[12]
                item = data[15]
                kvps = {
                    "account": data[10],
                    "message": item,
                    "recipient-name": name,
                    "recipient-account": data[11],
                    "amount": amount
                }
                if amount_positive:
                    events.add("Received " + currency + " " + amount_absolute +
                               " from " + name +
                               (" for " + item if len(item) > 0 else "") +
                               " using Paypal.", time, ["money", "paypal"],
                               kvps, hash=hash)
                else:
                    events.add("Paid " + currency + " " + amount_absolute +
                               " to " + name +
                               (" for " + item if len(item) > 0 else "") +
                               " using Paypal.", time, ["money", "paypal"],
                               kvps, hash=hash)
def read_app_posts(directory):
    """Create events for Facebook posts made through third-party apps."""
    post_data = load_to_json(directory + "apps/posts_from_apps.json")
    for post in post_data["app_posts"]:
        attachment_data = post["attachments"][0]["data"][0]["external_context"]
        when = datetime.datetime.fromtimestamp(post["timestamp"])
        message = fix_encoding(attachment_data["name"])
        title = fix_encoding(post["title"])
        # Titles look like "... via SomeApp."; pull the app name out of them.
        app_name = "unknown app"
        if "via" in title:
            app_name = fix_encoding(title[title.index("via") + 4:-1])
        kvps = {"message": message, "title": title, "app": app_name}
        if "url" in attachment_data:
            kvps["url"] = attachment_data["url"]
        events.add("Facebook post via " + app_name + ": " + message, when,
                   ["facebook", "post", "app"], kvps)
def import_linkedin(directory="data/linkedin/"):
    """Import LinkedIn connections from the exported Connections.csv.

    Fix: the CSV file is read inside a context manager so the handle is
    closed deterministically (the original leaked it).
    """
    events.prepare_import(7)
    print("Importing LinkedIn contacts...")
    with db.atomic():
        with open(directory + "Connections.csv", encoding="utf8") as file:
            reader = csv.reader(file, delimiter=",")
            next(reader, None)  # Skip the header row.
            for line in reader:
                name = line[0] + " " + line[1]
                email = line[2]
                company = line[3]
                position = line[4]
                time = dateparser.parse(line[5])
                events.add("Added LinkedIn contact " + name + ".", time,
                           ["linkedin", "friend"],
                           kvps={"email": email, "company": company,
                                 "position": position})
def new_event():
    """Flask view: render the new-event form (GET) or add a work phase (POST).

    Fix: the original validation chain called
    events.add(order_id, user_id, description, is_pending) BEFORE comparing
    the CSRF token (the token test was the last `and` operand), so the
    database side effect happened even for forged requests. The token is
    now verified first, short-circuiting before any side effect.
    """
    user_data = users.user()
    order_list = orders.listAll()
    order_list_not_tuple = [list(elem) for elem in order_list]
    event_list = [list(elem) for elem in events.event_list()]
    event_descr_list = events.common_events()
    if request.method == "GET":
        # Status 0 and 1 are the roles allowed to record work phases.
        if users.user_status() == 1 or users.user_status() == 0:
            return render_template("new_event.html",
                                   user_data=user_data,
                                   order_list=order_list,
                                   order_list_not_tuple=order_list_not_tuple,
                                   event_list=event_list,
                                   event_descr_list=event_descr_list)
        else:
            return render_template(
                "error.html",
                message="Käyttäjän oikeudet eivät riitä tähän toimintoon.")
    if request.method == "POST":
        order_id = request.form["order_id"]
        description_drop = request.form["description_drop"]
        description_text = request.form["description_text"]
        # The description comes from the dropdown or the free-text field.
        description = ""
        if description_drop != "":
            description = description_drop
        elif description_text != "":
            description = description_text
        else:
            flash("Valitse työvaiheen kuvaus listasta tai kirjoita kuvaus",
                  "warning")
            return redirect(request.url)
        user_id = user_data[0]
        is_pending = request.form["is_pending"]
        in_progress = request.form["in_progress"]
        token = request.form["csrf_token"]
        if (session["csrf_token"] == token
                and orders.seek(order_id) != None and description != ""
                and events.add(order_id, user_id, description, is_pending)):
            # in_progress == "0" marks the order as checked out again.
            if in_progress == "0":
                orders.check_out_in(order_id, in_progress)
                events.add(order_id, user_id, "Uloskirjaus", 0)
            flash(
                "Työvaihe '" + description + "' lisätty tilaukselle " +
                order_id, "success")
            return redirect(request.url)
        else:
            flash("Työvaiheen lisääminen epäonnistui", "warning")
            return redirect(request.url)
def import_wordpress(directory="data/wordpress/"):
    """Import WordPress articles from exported WXR/RSS XML files.

    Fix: the HTML-stripping regex is now a raw string; the original
    "[\\<].*?[\\>]" relied on the invalid escape sequences \\< and \\>
    (deprecated since Python 3.6, a future SyntaxError). The character
    classes matched exactly '<' and '>', so r"<.*?>" is equivalent.
    """
    with db.atomic():
        for file_name in [os.path.join(directory, name)
                          for name in os.listdir(directory)]:
            tree = ElementTree.parse(file_name)
            channel = tree.find("channel")
            for item in channel:
                if item.tag != "item":
                    continue
                title = item.find("title").text
                time = dateutil.parser.parse(item.find("pubDate").text)
                url = item.find("guid").text
                # The post body lives in a namespaced <content:encoded>
                # element; match on the local-name suffix.
                content = next(element.text for element in item
                               if element.tag.endswith("encoded"))
                # Strip HTML tags from the body.
                content = re.sub(r"<.*?>", "", content)
                tags = [element.text for element in item
                        if element.tag == "category"]
                events.add("Posted Wordpress article: " + title, time,
                           ["wordpress"] + list(set(tags)),
                           {"title": title, "message": content, "url": url})
def create_conversation_event(contact, message_count, time, history, images, sent_any, received_any):
    """Summarize a batch of WhatsApp messages with one contact as one event.

    The wording reflects the direction: "Exchanged ... with" when both
    directions occurred, otherwise "Sent ... to" / "Received ... from".
    """
    kvps = {"message": history}
    tags = ["whatsapp", "message"]
    if images:
        tags.append("photo")
    both_directions = sent_any and received_any
    verb = "Exchanged" if both_directions else ("Sent" if sent_any else "Received")
    if message_count == 1:
        count_text = "a message"
    else:
        count_text = str(message_count) + " messages"
    link = "with" if both_directions else ("to" if sent_any else "from")
    summary = (verb + " " + count_text + " " + link + " " +
               contact.get_display_name() + ".")
    events.add(summary, time, tags, kvps, images=images)
def read_messages(query, replies):
    """Create one event per sent email; `replies` switches reply wording/tags."""
    tags = ["email"] + (["reply"] if replies else [])
    for item in query:
        when = item.get_time()
        content = item.content
        subject = ("Re: " if replies else "") + content.c1subject
        body = content.c0body
        recipients = content.c4recipients
        kvps = {
            "author": content.c3author,
            "subject": subject,
            "recipients": recipients,
            # The body column may be NULL; store an empty string instead.
            "message": "" if body is None else body
        }
        prefix = "Replied to " if replies else "Sent an email to "
        summary = prefix + get_short_recipients(recipients) + ": " + subject
        events.add(summary, when, tags, kvps)
def read_youtube_playlist(filename, name):
    """Import every item of one exported YouTube playlist as an event.

    Fix: the JSON file is read inside a context manager so the handle is
    closed deterministically (the original leaked it).
    """
    with open(filename, encoding="utf8") as file:
        data = json.load(file)
    for item in data:
        snippet = item["snippet"]
        time = dateutil.parser.parse(snippet["publishedAt"])
        kvps = {
            "description": snippet["description"],
            "title": snippet["title"],
            "url": "https://www.youtube.com/watch?v=" +
                   item["contentDetails"]["videoId"] + "&index=" +
                   str(snippet["position"]) + "&list=" + snippet["playlistId"]
        }
        events.add("Added video to " + name + ": " + snippet["title"], time,
                   ["youtube", name.lower()], kvps)
def create_like_event(likes):
    """Add one event for a batch of liked YouTube videos.

    A single like gets its own event; a batch is summarized as
    "<first title> and N other(s)".
    """
    first = likes[0]
    if len(likes) == 1:
        events.add("Liked video: " + first.title, first.time,
                   ["youtube", "like"],
                   kvps={"title": first.title, "url": first.url})
        return
    summary = ("Liked " + str(len(likes)) + " videos: " + first.title +
               " and " + str(len(likes) - 1) +
               (" others." if len(likes) > 2 else " other."))
    events.add(summary, first.time, ["youtube", "like"],
               kvps={
                   "titles": "\n".join([like.title for like in likes]),
                   "url": first.url,
                   "urls": "\n".join([like.url for like in likes])
               })
def new_order():
    """Flask view: render the new-order form (GET) or create an order (POST).

    Fix: the original POST validation read
        clinic_id != "0" or order_type_id != "0" or customer_id != "0"
            and session["csrf_token"] == token
    where `and` binds tighter than `or`, so the CSRF token was effectively
    never checked. The empty-field branch above already rejects '0' values,
    so the order is now created only when the token verifies.
    """
    today_datetime = datetime.datetime.now()
    order_type_list = orders.order_type_list()
    customer_list = customers.customer_list()
    clinic_list = customers.clinic_list()
    if request.method == "GET":
        # Status 0 and 1 are the roles allowed to create orders.
        if users.user_status() == 1 or users.user_status() == 0:
            return render_template("new_order.html",
                                   order_type_list=order_type_list,
                                   customer_list=customer_list,
                                   clinic_list=clinic_list)
        else:
            return render_template(
                "error.html",
                message="Käyttäjän oikeudet eivät riitä tähän toimintoon.")
    if request.method == "POST":
        clinic_id = request.form["clinic_id"]
        order_type_id = request.form["order_type_id"]
        customer_id = request.form["customer_id"]
        d_date = request.form["delivery_date"]
        d_time = request.form["delivery_time"]
        delivery_date = d_date + " " + d_time + ":00.000000"
        dd_datetime = datetime.datetime.strptime(delivery_date,
                                                 "%Y-%m-%d %H:%M:%S.%f")
        token = request.form["csrf_token"]
        # "0" is the placeholder value of the dropdowns: reject empty input.
        if clinic_id == "0" or order_type_id == "0" or customer_id == "0" or d_date == "":
            flash("Täytä kaikki kentät!", "warning")
            return redirect(request.url)
        # Reject delivery times in the past (less than one minute ahead).
        if dd_datetime - today_datetime < datetime.timedelta(minutes=1):
            flash("Pyydetty toimitusaika on menneisyydessä", "warning")
            return redirect(request.url)
        if session["csrf_token"] == token:
            latest_id = orders.add(order_type_id, customer_id, delivery_date,
                                   clinic_id)
            if latest_id is not None:
                events.add(latest_id, users.user()[0], "Sisäänkirjaus", 0)
                flash(
                    "Tilaus lisätty! Tilauksen id on: " + str(latest_id) +
                    ". Kirjoita se lähetteeseen.", "success")
                return redirect(request.url)
            else:
                flash("Tilauksen lisääminen epäonnistui", "warning")
                return redirect(request.url)
def read_google_play_devices(directory):
    """Import Google Play device registrations as events.

    Fix: Devices.json is read inside a context manager so the file handle
    is closed deterministically (the original leaked it).
    """
    with open(directory + "Google Play Store/Devices.json",
              encoding="utf8") as file:
        device_data = json.load(file)
    for device in device_data:
        time = dateutil.parser.parse(
            device["device"]["deviceRegistrationTime"])
        data = device["device"]["mostRecentData"]
        kvps = {
            "carrier": data["carrierName"],
            "manufacturer": data["manufacturer"],
            "model": data["modelName"],
            "device": data["deviceName"],
            "product": data["productName"]
        }
        events.add(
            "Registered Google Play device " + data["manufacturer"] + " " +
            data["modelName"] + ".", time,
            ["google", "googleplay", "device"], kvps)
def read_events(directory):
    """Create timeline events for Facebook events joined and hosted."""
    joined = load_to_json(directory + "events/event_responses.json")
    for event in joined["event_responses"]["events_joined"]:
        when = datetime.datetime.fromtimestamp(event["start_timestamp"])
        title = fix_encoding(event["name"])
        events.add("Participated in Facebook event: " + title, when,
                   ["facebook", "event"], {"name": title})
    hosted = load_to_json(directory + "events/your_events.json")
    for event in hosted["your_events"]:
        when = datetime.datetime.fromtimestamp(event["start_timestamp"])
        title = fix_encoding(event["name"])
        venue = fix_encoding(event["place"]["name"])
        events.add("Hosted Facebook event: " + title, when,
                   ["facebook", "event"],
                   {"name": title,
                    "location": venue,
                    "message": event["description"]})