Example 1
def main():
    cfg = geoproc_cfg.config
    lookupconn = None
    lookupcur = None
    try:
        import mysql.connector as mdb
        lookupconn = mdb.connect(
          host=cfg.get("mysql", "maxmind_server"),
          user=cfg.get("mysql", "maxmind_read_username"),
          password=geoproc_cfg.db_password("maxmind_read_password_file"),
          db=cfg.get("mysql", "maxmind_schema"),
          use_unicode=True
        )
        lookupcur = lookupconn.cursor(cursor_class=geoproc_cfg.MySQLCursorDict)
    except Exception:
        sys.stderr.write("Warning: Could not connect to database. Proceeding without database support.\n")

    annoconn, annocur = geoproc_library.connect_to_fs_anno_db(args.anno)

    outconn = sqlite3.connect(os.path.join(args.out_dir, "cookie_files_votes.db"))
    outconn.isolation_level = "EXCLUSIVE"
    outconn.row_factory = sqlite3.Row
    outcur = outconn.cursor()
    outcur.execute(SQL_CREATE_COOKIE_FILES_VOTES)
    
    #Walk cookie dump directory
    #TODO? Use a validly-extracted manifest file instead of walking dump directory.
    for (dirpath, dirnames, filenames) in os.walk(args.cookie_dump_dir):
        for cookie_txt_fname in (x for x in filenames if x.endswith(".txt")):
            cookie_fiwalk_id = int(os.path.splitext(cookie_txt_fname)[0])
            dprint("Reading cookie_fiwalk_id: %r." % cookie_fiwalk_id)
            with open(os.path.join(dirpath, cookie_txt_fname), "r", encoding="utf-8") as cookie_file:
                try:
                    some_kibs = cookie_file.read(0x8000)
                except Exception:
                    sys.stderr.write("Warning: Reading file %r failed.  Stack trace follows.\n" % cookie_txt_fname)
                    sys.stderr.write(traceback.format_exc())
                    continue
                if len(some_kibs) == 0x8000:
                    sys.stderr.write("Warning: Skipped abnormally large 'cookie' file, >=32KiB: %r.\n" % cookie_txt_fname)
                    continue
                votes = get_cookie_votes(outconn, lookupcur, annocur, cookie_fiwalk_id, some_kibs)
                for vote in votes:
                    geoproc_library.insert_db(outcur, "cookie_files_votes", vote)
    outconn.commit()
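
All of these examples request rows as dictionaries by passing cursor_class=geoproc_cfg.MySQLCursorDict to mysql.connector. A minimal sketch of such a cursor, following the dictionary-cursor recipe from older MySQL Connector/Python documentation (geoproc_cfg's actual class may differ):

import mysql.connector.cursor

class MySQLCursorDict(mysql.connector.cursor.MySQLCursor):
    #Sketch: a cursor whose fetches return dicts keyed by column name.
    def _row_to_python(self, rowdata, desc=None):
        row = super(MySQLCursorDict, self)._row_to_python(rowdata, desc)
        if row:
            return dict(zip(self.column_names, row))
        return None
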
Example 2
def main():
    global args
    parser = argparse.ArgumentParser(description="Analyze Bulk Extractor EXIF output for location indicators.")
    parser.add_argument("-d", "--debug", action="store_true", help="Enable debug printing (writes to stderr).")
    parser.add_argument("-r", "--regress", action="store_true", help="Run regression tests and exit.")
    args_regress = parser.parse_known_args()[0]

##  Set up regular expressions for extracting desired EXIF tags
    relatref = re.compile(br"<Exif.GPSInfo.GPSLatitudeRef>(?P<GPSLatitudeRef>[NS])</Exif.GPSInfo.GPSLatitudeRef>")
    relongref = re.compile(br"<Exif.GPSInfo.GPSLongitudeRef>(?P<GPSLongitudeRef>[EW])</Exif.GPSInfo.GPSLongitudeRef>")
    relat = re.compile(br"<Exif.GPSInfo.GPSLatitude>(?P<GPSLatitude>[0-9\-/ ]{1,40})</Exif.GPSInfo.GPSLatitude>")
    relong = re.compile(br"<Exif.GPSInfo.GPSLongitude>(?P<GPSLongitude>[0-9\-/ ]{1,40})</Exif.GPSInfo.GPSLongitude>")
    retimestamp = re.compile(br"<Exif.GPSInfo.GPSTimeStamp>(?P<GPSTimeStamp>[0-9/ ]{1,40})</Exif.GPSInfo.GPSTimeStamp>")
    redatestamp = re.compile(br"<Exif.GPSInfo.GPSDateStamp>(?P<GPSDateStamp>[0-9: .]{1,40})</Exif.GPSInfo.GPSDateStamp>")
    redatetime  = re.compile(br"<Exif.Image.DateTime>(?P<DateTime>[0-9: .]{1,40})</Exif.Image.DateTime>")
    
    if args_regress.regress:
        assert round(dms_to_decimal(b"33/1 49/1 42/1"), 4) == 33.8283
        assert round(dms_to_decimal(b"33/1 49/1 0/1"), 4) == round(dms_to_decimal(b"33/1 49/1"), 4)
        assert dms_to_decimal(b"0/0 0/0 0/0") is None #This value was observed in the Real Data Corpus.
        #TODO assert hms_fraction_to_decimal(b"8/1 49/1 18/1") == "8:49:18"
        #Sample EXIF data supplied by m57-redacted-terry-2009-12-07.aff, image offset 2116743168, pretty-printed with xmllint
        test_exif = b"""<?xml version="1.0"?>
<exif>
  <width>48</width>
  <height>48</height>
  <Exif.Image.Make>Apple</Exif.Image.Make>
  <Exif.Image.Model>iPhone</Exif.Image.Model>
  <Exif.Image.XResolution>72/1</Exif.Image.XResolution>
  <Exif.Image.YResolution>72/1</Exif.Image.YResolution>
  <Exif.Image.ResolutionUnit>2</Exif.Image.ResolutionUnit>
  <Exif.Image.DateTime>2008:11:26 11:46:56</Exif.Image.DateTime>
  <Exif.Image.ExifTag>180</Exif.Image.ExifTag>
  <Exif.Photo.FNumber>14/5</Exif.Photo.FNumber>
  <Exif.Photo.DateTimeOriginal>2008:11:26 11:46:56</Exif.Photo.DateTimeOriginal>
  <Exif.Photo.DateTimeDigitized>2008:11:26 11:46:56</Exif.Photo.DateTimeDigitized>
  <Exif.Photo.ColorSpace>1</Exif.Photo.ColorSpace>
  <Exif.Photo.PixelXDimension>1200</Exif.Photo.PixelXDimension>
  <Exif.Photo.PixelYDimension>1600</Exif.Photo.PixelYDimension>
  <Exif.Image.GPSTag>306</Exif.Image.GPSTag>
  <Exif.GPSInfo.GPSLatitudeRef>N</Exif.GPSInfo.GPSLatitudeRef>
  <Exif.GPSInfo.GPSLatitude>38/1 5354/100 0/1</Exif.GPSInfo.GPSLatitude>
  <Exif.GPSInfo.GPSLongitudeRef>W</Exif.GPSInfo.GPSLongitudeRef>
  <Exif.GPSInfo.GPSLongitude>92/1 2343/100 0/1</Exif.GPSInfo.GPSLongitude>
  <Exif.GPSInfo.GPSTimeStamp>11/1 46/1 788/100</Exif.GPSInfo.GPSTimeStamp>
  <Exif.Image.0xa500>11/5</Exif.Image.0xa500>
  <Exif.Thumbnail.Compression>6</Exif.Thumbnail.Compression>
  <Exif.Thumbnail.Orientation>6</Exif.Thumbnail.Orientation>
  <Exif.Thumbnail.XResolution>72/1</Exif.Thumbnail.XResolution>
  <Exif.Thumbnail.YResolution>72/1</Exif.Thumbnail.YResolution>
  <Exif.Thumbnail.ResolutionUnit>2</Exif.Thumbnail.ResolutionUnit>
  <Exif.Thumbnail.JPEGInterchangeFormat>550</Exif.Thumbnail.JPEGInterchangeFormat>
  <Exif.Thumbnail.JPEGInterchangeFormatLength>11682</Exif.Thumbnail.JPEGInterchangeFormatLength>
</exif>"""
        assert not relat.search(test_exif) is None
        assert not relong.search(test_exif) is None
        assert relatref.search(test_exif).group("GPSLatitudeRef") == b"N"
        assert relongref.search(test_exif).group("GPSLongitudeRef") == b"W"
        exit(0)

    parser.add_argument("-a", "--anno", help="Annotation database of Fiwalk and TSK-db")
    parser.add_argument("exif_file", type=argparse.FileType('rb'), help="Bulk Extractor exif.txt")
    args = parser.parse_args()

    dprint("Debug: args.anno = %r.\n" % args.anno)

##  Connect to db
    cfg = geoproc_cfg.config
    refconn = mysql.connector.Connect(
      host=cfg.get("mysql", "maxmind_server"),
      user=cfg.get("mysql", "maxmind_read_username"),
      password=geoproc_cfg.db_password("maxmind_read_password_file"),
      db=cfg.get("mysql", "maxmind_schema"),
      use_unicode=True
    )
    if refconn is None:
        raise Exception("Error: Could not connect to the lookup database.")
    refcur = refconn.cursor(cursor_class=geoproc_cfg.MySQLCursorDict)

##  Connect to output db
    outconn = sqlite3.connect("exif_headers_votes.db")
    outconn.isolation_level = "EXCLUSIVE"
    outconn.row_factory = sqlite3.Row
    outcur = outconn.cursor()
    outcur.execute(SQL_CREATE_EXIF_HEADERS_VOTES)

##  Connect to anno db if available
    annoconn, annocur = geoproc_library.connect_to_fs_anno_db(args.anno)

    for binary_line in args.exif_file:
        binary_line_parts = binary_line.split(b"\t")
        if len(binary_line_parts) < 3:
            #We don't even have exif data. Skip.
            continue
        recdict = dict()
        recdict["forensic_path"] = str(binary_line_parts[0], "ascii")
        exif_data = binary_line_parts[2]
        match_exif_gps_lat = relat.search(exif_data)
        match_exif_gps_lon = relong.search(exif_data)
        #The above matches are essential
        if None in [match_exif_gps_lat, match_exif_gps_lon]:
            continue

        try:
            exif_gps_lat_decimal = dms_to_decimal(match_exif_gps_lat.group("GPSLatitude"))
            exif_gps_lon_decimal = dms_to_decimal(match_exif_gps_lon.group("GPSLongitude"))
            if None not in [exif_gps_lat_decimal, exif_gps_lon_decimal]:
                recdict["exif_gps_lat"] = round(exif_gps_lat_decimal, 4)
                recdict["exif_gps_lon"] = round(exif_gps_lon_decimal, 4)
        except IndexError:
            #Didn't find lat or long content. Warn and continue.
            sys.stderr.write("Warning: Couldn't find a lat (maybe long) from these matches:\n\t%r\n\t%r\n" % (match_exif_gps_lat.group(0), match_exif_gps_lon.group(0)))

        #This script's only purpose is finding lat/longs
        if None in [recdict.get("exif_gps_lat"), recdict.get("exif_gps_lon")]:
            continue

        #Lat/long references, we can guess: Default to N,E.
        match_exif_gps_latref = relatref.search(exif_data)
        match_exif_gps_longref = relongref.search(exif_data)
        exif_gps_latref = b"N"
        if match_exif_gps_latref:
            exif_gps_latref = match_exif_gps_latref.group("GPSLatitudeRef")
        exif_gps_longref = b"E"
        if match_exif_gps_longref:
            exif_gps_longref = match_exif_gps_longref.group("GPSLongitudeRef")

        if exif_gps_latref == b"S":
            recdict["exif_gps_lat"] *= -1
        if exif_gps_longref == b"W":
            recdict["exif_gps_lon"] *= -1

        #Times, we can guess from the file if we really need to.
        match_exif_timestamp = retimestamp.search(exif_data)
        if match_exif_timestamp:
            recdict["exif_gps_timestamp"] = hms_fraction_to_decimal(match_exif_timestamp.group("GPSTimeStamp"))
        match_exif_datestamp = redatestamp.search(exif_data)
        if match_exif_datestamp:
            recdict["exif_gps_datestamp"] = match_exif_datestamp.group("GPSDateStamp")
        match_exif_datetime = redatetime.search(exif_data)
        if match_exif_datetime:
            recdict["exif_datetime"] = match_exif_datetime.group("DateTime")
        #TODO integrate times into output

        refrecs = geoproc_library.latlongs_to_networked_locations(refcur, recdict["exif_gps_lat"], recdict["exif_gps_lon"], 30)
        if refrecs is None:
            recdict["database_queried"] = False
        else:
            recdict["database_queried"] = True
            #Get the nearest city within 30 miles
            if len(refrecs) > 0 and refrecs[0]["distance_miles"] < 30:
                refrec = refrecs[0]
                recdict["country"] = refrec["country"]
                recdict["region"] = refrec["region"]
                recdict["city"] = refrec["city"]
                recdict["postalCode"] = refrec["postalCode"]
                recdict["distance_miles"] = refrec["distance_miles"]

        #Note the name of the file containing this EXIF data, if available
        annorecs = geoproc_library.forensic_path_to_anno_recs(annocur, recdict["forensic_path"])

        if annorecs:
            for annorec in annorecs:
                outdict = copy.deepcopy(recdict)
                outdict["fs_obj_id"] = annorec.get("fs_obj_id")
                outdict["obj_id"] = annorec.get("obj_id")
                outdict["fiwalk_id"] = annorec.get("fiwalk_id")

                #Look at file system path and say if we think it's in a cache
                if outdict.get("obj_id"):
                    annocur.execute("""
                      SELECT
                        full_path
                      FROM
                        tsk_file_full_paths
                      WHERE
                        obj_id = ?;
                    """, (outdict["obj_id"],))
                    pathrows = [row for row in annocur]
                    if len(pathrows) == 1:
                        outdict["file_in_web_cache"] = geoproc_library.path_in_web_cache(pathrows[0]["full_path"])

                #Output
                geoproc_library.insert_db(outcur, "exif_headers_votes", outdict)
        else:
            #Output to database without owning-file annotations
            geoproc_library.insert_db(outcur, "exif_headers_votes", recdict)
    outconn.commit()
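
The regression asserts above pin down the behavior of dms_to_decimal, and the TODO assert suggests what hms_fraction_to_decimal should return. A sketch consistent with those asserts, assuming the tags arrive as space-separated "numerator/denominator" rationals (the project's real helpers may differ):

from fractions import Fraction

def dms_to_decimal(dms_bytes):
    #Convert EXIF rationals like b"33/1 49/1 42/1" (degrees, minutes,
    #seconds; the seconds term may be absent) to decimal degrees.
    #Return None on a zero denominator, as observed in the Real Data Corpus.
    total = Fraction(0)
    for (power, rational) in enumerate(dms_bytes.split(b" ")):
        (num, _, den) = rational.partition(b"/")
        if int(den) == 0:
            return None
        total += Fraction(int(num), int(den)) / 60 ** power
    return float(total)

def hms_fraction_to_decimal(hms_bytes):
    #Render b"8/1 49/1 18/1" as "8:49:18", per the TODO assert above.
    #Fractional seconds truncate in this sketch.
    parts = []
    for rational in hms_bytes.split(b" "):
        (num, _, den) = rational.partition(b"/")
        parts.append(str(int(num) // int(den)))
    return ":".join(parts)
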
Example 3
def main():
    global args
    #Connect to anno db if available
    annoconn, annocur = geoproc_library.connect_to_fs_anno_db(args.anno)

    #Connect to db
    cfg = geoproc_cfg.config
    refconn = mysql.connector.Connect(
      host=cfg.get("mysql", "maxmind_server"),
      user=cfg.get("mysql", "maxmind_read_username"),
      password=geoproc_cfg.db_password("maxmind_read_password_file"),
      db=cfg.get("mysql", "maxmind_schema"),
      use_unicode=True
    )
    if refconn is None:
        raise Exception("Error: Could not connect to the lookup database.")
    refcur = refconn.cursor(cursor_class=geoproc_cfg.MySQLCursorDict)

    outconn = sqlite3.connect("ipv4s_votes.db")
    outconn.isolation_level = "EXCLUSIVE"
    outconn.row_factory = sqlite3.Row
    outcur = outconn.cursor()

    outcur.execute(SQL_CREATE_IPV4S_VOTES)

    pairing_dict = collections.defaultdict(list)
    ip_set = set()
    for (forensic_path, ipv4, ipv4_notes) in geoproc_library.bulk_extractor_ips(args.be_dir):
        pairing_dict[forensic_path].append((ipv4, ipv4_notes))
        ip_set.add(ipv4)

    #Unfortunately, there isn't much to do for timestamps without file system or network time information.
    #TODO Add time interface
    dummy_dftime = dfxml.dftime("2009-05-01T00:00:00Z")

    ips_to_locs = geoproc_library.ips_to_locations(refcur, None, ip_set)

    for forensic_path in pairing_dict:
        #Determine if we have a pair
        entries_at_path = pairing_dict[forensic_path]
        pair_found = len(entries_at_path) == 2
        for (ipv4, ipv4_notes) in entries_at_path:
            outdict = dict()
            outdict["believed_timestamp"] = dummy_dftime.iso8601()
            outdict["forensic_path"] = forensic_path
            outdict["ipv4"] = ipv4
            outdict["ipv4_notes"] = ipv4_notes
            if "cksum-bad" in ipv4_notes:
                outdict["cksum_ok"] = False
            elif "cksum-ok" in ipv4_notes:
                outdict["cksum_ok"] = True
            #None, otherwise
            outdict["is_socket_address"] = "sockaddr" in ipv4_notes
            outdict["pair_found"] = pair_found
            if "(src)" in ipv4_notes:
                outdict["src_or_dst"] = "src"
            elif "dst" in ipv4_notes:
                outdict["src_or_dst"] = "dst"
            #None, otherwise 
            annorecs = geoproc_library.forensic_path_to_anno_recs(annocur, outdict["forensic_path"])
            if annorecs and len(annorecs) > 1:
                sys.stderr.write("Warning: Multiple files found to own forensic path %r. Only using first.  This may cause strange results.\n" % outdict["forensic_path"])
            if annorecs:
                annorec = annorecs[0]
                outdict["obj_id"] = annorec.get("obj_id")
                outdict["fs_obj_id"] = annorec.get("fs_obj_id")
                outdict["fiwalk_id"] = annorec.get("fiwalk_id")

            if ipv4 in ips_to_locs:
                for key in [
                  "maxmind_ipv4_time",
                  "country",
                  "region",
                  "city",
                  "postalCode",
                  "latitude",
                  "longitude"
                ]:
                    outdict[key] = ips_to_locs[ipv4][key]

            geoproc_library.insert_db(outcur, "ipv4s_votes", outdict)
    outconn.commit()
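
geoproc_library.bulk_extractor_ips is used here and again in Example 5 to enumerate Bulk Extractor's IPv4 findings. A hypothetical sketch, assuming it reads the standard tab-separated ip.txt feature file (forensic path, IPv4, notes) and skips the "#" banner lines:

import os

def bulk_extractor_ips(be_dir):
    #Yield (forensic_path, ipv4, ipv4_notes) triples from ip.txt.
    with open(os.path.join(be_dir, "ip.txt"), "r", encoding="utf-8", errors="replace") as features:
        for line in features:
            if line.startswith("#"):
                continue
            parts = line.rstrip("\n").split("\t")
            if len(parts) >= 3:
                yield (parts[0], parts[1], parts[2])
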
Example 4
def main():
    global args

    #To keep down on placemarker clutter, gather information by distinct lat/long.
    #Key: (lat,long) floats.
    #Value: List of records.
    latlong_dict = collections.defaultdict(list)

    
    annoconn = None
    annocur = None
    if args.fs_anno_dir:
        annoconn, annocur = geoproc_library.connect_to_fs_anno_db(os.path.join(args.fs_anno_dir, "tsk_fiwalk_anno.db"))

    for (table, script, dbfile) in TABLE_SCRIPT_DB:
        if args.__dict__.get(table):
            conn = sqlite3.connect(args.__dict__[table])
            conn.row_factory = sqlite3.Row
            cur = conn.cursor()
            
            #Get votes
            if args.precision_db:
                dprint("Debug: Joining precision data.")
                cur.execute("ATTACH DATABASE '%s' AS precision;" % args.precision_db)
                #Build join clause by getting columns from vectors table
                cur.execute("SELECT * FROM precision." + table + "_precision_vectors;")
                rows = [row for row in cur]
                fields = rows[0].keys()
                join_clause = " AND ".join(["p." + field + " IS v." + field for field in fields])

                sql_query = "SELECT * FROM %s as v LEFT JOIN precision.%s_precision as p ON %s;" % (table, table, join_clause)
                dprint("Debug: Query with precision is: %r." % sql_query)
                cur.execute(sql_query)
            else:
                #No precision information available
                dprint("Debug: Not joining precision data.")
                cur.execute("SELECT * FROM %s as v;" % table)

            #Convert records to expected dictionary-list format
            for rawrow in cur:
                row = {key:rawrow[key] for key in rawrow.keys()}
                if not (row.get("latitude") and row.get("longitude")):
                    continue
                row["source_table"] = table
                latlong_dict[(row["latitude"], row["longitude"])].append(row)

            #Clean up connections
            if args.precision_db:
                cur.execute("DETACH DATABASE precision;")
            conn.close()
    
    if len(latlong_dict) == 0:
        dprint("Debug: Found nothing to report.")
        sys.exit(0)

    print(kml_head)

    #TODO Take one pass over the whole latlong_dict and sort the records
    #for (latitude,longitude) in latlong_dict:
    #    rows_to_sort = []
    #    for row in latlong_dict[(latitude,longitude)]:
    #        loc_ranker = row.get("p_correct_location")
    #        if loc_ranker is None:
    #            loc_ranker = -1.0
    #        rows_to_sort.append( (loc_ranker, row) )
    #    rows_sorted = sorted(rows_to_sort, reverse=True)
    #    best_row = latlong_dict[(latitude,longitude)][0]

    for (latitude,longitude) in latlong_dict:
        #Determine marker name
        #For now: Just city, by popularity histogram
        #TODO Add believed precision
        name_triples = collections.defaultdict(lambda: 0)
        for row in latlong_dict[(latitude,longitude)]:
            #None is sometimes a legitimate value; dict.get just substitutes on a missing key.
            name_triples[(row.get("country") or " (no country)", row.get("region") or " (no region)", row.get("city") or " (no city)")] += 1
        #Sort descending so the most popular name comes first
        names_votes = sorted([ (name_triples[k], k) for k in name_triples.keys() ], reverse=True)
        #dprint("Debug: names_votes = %r." % names_votes)
        placemark_name = ", ".join(names_votes[0][1])

        placemark_description_list = []
        placemark_description_list.append("<dl>")
        placemark_description_list.append("<dt>Latitude, longitude</dt>")
        placemark_description_list.append("<dd>%f, %f</dd>" % (latitude, longitude))

        placemark_description_list.append("<dt>Number of artifacts indicating this location</dt>")
        placemark_description_list.append("<dd>%d</dd>" % len(latlong_dict[(latitude,longitude)]))

        #Add to description: Other location names, if any
        if len(names_votes) > 1:
            placemark_description_list.append("<dt>Other location names</dt>")
            placemark_description_list.append("<dd><table>")
            placemark_description_list.append("  <thead><tr><th>Name</th><th>Number of occurrences</th></tr></thead>")
            placemark_description_list.append("  <tfoot></tfoot>")
            placemark_description_list.append("  <tbody>")
            for (tally, name) in names_votes:
                placemark_description_list.append("<tr><td>%s</td>%d<td></td></tr>" % (name, tally))
            placemark_description_list.append("  </tbody>")
            placemark_description_list.append("</table></dd>")

        placemark_description_list.append("</dl>")
        #Add to description: List of files whose contents support this artifact
        if annocur:
            placemark_description_list.append("<table><caption>Supporting artifacts found on the disk, ordered by location precision</caption>")
            placemark_description_list.append("""
<thead>
  <tr>
    <th rowspan="3">TSK fs_obj_id</th>
    <th rowspan="3">TSK obj_id</th>
    <th rowspan="3">Fiwalk id</th>
    <th rowspan="3">Forensic path</th>
    <th rowspan="3">File path</th>
    <th rowspan="3">Within-file record number</th>
    <th colspan="8">Weighted precision: Correct / Number of assertions</th>
  </tr>
  <tr>
    <th colspan="2">Location</th>
    <th colspan="2">Country</th>
    <th colspan="2">Region</th>
    <th colspan="2">City</th>
  </tr>
  <tr>
    <th>%</th>
    <th>C/N</th>
    <th>%</th>
    <th>C/N</th>
    <th>%</th>
    <th>C/N</th>
    <th>%</th>
    <th>C/N</th>
  </tr>
</thead><tfoot></tfoot><tbody>""")
            for row in latlong_dict[(latitude,longitude)]:
                placemark_description_list.append("<tr>")
                placemark_description_list.append("<td>%s</td>" % str(row.get("fs_obj_id", "")))
                placemark_description_list.append("<td>%s</td>" % str(row.get("obj_id", "")))
                placemark_description_list.append("<td>%s</td>" % str(row.get("fiwalk_id", "")))
                placemark_description_list.append("<td>%s</td>" % str(row.get("forensic_path", "")))

                if args.anonymize:
                    placemark_description_list.append("<td>(redacted)</td>")
                else:
                    annorows = []
                    if row.get("fs_obj_id") and row.get("obj_id"):
                        try:
                            annocur.execute("SELECT full_path FROM tsk_file_full_paths WHERE obj_id = ? AND fs_obj_id = ?;", (row["obj_id"], row["fs_obj_id"]))
                        except TypeError:
                            dprint(repr(row))
                            raise
                        annorows = [row for row in annocur]
                    elif row.get("fiwalk_id"):
                        annocur.execute("""
                          SELECT
                            full_path
                          FROM
                            tsk_file_full_paths as fp,
                            fiwalk_id_to_tsk_obj_id as ftt
                          WHERE
                            fp.obj_id = ftt.tsk_obj_id AND
                            ftt.fiwalk_id = ?;
                        """, (row["fiwalk_id"],))
                        annorows = [row for row in annocur]

                    if len(annorows) != 1:
                        placemark_description_list.append("<td>(not found)</td>")
                    else:
                        placemark_description_list.append("<td>%s</td>" % annorows[0]["full_path"]) #TODO HTML-escape this string

                #The within-file record is formatted differently depending on the artifact type
                within_file_path = ""
                if row["source_table"] == "email_files_votes":
                    if row["message_index"] is not None:
                        within_file_path = "Message %d, " % (row["message_index"] + 1)
                    within_file_path += "<tt>Received</tt> header %d of %d" % (row["received_path_index"] + 1, row["received_path_length"])
                placemark_description_list.append("<td>%s</td>" % within_file_path)

                #Add precision
                for locfield in ["location", "country", "region", "city"]:
                    pcl = row.get("p_correct_" + locfield)
                    ncl = row.get("n_correct_" + locfield)
                    ntl = row.get("n_total_" + locfield)
                    if None in (pcl, ncl, ntl):
                        placemark_description_list.append("<td></td><td></td>")
                    else:
                        placemark_description_list.append("<td>%s</td>" % lite_float_string(100 * pcl))
                        placemark_description_list.append("<td>%s / %s</td>" % (lite_float_string(row["n_correct_" + locfield]), lite_float_string(row["n_total_" + locfield])))

                placemark_description_list.append("</tr>")
            placemark_description_list.append("</tbody></table>")

        placemark_description = "\n".join(placemark_description_list)

        print(kml_placemark % (
          placemark_name,
          placemark_description,
          longitude,
          latitude
        ))

    print(kml_foot)
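
main() assumes three module-level KML template strings plus the lite_float_string helper used in the precision cells. A sketch of definitions compatible with the %-substitutions above (the project's actual strings may differ):

#KML wants coordinates in longitude,latitude order, which matches the
#argument order passed to kml_placemark in main().
kml_head = """<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>"""

kml_placemark = """<Placemark>
  <name>%s</name>
  <description><![CDATA[%s]]></description>
  <Point><coordinates>%f,%f</coordinates></Point>
</Placemark>"""

kml_foot = """</Document>
</kml>"""

def lite_float_string(x):
    #Render a number compactly, e.g. 50.0 -> "50", 33.25 -> "33.25".
    return ("%.2f" % float(x)).rstrip("0").rstrip(".")
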
Example 5
def main():
    global args

    #Set up lookup database connection
    cfg = geoproc_cfg.config
    lookupconn = None
    lookupcur = None
    try:
        import mysql.connector as mdb
        lookupconn = mdb.connect(
          host=cfg.get("mysql", "maxmind_server"),
          user=cfg.get("mysql", "maxmind_read_username"),
          password=geoproc_cfg.db_password("maxmind_read_password_file"),
          db=cfg.get("mysql", "maxmind_schema"),
          use_unicode=True
        )
        lookupcur = lookupconn.cursor(cursor_class=geoproc_cfg.MySQLCursorDict)
    except Exception:
        sys.stderr.write("Warning: Could not connect to database. Proceeding without database support.\n")

    #Connect to annodb
    annoconn, annocur = geoproc_library.connect_to_fs_anno_db(args.annodb)

    #Verify input
    manifest_path = os.path.join(args.emaildir, "manifest.txt")
    if not os.path.isfile(manifest_path):
        raise Exception("Error: manifest.txt not found in input directory.")

    #Ingest BE ips, if available
    #Stash in (once-tested) histogram.
    #Dictionary key: ipv4 address
    #Dictionary value: (notes, tally) default dictionary.
    ip_notes_histogram = collections.defaultdict(lambda: collections.defaultdict(lambda: 0))
    if args.bulk_extractor_output:
        for (forensic_path, ipv4, ipv4_notes) in geoproc_library.bulk_extractor_ips(args.bulk_extractor_output):
            ip_notes_histogram[ipv4][ipv4_notes] += 1
    dprint("Debug: Number of IPv4s with notes: %d." % len(ip_notes_histogram.keys()))

    #Set up output database
    outdbpath = os.path.join(args.outdir, "email_files_votes.db")
    if os.path.isfile(outdbpath):
        raise Exception("Error: Output database already exists. This script won't overwrite. Aborting.")
    outconn = sqlite3.connect(outdbpath)
    outconn.isolation_level = "EXCLUSIVE"
    outconn.row_factory = sqlite3.Row
    outcur = outconn.cursor()
    outcur.execute(SQL_CREATE_EMAIL_FILES_VOTES)

    for (fiwalk_id, messageno, message) in emails_in_dir_manifest(manifest_path):
        dprint("Debug: Analyzing a record from fiwalk_id %r." % fiwalk_id)
        #print(repr(type(message)))
        #for i in message.keys():
        #    print('%r: %r' % (i, message.get_all(i)))
        received_recs = message.get_all("Received")
        if not received_recs:
            continue
        pathlength = len(received_recs)
        for (pathindex, pathline) in enumerate(received_recs):
            #TODO Just getting all the IPs for now; filter later
            ips = geoproc_library.all_ipv4s(pathline)
            dprint("Debug: Found this many IP's: %d.\n\t%r" % (len(ips), ips))
            
            #Can we get a date?
            maybe_timestamp = None
            maybe_timestamp_match = dfxml.rx_rfc822datetime.search(pathline)
            if maybe_timestamp_match:
                thedatestring = maybe_timestamp_match.group(0)
                try:
                    maybe_timestamp = dfxml.dftime(thedatestring)
                except Exception:
                    sys.stderr.write("Warning: An error occurred trying to parse time input.\nInput: %r\nStack trace:\n" % thedatestring)
                    sys.stderr.write(traceback.format_exc())
                    sys.stderr.write("\n")
                    #Don't stop here.
            dprint("Debug: Believed timestamp: %r." % maybe_timestamp)
            
            #Now that we have a date, can we get locations?
            if maybe_timestamp:

                #Can we get a single recipient?  (This is, of course, not guaranteed to be the owner.)
                sole_recipient = None
                delivered_to_headers = message.get_all("Delivered-To")
                to_headers = message.get_all("To")
                if delivered_to_headers and len(delivered_to_headers) == 1:
                    sole_recipient = delivered_to_headers[0]
                elif to_headers and len(to_headers) == 1 and len(to_headers[0].split("\n")) == 1:
                    sole_recipient = to_headers[0]
                all_ip_locations = geoproc_library.ips_to_locations(lookupcur, maybe_timestamp.datetime(), ips)
                dprint("Debug: Fetched these IP location records:\n\t%r" % all_ip_locations)
                for ip in ips:
                    outdict = {"fiwalk_id":fiwalk_id}
                    #TODO Use annodb to get TSK identifiers
                    outdict["message_index"] = messageno
                    outdict["ipv4"] = ip
                    outdict["received_path_index"] = pathindex
                    outdict["received_path_length"] = pathlength
                    outdict["received_header_text"] = pathline
                    outdict["database_queried"] = all_ip_locations is not None
                    outdict["believed_timestamp"] = str(maybe_timestamp)
                    outdict["sole_recipient_domain_is_webmail"] = geoproc_library.in_webmail_domain(sole_recipient)
                    if all_ip_locations is not None and ip in all_ip_locations:
                        rec = all_ip_locations[ip]
                        outdict["latitude"] = rec.get("latitude")
                        outdict["longitude"] = rec.get("longitude")
                        outdict["postalCode"] = rec.get("postalCode")
                        outdict["maxmind_ipv4_time"] = dfxml.dftime(rec.get("maxmind_ipv4_time")).iso8601()
                        if rec.get("country"):
                            outdict["country"] = rec["country"]
                        if rec.get("region"):
                            outdict["region"] = rec["region"]
                        if rec.get("city"):
                            outdict["city"] = rec["city"]
                        dprint("Debug: Checking for IP notes for %r." % ip)
                        if ip in ip_notes_histogram:
                            dprint("Debug: Formatting notes for %r." % ip)
                            notedict = ip_notes_histogram[ip]
                            notelist = sorted(notedict.keys())
                            notes_to_format = []
                            for note in notelist:
                                notes_to_format.append("%d %r" % (notedict[note], note))
                            outdict["ipv4_be_notes"] = "; ".join(notes_to_format)
                            outdict["ipv4_be_has_cksum_or_socket"] = "sockaddr" in outdict["ipv4_be_notes"] or "cksum-ok" in outdict["ipv4_be_notes"]
                        dprint("Debug: Outdict just before inserting:\n\t%r" % outdict)
                    geoproc_library.insert_db(outcur, "email_files_votes", outdict)
    outconn.commit()
    dprint("Debug: Done.")