/
ssdeep_snapshot.py
executable file
·232 lines (184 loc) · 6.15 KB
/
ssdeep_snapshot.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
#!/usr/bin/env python
"""
ssdeep_snapshot.py -- by Daniel Roberson @dmfroberson 7/4/2017
-- Gathers information about files and stores in a sqlite3
-- database for future analysis/comparison.
Currently very rough!!
TODO:
- argparse
-- verbose output -- shows full output for use with --stdin piping
- stdin option to run over network, but store on another machine ex:
-- ssh user@host ssdeep-snapshot.py /bin | ssdeep-snapshot --db=foo.db --stdin
This would run the snapshot tool for /bin on "host", but store it locally
within "foo.db".
- lookup tool: ssdeep-snapshot-lookup <database> <hostname> <path>
-- delete option for this tool to remove entries
-- update option for this tool to update entries
-- maybe just use sqlite3 instead of making tools? its easy enough..
- Script or instructions for importing into mysql/postgres (sqlfairy)
-- Useful for dealing with massive amounts of data
- Add actual usernames/groups in addition to uid/gid
- Change verbiage of "directories" because it also corresponds to files
- General code cleanup!!
"""
# needs python-ssdeep, python-magic, and python-sqlite
import os
import sys
import sqlite3
import socket
import stat
import argparse
import ssdeep
import hashlib
import magic
# Paths snapshotted when no directories are supplied on the command line.
DEFAULT_DIRECTORIES = [
    "/etc",
    "/bin",
    "/sbin",
    "/usr/bin",
    "/usr/sbin",
    "/usr/local/bin",
    "/usr/local/sbin"
]

# Hostname identifier stored with every database record. Defaults to the
# local FQDN; main() rebinds this global when --hostname is supplied.
HOSTNAME = socket.getfqdn()
def parse_cli():
    """ parse_cli() -- parses CLI input

    Args:
        None

    Returns:
        ArgumentParser namespace relevant to supplied CLI input
    """
    # BUGFIX: the usage example previously advertised a -v flag that does
    # not exist; the real flags are -q/--quiet, -d/--database, --hostname.
    description = "example: ./ssdeep_snapshot.py [-q] [-d <file>] dir dir2 dirN"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument("-d",
                        "--database",
                        help="sqlite3 database file to store results",
                        default="ssdeep-snapshot.db")
    parser.add_argument("-q",
                        "--quiet",
                        help="Suppress unnecessary output",
                        action="store_true")
    parser.add_argument("--hostname",
                        help="Specify hostname identifier rather than getfqdn()",
                        default=HOSTNAME)
    # REMAINDER swallows everything after the first positional, so paths
    # beginning with "-" still end up in args.directories.
    parser.add_argument("directories",
                        help="Directories to walk",
                        nargs=argparse.REMAINDER,
                        action="store")
    return parser.parse_args()
def add_db_record(cursor, filename, quiet):
"""
docstring
"""
skip_hash = False
absolute = os.path.abspath(filename)
try:
tempstat = os.stat(absolute)
perms = oct(tempstat.st_mode)
owner = tempstat.st_uid
group = tempstat.st_gid
size = tempstat.st_size
except OSError as err:
print "[-] Couldn't open %s: %s" % (absolute, err)
return False
# Skip hashing if the file is a FIFO, because the script will
# just hang forever trying to read data to calculate a hash.
if stat.S_ISFIFO(os.stat(absolute).st_mode):
skip_hash = True
fuzzy_hash = "FIFO"
md5digest = "FIFO"
sha1digest = "FIFO"
# Determine file type with libmagic
filetype = magic.detect_from_filename(absolute).name
if quiet is False:
print "[+] Adding %s -- %s" % (filename, filetype)
if skip_hash is False:
# Calculate ssdeep hash
try:
fuzzy_hash = ssdeep.hash_from_file(absolute)
except IOError:
fuzzy_hash = "PERMISSION DENIED"
except UnicodeDecodeError:
fuzzy_hash = "UNICODE DECODE ERROR"
# Calculate MD5 hash
md5hash = hashlib.md5()
md5hash.update(open(absolute).read())
md5digest = md5hash.hexdigest()
# Calculate SHA1 hash
sha1hash = hashlib.sha1()
sha1hash.update(open(absolute).read())
sha1digest = sha1hash.hexdigest()
cursor.execute(
"INSERT INTO hashes VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, DATETIME())",
(HOSTNAME,
absolute,
size,
perms,
owner,
group,
fuzzy_hash,
md5digest,
sha1digest,
filetype))
return True
def walk_directory(cursor, directory, quiet):
"""
docstring
"""
if not os.path.exists(directory):
print "[-] No such file or directory: %s" % directory
return False
# Process individual file
if not os.path.isdir(directory):
return add_db_record(cursor, directory, quiet)
# Walk directory
for dirname, _, filelist in os.walk(directory):
print "[+] Walking %s" % dirname
for filename in filelist:
fullname = os.path.join(dirname, filename)
sys.stdout.write(" ")
add_db_record(cursor, fullname, quiet)
return True
def main():
""" main()
Args:
None
Returns:
EX_OK on success
EX_USAGE on failure
"""
print "[+] ssdeep-snapshot.py -- By Daniel Roberson @dmfroberson"
print
global HOSTNAME
args = parse_cli()
dbfile = args.database
HOSTNAME = args.hostname
if args.directories:
directories = args.directories
else:
directories = DEFAULT_DIRECTORIES
print "[+] Using sqlite3 database file: %s" % dbfile
try:
con = sqlite3.connect(dbfile)
except sqlite3.OperationalError as err:
print "[-] sqlite3.connect(%s): %s" % (dbfile, err)
print "[-] Exiting."
return os.EX_USAGE
# This might be wrong, look into it!
con.text_factory = str
# Create schema if it doesn't exist
cursor = con.cursor()
cursor.execute(
"CREATE TABLE IF NOT EXISTS hashes(hostname TEXT, filename TEXT, " + \
"size INT, perm INT, uid TEXT, gid TEXT, hash_ssdeep TEXT, " + \
"hash_md5 TEXT, hash_sha1 TEXT, filetype TEXT, date_added DATEIME)")
# Walk the supplied directories/filenames
for directory in directories:
walk_directory(cursor, directory, args.quiet)
con.commit()
con.close()
print "[+] Done."
return os.EX_OK
if __name__ == "__main__":
    # BUGFIX: use sys.exit() -- the bare exit() builtin is an interactive
    # helper injected by the site module and may be absent (e.g. python -S).
    sys.exit(main())