def check_ip_using_virustotal(log_instance, api_key, user, password, server, port, ip):
	try:
		# class instance
		client_instance = Virustotal(api_key, debug=0, error=0)
		proxy_handle = None
		# check if all proxy parameters are entered or not.
		if server and port and user and password:
			# setup proxy
			proxy_handle = client_instance.setup_proxy(server, port, user, password)
		else:
			log_instance.warning(
				"No proxy parameters are not entered and hence,a direct network connection to internet is assumed for \
		         checking ip against virustotal database.")
			# IP report
		if proxy_handle:
			#check url
			response = client_instance.ip_reporter(ip, proxy_handle)
		else:
			#no proxy
			response = client_instance.ip_reporter(ip)
			# check if malware presence is seen by virustotal scan
		if 'detected_urls' in response:
			return True
		else:
			return False
	except Exception, e:
		log_instance.error("Error while checking IP against Virustotal database-%s" % str(e).strip())
# ---- Example #2 ----
def check_ip_using_virustotal(log_instance, api_key, user, password, server,
                              port, ip):
    try:
        # class instance
        client_instance = Virustotal(api_key, debug=0, error=0)
        proxy_handle = None
        # check if all proxy parameters are entered or not.
        if server and port and user and password:
            # setup proxy
            proxy_handle = client_instance.setup_proxy(server, port, user,
                                                       password)
        else:
            log_instance.warning(
                "No proxy parameters are not entered and hence,a direct network connection to internet is assumed for \
		         checking ip against virustotal database.")
            # IP report
        if proxy_handle:
            #check url
            response = client_instance.ip_reporter(ip, proxy_handle)
        else:
            #no proxy
            response = client_instance.ip_reporter(ip)
            # check if malware presence is seen by virustotal scan
        if 'detected_urls' in response:
            return True
        else:
            return False
    except Exception, e:
        log_instance.error(
            "Error while checking IP against Virustotal database-%s" %
            str(e).strip())
	def __init__(self):
		# No DB session until one is bound later.
		self.session = None
		# The Virustotal client is optional and driven by configuration.
		self.vt = Virustotal(config.get("vt_key", optional=True)) if config.get("submit_to_vt") else None
		self.cuckoo = Cuckoo(config)
		# IP-to-ASN enrichment defaults to enabled.
		self.do_ip_to_asn_resolution = config.get("do_ip_to_asn_resolution", optional=True, default=True)
# ---- Example #4 ----
    def __init__(self):
        # Virustotal upload machinery: client, on/off flag, worker, queue.
        self.vt = Virustotal()
        self.vt_on = False
        self.vt_worker = None
        self.vt_queue = Queue.Queue()

        # Sample files on disk plus the sqlite metadata database.
        self.dir = "samples/"
        self.sql = sqlite3.connect("samples.db")
        self.setup_db()

        # Re-check a known URL after one day.
        self.url_recheck = 24 * 3600
        # Shell scripts by file name; wget download commands inside scripts.
        self.sh_re = re.compile(".*\\.sh$")
        self.dl_re = re.compile(".*wget (?:-[a-zA-Z] )?(http[^ ;><&]*).*")
    def __init__(self):
        self.session = None
        # Only create a Virustotal client when submissions are enabled.
        self.vt = Virustotal(config.get("vt_key", optional=True)) if config.get("submit_to_vt") else None
        self.cuckoo = Cuckoo(config)

        # ip_to_asn_resolution may be "offline", "online" or anything else
        # (meaning disabled).
        self.ip2asn = config.get("ip_to_asn_resolution",
                                 optional=True,
                                 default=True)
        self.do_ip_to_asn_resolution = self.ip2asn in ("offline", "online")
        if self.ip2asn == "offline":
            # Offline mode needs the local IP-range tables populated.
            self.fill_db_ipranges()
# ---- Example #6 ----
def main(a):
    if a['init']:
        keydict = {}
        keydict['virustotal'] = a['KEY'][0]
        if a['--googl']:
            keydict['googl'] = a['KEY'][1]
        createconfig(keydict)
    else:
        if a['--api-key'] is not None:
            key = a['--api-key']
        else:
            key = readconfig('virustotal')
            if key is None:
                exit(1)
        vtc = Virustotal(key)
        if a['report']:
            if a['file'] or a['hash']:
                if a['hash']:
                    filehash = a['<resource>']
                else:
                    filehash = gethash(a['<resource>'])
                output(vtc.rscReport(filehash))
            elif a['url']:
                output(vtc.urlReport(a['<resource>']))
            elif a['ip']:
                output(vtc.ipReport(a['<resource>']))
            elif a['domain']:
                output(vtc.domainReport(a['<resource>']))
        elif a['scan']:
            if a['file']:
                if a['--rescan']:
                    filehash = gethash(a['<resource>'])
                    output(vtc.rscRescan(filehash))
                else:
                    output(vtc.rscSubmit(a['<resource>']))
            elif a['url']:
                output(vtc.scanURL(a['<resource>']))
        elif a['sha256']:
            print gethash(a['<file>'])
        elif a['signature']:
            print getsignature(a['<file>'])
        elif a['hexdump']:
            print dumphex(a['<file>'])
        else:
            exit(1)
# ---- Example #7 ----
def main(a):
    if a['init']:
        keydict = {}
        keydict['virustotal'] = a['KEY'][0]
        if a['--googl']:
            keydict['googl'] = a['KEY'][1]
        createconfig(keydict)
    else:
        if a['--api-key'] is not None:
            key = a['--api-key']
        else:
            key = readconfig('virustotal')
            if key is None:
                exit(1)
        vtc = Virustotal(key)
        if a['report']:
            if a['file'] or a['hash']:
                if a['hash']:
                    filehash = a['<resource>']
                else:
                    filehash = gethash(a['<resource>'])
                output(vtc.rscReport(filehash))
            elif a['url']:
                output(vtc.urlReport(a['<resource>']))
            elif a['ip']:
                output(vtc.ipReport(a['<resource>']))
            elif a['domain']:
                output(vtc.domainReport(a['<resource>']))
        elif a['scan']:
            if a['file']:
                if a['--rescan']:
                    filehash = gethash(a['<resource>'])
                    output(vtc.rscRescan(filehash))
                else:
                    output(vtc.rscSubmit(a['<resource>']))
            elif a['url']:
                output(vtc.scanURL(a['<resource>']))
        elif a['sha256']:
            print gethash(a['<file>'])
        elif a['signature']:
            print getsignature(a['<file>'])
        elif a['hexdump']:
            print dumphex(a['<file>'])
        else:
            exit(1)
class ClientController:
    """Stores honeypot sessions in the database and enriches them with
    ASN/geo information and Virustotal reports.

    NOTE(review): several methods call ``self.db`` (get_url / put_url /
    link_conn_url / link_conn_tag) although ``__init__`` never assigns it -
    presumably it is provided by the ``db_wrapper`` decorator; verify.
    """

    def __init__(self, web):
        self.web = web
        # DB session; bound elsewhere (presumably by db_wrapper).
        self.session = None
        # The Virustotal client is optional and configuration driven.
        if config.get("submit_to_vt"):
            self.vt = Virustotal(config.get("vt_key", optional=True))
        else:
            self.vt = None
        self.cuckoo = Cuckoo(config)
        self.do_ip_to_asn_resolution = config.get("do_ip_to_asn_resolution",
                                                  optional=True,
                                                  default=True)

    def get_asn(self, asn):
        """Return the stored ASN row for *asn* as JSON, creating the row
        from external ASN info on first sight (None when lookup fails)."""
        asn_obj = self.session.query(ASN).filter(ASN.asn == asn).first()

        if asn_obj:
            return asn_obj.json(depth=1)
        else:
            asn_info = get_asn_info(asn)
            if asn_info:
                asn_obj = ASN(asn=asn,
                              name=asn_info['name'],
                              reg=asn_info['reg'],
                              country=asn_info['country'])
                self.session.add(asn_obj)
                return asn_obj.json(depth=1)

    @db_wrapper
    def put_domain(self, domain):
        """Fetch and store the Virustotal report for *domain*.

        No-op when Virustotal is disabled or the query yields nothing.
        """
        # BUGFIX: 'report' was only assigned inside the "self.vt != None"
        # branch but read unconditionally afterwards, raising NameError
        # whenever Virustotal is disabled. Initialize it first.
        report = None
        if self.vt != None:
            report = self.vt.query_domain_reports(domain)
        if report != None:
            domainReport = DomainReport(domain=domain,
                                        report=json.dumps(report))
            self.session.add(domainReport)
            self.session.flush()

    @db_wrapper
    def put_session(self, session):
        """Store one honeypot *session* dict: the connection row, its
        IP/ASN enrichment, Virustotal reports, seen URLs, associations to
        previous connections and tag matches.

        Returns the list of URLs whose samples still need to be fetched.
        """
        ipinfo = None
        asn = None
        block = None
        country = None

        if self.do_ip_to_asn_resolution:
            ipinfo = get_ip_info(session["ip"])
            if ipinfo:
                # get_asn() is invoked for its side effect of creating the
                # ASN row on first sight; its return value is not needed.
                self.get_asn(ipinfo["asn"])
                asn = ipinfo["asn"]
                block = ipinfo["ipblock"]
                country = ipinfo["country"]

        report = {}
        if self.vt != None:
            # Only query Virustotal for IPs without a stored report.
            if self.web.get_ip_report(session["ip"]) == None:
                report = self.vt.query_ip_reports(session["ip"])
                ipReport = IpReport(ip=session["ip"],
                                    report=json.dumps(report))
                self.session.add(ipReport)

        # Calculate "hash": one 16-bit hash per printable input line,
        # packed big-endian and hex encoded (Python 2 str semantics).
        connhash = ""
        for event in session["stream"]:
            if event["in"]:
                line = event["data"]
                line = ''.join(char for char in line
                               if ord(char) < 128 and ord(char) > 32)
                if line != "":
                    linehash = abs(hash(line)) % 0xFFFF
                    connhash += struct.pack("!H", linehash)
        connhash = connhash.encode("hex")

        backend_user = self.session.query(User).filter(
            User.username == session["backend_username"]).first()

        conn = Connection(ip=session["ip"],
                          user=session["user"],
                          date=session["date"],
                          password=session["pass"],
                          stream=json.dumps(session["stream"]),
                          asn_id=asn,
                          ipblock=block,
                          country=country,
                          connhash=connhash,
                          backend_user_id=backend_user.id)

        self.session.add(conn)
        self.session.flush()

        req_urls = []
        # Deduplicate the session's URLs before processing.
        set_urls = set(session["urls"])
        for url in set_urls:
            db_url = self.db.get_url(url).fetchone()
            url_id = 0

            # Store a (possibly empty) domain report for every URL.
            report = ''
            parsed_uri = urlparse(url)
            domain = '{uri.netloc}'.format(uri=parsed_uri)
            if self.vt != None:
                report = self.vt.query_domain_reports(domain)
            domainReport = DomainReport(domain=domain,
                                        report=json.dumps(report))
            self.session.add(domainReport)

            if db_url == None:
                # Unknown URL: resolve its IP/ASN and create the row.
                url_ip = None
                url_asn = None
                url_country = None

                if self.do_ip_to_asn_resolution:
                    url_ip, url_info = get_url_info(url)
                    if url_info:
                        # Side effect only: ensure the ASN row exists.
                        self.get_asn(url_info["asn"])
                        url_asn = url_info["asn"]
                        url_country = url_info["country"]

                url_id = self.db.put_url(url, session["date"], url_ip, url_asn,
                                         url_country)
                req_urls.append(url)

            elif db_url["sample"] == None:
                # Known URL but no sample yet: request a (re-)fetch.
                req_urls.append(url)
                url_id = db_url["id"]

            else:
                # Sample exists already
                # TODO: Check url for oldness
                url_id = db_url["id"]

            self.db.link_conn_url(conn.id, url_id)

        # Find previous connections
        # A connection is associated when:
        #  - same honeypot/user
        #  - connection happened as long as 120s before
        #  - same client ip OR same username/password combo
        assoc_timediff = 120
        previous_conns = (self.session.query(Connection).filter(
            Connection.date > (conn.date - assoc_timediff),
            or_(
                and_(Connection.user == conn.user,
                     Connection.password == conn.password),
                Connection.ip == conn.ip),
            Connection.backend_user_id == conn.backend_user_id,
            Connection.id != conn.id).all())

        for prev in previous_conns:
            conn.conns_before.append(prev)

        # Check connection against all tags
        tags = self.session.query(Tag).all()
        # Re-fetch the connection so tag evaluation sees the stored state.
        conn = self.session.query(Connection).filter(
            Connection.id == conn.id).first()
        for tag in tags:
            json_obj = conn.json(depth=0)
            json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
            if simple_eval(tag.code, names=json_obj) == True:
                self.db.link_conn_tag(conn.id, tag.id)

        return req_urls

    @db_wrapper
    def put_sample_info(self, f):
        """Store metadata of downloaded sample *f* (a dict) and link it to
        its source URL. The Virustotal verdict is fetched best-effort."""
        url = f["url"]
        url_id = self.db.get_url(url).fetchone()["id"]

        result = None
        try:
            if self.vt != None:
                vtobj = self.vt.query_hash_sha256(f["sha256"])
                if vtobj:
                    result = str(vtobj["positives"]) + "/" + str(
                        vtobj["total"]) + " " + self.vt.get_best_result(vtobj)
        except:
            # Best-effort: a failed Virustotal lookup must not block storage.
            pass

        sample_id = self.db.put_sample(f["sha256"], f["name"], f["length"],
                                       f["date"], f["info"], result)
        self.db.link_url_sample(url_id, sample_id)
        return f

    @db_wrapper
    def put_sample(self, data):
        """Store raw sample *data* and forward it to Cuckoo or Virustotal
        (Cuckoo takes precedence when both are enabled)."""
        sha256 = hashlib.sha256(data).hexdigest()
        self.db.put_sample_data(sha256, data)
        if config.get("cuckoo_enabled"):
            self.cuckoo.upload(os.path.join(config.get("sample_dir"), sha256),
                               sha256)
        elif config.get("submit_to_vt"):
            self.vt.upload_file(os.path.join(config.get("sample_dir"), sha256),
                                sha256)

    @db_wrapper
    def update_vt_result(self, sample_sha):
        """Refresh the stored Virustotal verdict for *sample_sha*.

        Returns the updated sample JSON, or None when the sample or the
        Virustotal report is unavailable.
        """
        sample = self.session.query(Sample).filter(
            Sample.sha256 == sample_sha).first()
        if sample:
            vtobj = self.vt.query_hash_sha256(sample_sha)
            if vtobj:
                sample.result = str(vtobj["positives"]) + "/" + str(
                    vtobj["total"]) + " " + self.vt.get_best_result(vtobj)
                return sample.json(depth=1)
        return None
# ---- Example #9 ----
class Sampledb:
    """Sqlite-backed store for downloaded malware samples, their source
    URLs and the connections that delivered them, with an optional
    asynchronous Virustotal upload worker."""

    def __init__(self):
        self.vt = Virustotal()
        self.vt_on = False
        self.vt_queue = Queue.Queue()
        self.vt_worker = None

        # Sample files on disk plus the sqlite metadata database.
        self.dir = "samples/"
        self.sql = sqlite3.connect("samples.db")
        self.setup_db()

        self.url_recheck = 3600 * 24  # 1 day
        # Shell scripts by name; wget download lines inside scripts.
        self.sh_re = re.compile(".*\\.sh$")
        self.dl_re = re.compile(".*wget (?:-[a-zA-Z] )?(http[^ ;><&]*).*")

    def enable_vt(self):
        """Start the background Virustotal upload worker thread."""
        self.vt_on = True
        self.vt_worker = threading.Thread(target=self.vt_work)
        # self.vt_worker.deamon = True
        self.vt_worker.start()

    def setup_db(self):
        """Create the tables on first run (idempotent)."""
        self.sql.execute(
            "CREATE TABLE IF NOT EXISTS samples    (id INTEGER PRIMARY KEY AUTOINCREMENT, sha256 TEXT UNIQUE, date INTEGER, name TEXT, file TEXT, result TEXT)"
        )
        self.sql.execute(
            "CREATE TABLE IF NOT EXISTS urls       (id INTEGER PRIMARY KEY AUTOINCREMENT, url TEXT UNIQUE, date INTEGER, sample INTEGER)"
        )
        self.sql.execute(
            "CREATE TABLE IF NOT EXISTS conns      (id INTEGER PRIMARY KEY AUTOINCREMENT, ip TEXT, date INTEGER, user TEXT, pass TEXT)"
        )
        self.sql.execute(
            "CREATE TABLE IF NOT EXISTS conns_urls (id_conn INTEGER, id_url INTEGER)"
        )

    def db_add_url(self, url, date):
        """Insert *url* with *date* and return its row id."""
        c = self.sql.cursor()
        c.execute("INSERT INTO urls VALUES (NULL,?,?,NULL)", (url, date))
        id_url = c.lastrowid
        self.sql.commit()
        return id_url

    def db_link_url_conn(self, id_url, id_conn):
        """Associate a url row with a connection row."""
        self.sql.execute("INSERT INTO conns_urls VALUES (?,?)",
                         (id_conn, id_url))
        self.sql.commit()

    def db_add_sample(self, sha256, date, name, filename, id_url, length):
        """Insert a sample row, link it to *id_url* and return its id.

        NOTE(review): the last bound value (*length*) lands in the
        'result' column of the samples table; this looks unintended but is
        kept for compatibility with existing databases.
        """
        c = self.sql.cursor()
        c.execute("INSERT INTO samples VALUES (NULL,?,?,?,?,?)",
                  (sha256, date, name, filename, length))
        id_sample = c.lastrowid
        self.db_url_set_sample(id_sample, id_url)
        self.sql.commit()
        return id_sample

    def db_url_set_sample(self, id_sample, id_url):
        """Point the url row at the sample it resolved to."""
        self.sql.execute("UPDATE urls SET sample = ? WHERE id = ?",
                         (id_sample, id_url))
        self.sql.commit()

    def db_get_url(self, url):
        """Return the url row for *url* or None."""
        url = self.sql.execute("SELECT * FROM urls WHERE url = ?", (url, ))
        url = url.fetchone()
        return url

    def db_get_sample(self, sha256):
        """Return the sample row for *sha256* or None."""
        url = self.sql.execute("SELECT * FROM samples WHERE sha256 = ?",
                               (sha256, ))
        url = url.fetchone()
        return url

    def put_conn(self, ip, user, password, date=None):
        """Store a connection (current time when *date* is None) and
        return its row id."""
        if date == None:
            date = int(time.time())
        c = self.sql.cursor()
        c.execute("INSERT INTO conns VALUES (NULL,?,?,?,?)",
                  (ip, date, user, password))
        id_conn = c.lastrowid
        self.sql.commit()
        return id_conn

    def put_url(self, url, id_conn):
        """Register *url* for connection *id_conn*, downloading and
        storing the sample unless the URL/hash is already known.

        Small files and shell scripts are scanned for further wget links,
        which are processed recursively.
        """
        dbg("New Url " + url)

        db_url = self.db_get_url(url)
        if db_url:
            id_url = db_url[0]

            ts = db_url[2]
            now = int(time.time())
            if (now - ts > self.url_recheck):
                dbg("Re-Checking Url")
                self.sql.execute("UPDATE urls SET date = ? WHERE id = ?",
                                 (now, id_url))
                self.sql.commit()
            else:
                dbg("Url already known")
                self.db_link_url_conn(id_url, id_conn)
                return
        else:
            id_url = self.db_add_url(url, int(time.time()))

        self.db_link_url_conn(id_url, id_conn)

        f = self.download(url)
        if f["len"] < 5000 or self.sh_re.match(f["name"]):
            with open(f["file"], "rb") as fd:
                for line in fd:
                    m = self.dl_re.match(line)
                    if m:
                        dbg("Found link in File. Downloading ...")
                        self.put_url(m.group(1), id_conn)

        sample = self.db_get_sample(f["sha256"])
        if sample:
            self.db_url_set_sample(sample[0], id_url)
            dbg("Hash already known")
            os.remove(f["file"])
            return

        # BUGFIX: download() stores the size under "len"; the original
        # passed f["length"], which raised KeyError for every new sample.
        self.db_add_sample(f["sha256"], f["date"], f["name"], f["file"],
                           id_url, f["len"])

        if self.vt_on:
            dbg("ANALYZE")
            self.vt_analyze(f)

    def vt_analyze(self, f):
        """Queue sample dict *f* for the Virustotal worker."""
        self.vt_queue.put(f)

    def vt_work(self):
        """Worker loop: upload queued samples unknown to Virustotal.
        Terminates when the "!STOP!" sentinel is dequeued."""
        dbg("Virustotal uploader started")
        while True:
            f = self.vt_queue.get()
            if f == "!STOP!":
                self.vt_queue.task_done()
                dbg("Stopping worker")
                return
            scan = self.vt.query_hash_sha256(f["sha256"])
            if scan:
                # Already known to Virustotal; nothing to upload.
                pass
            else:
                self.vt.upload_file(f["file"], f["name"])
            self.vt_queue.task_done()

    def stop(self):
        """Drain the Virustotal queue and stop the worker."""
        if self.vt_on:
            dbg("Waiting for virustotal queue to be empty")
            self.vt_queue.put("!STOP!")
            self.vt_queue.join()

    def download(self, url):
        """Download *url* into the sample directory.

        Returns a dict with keys "name", "date", "len", "file", "sha256".
        """
        url = url.strip()
        dbg("Downloading " + url)
        hdr = {"User-Agent": "Wget/1.15 (linux-gnu)"}
        # BUGFIX: hdr was built but never passed, so the wget User-Agent
        # (clearly intended to mimic the malware's own fetcher) was not sent.
        r = requests.get(url, headers=hdr, stream=True, timeout=5.0)
        f = {}
        h = hashlib.sha256()

        f["name"] = url.split("/")[-1].strip()
        f["date"] = int(time.time())
        f["len"] = 0
        if len(f["name"]) < 1:
            f["name"] = "index.html"

        f["file"] = self.dir + str(f["date"]) + "_" + f["name"]

        # Log the redirect chain before the final response.
        for his in r.history:
            dbg("HTTP Response " + str(his.status_code))
            for k, v in his.headers.iteritems():
                dbg("HEADER " + k + ": " + v)

        dbg("HTTP Response " + str(r.status_code))
        for k, v in r.headers.iteritems():
            dbg("HEADER " + k + ": " + v)

        # Stream to disk while hashing and counting bytes.
        with open(f["file"], 'wb') as fd:
            for chunk in r.iter_content(chunk_size=4096):
                f["len"] = f["len"] + len(chunk)
                fd.write(chunk)
                h.update(chunk)

        f["sha256"] = h.hexdigest()
        dbg("Downlod finished. length: " + str(f["len"]) + " sha256: " +
            f["sha256"])

        return f
class ClientController:
    """Stores honeypot sessions, URLs, samples, networks and malware
    families in the database and enriches them with ASN/geo data and
    Virustotal results.

    NOTE(review): put_session calls ``self.db.link_conn_tag`` although
    this class never assigns ``self.db`` - presumably provided by the
    ``db_wrapper`` decorator; confirm before relying on it.
    """

    def __init__(self):
        # DB session, bound elsewhere (presumably by db_wrapper).
        self.session = None
        # The Virustotal client is optional and configuration driven.
        if config.get("submit_to_vt"):
            self.vt = Virustotal(config.get("vt_key", optional=True))
        else:
            self.vt = None
        self.cuckoo = Cuckoo(config)

        # ip_to_asn_resolution may be "offline" (local ipdb tables),
        # "online" (external lookups) or anything else (disabled).
        self.do_ip_to_asn_resolution = False
        self.ip2asn = config.get("ip_to_asn_resolution",
                                 optional=True,
                                 default=True)
        if self.ip2asn == "offline":
            self.do_ip_to_asn_resolution = True
            self.fill_db_ipranges()
        if self.ip2asn == "online":
            self.do_ip_to_asn_resolution = True

    @db_wrapper
    def _get_asn(self, asn_id):
        """Return the ASN row for *asn_id*, creating it from external
        info on first sight; None when the lookup fails."""
        asn_obj = self.session.query(ASN).filter(ASN.asn == asn_id).first()

        if asn_obj:
            return asn_obj
        else:
            asn_info = additionalinfo.get_asn_info(asn_id)
            if asn_info:
                asn_obj = ASN(asn=asn_id,
                              name=asn_info['name'],
                              reg=asn_info['reg'],
                              country=asn_info['country'])
                self.session.add(asn_obj)
                return asn_obj

        return None

    def calc_connhash_similiarity(self, h1, h2):
        """Return the fraction of differing positions between the two
        connhashes: 0.0 means identical, 1.0 completely different."""
        l = min(len(h1), len(h2))
        r = 0
        for i in range(0, l):
            r += int(h1[i] != h2[i])

        if l == 0: return 0
        return float(r) / float(l)

    def calc_connhash(self, stream):
        """Hash each whitespace-separated word of the session's input
        events into one byte and concatenate them (Python 2 str)."""
        output = ""
        for event in stream:
            if event["in"]:
                line = event["data"]
                line = line.strip()
                parts = line.split(" ")
                for part in parts:
                    part_hash = chr(hash(part) % 0xFF)
                    output += part_hash

        # Max db len is 256, half because of hex encoding
        return output[:120]

    @db_wrapper
    def fill_db_ipranges(self):
        """Populate the IPRange/ASN tables from the bundled ipdb data.
        No-op when the tables are already filled."""
        if self.session.query(IPRange.ip_min).count() != 0:
            return

        print "Filling IPRange Tables"

        asntable = ipdb.ipdb.get_asn()
        progress = 0

        for row in ipdb.ipdb.get_geo_iter():
            progress += 1
            # Commit in batches of 1000 rows and report progress.
            if progress % 1000 == 0:
                self.session.commit()
                self.session.flush()
                print str(100.0 * float(row[0]) / 4294967296.0) + "% / " + str(
                    100.0 * progress / 3315466) + "%"

            ip = IPRange(ip_min=int(row[0]), ip_max=int(row[1]))

            ip.country = row[2]
            ip.region = row[4]
            ip.city = row[5]
            ip.zipcode = row[8]
            ip.timezone = row[9]
            ip.latitude = float(row[6])
            ip.longitude = float(row[7])

            asn_data = asntable.find_int(ip.ip_min)

            if asn_data:
                asn_id = int(asn_data[3])
                asn_db = self.session.query(ASN).filter(
                    ASN.asn == asn_id).first()

                if asn_db == None:
                    asn_db = ASN(asn=asn_id,
                                 name=asn_data[4],
                                 country=ip.country)
                    self.session.add(asn_db)

                ip.asn = asn_db

                # Dont add session if we cannot find an asn for it
                self.session.add(ip)

        print "IPranges loaded"

    @db_wrapper
    def get_ip_range_offline(self, ip):
        """Look *ip* up in the local IPRange table."""
        ip_int = ipdb.ipdb.ipstr2int(ip)

        range = self.session.query(IPRange).filter(
            and_(IPRange.ip_min <= ip_int, ip_int <= IPRange.ip_max)).first()

        return range

    def get_ip_range_online(self, ip):
        """Look *ip* up via an external service; returns an object that
        mimics an IPRange row, or None."""

        addinfo = additionalinfo.get_ip_info(ip)

        if addinfo:

            # TODO: Ugly hack
            range = type('', (object, ), {})()

            range.country = addinfo["country"]
            range.city = "Unknown"
            range.latitude = 0
            range.longitude = 0
            range.asn_id = int(addinfo["asn"])
            range.asn = self._get_asn(range.asn_id)
            range.cidr = addinfo["ipblock"]

            return range

        else:

            return None

    def get_ip_range(self, ip):
        """Dispatch to the online or offline IP lookup per configuration."""
        if self.ip2asn == "online":
            return self.get_ip_range_online(ip)
        else:
            return self.get_ip_range_offline(ip)

    def get_url_info(self, url):
        """Resolve *url*'s host to an IP and its range; returns
        (ip, range) or None when resolution fails."""
        parsed = urlparse.urlparse(url)
        host = parsed.netloc.split(':')[0]

        # Hosts starting with a digit are assumed to be IP literals.
        if host[0].isdigit():
            ip = host
        else:
            try:
                ip = socket.gethostbyname(host)
            except:
                return None

        range = self.get_ip_range(ip)
        return ip, range

    @db_wrapper
    def do_housekeeping(self):
        """Periodic maintenance: re-randomize malware names; the
        nb_firstconns rebuild below is currently disabled."""

        for malware in self.session.query(Malware).all():
            malware.name = random.choice(ANIMAL_NAMES)

        # rebuild nb_firstconns
        if False:

            net_cache = {}

            for conn in self.session.query(Connection).all():
                if len(conn.conns_before) == 0:
                    if conn.network_id in net_cache:
                        net_cache[conn.network_id] += 1
                    else:
                        net_cache[conn.network_id] = 1

            for network in self.session.query(Network).all():
                if network.id in net_cache:
                    network.nb_firstconns = net_cache[network.id]
                else:
                    network.nb_firstconns = 0

                print "Net " + str(network.id) + ": " + str(
                    network.nb_firstconns)

    @db_wrapper
    def put_session(self, session):
        """Store one honeypot *session*: the connection, its geo/ASN
        enrichment, samples/URLs, association with previous connections,
        tag matches, network assignment and malware classification.

        Returns the stored connection as JSON (depth=1).
        """

        connhash = self.calc_connhash(session["stream"]).encode("hex")

        backend_user = self.session.query(User).filter(
            User.username == session["backend_username"]).first()

        conn = Connection(ip=session["ip"],
                          user=session["user"],
                          date=session["date"],
                          password=session["pass"],
                          stream=json.dumps(session["stream"]),
                          connhash=connhash,
                          backend_user_id=backend_user.id)

        conn.user = filter_ascii(conn.user)
        conn.password = filter_ascii(conn.password)

        if self.do_ip_to_asn_resolution:
            range = self.get_ip_range(conn.ip)
            if range:
                conn.country = range.country
                conn.city = range.city
                conn.lat = range.latitude
                conn.lon = range.longitude
                conn.asn = range.asn

        self.session.add(conn)
        self.session.flush()  # to get id

        # Inherit the first network id seen on any sample, url or
        # previous connection; stays None when nothing provides one.
        network_id = None

        samples = []
        urls = []
        for sample_json in session["samples"]:
            # Ignore junk - may clean up the db a bit
            if sample_json["length"] < 2000:
                continue

            sample, url = self.create_url_sample(sample_json)

            if sample:
                if network_id == None and sample.network_id != None:
                    network_id = sample.network_id
                samples.append(sample)

            if url:
                if network_id == None and url.network_id != None:
                    network_id = url.network_id
                conn.urls.append(url)
                urls.append(url)

        # Find previous connections
        # A connection is associated when:
        #  - same honeypot/user
        #  - connection happened as long as 120s before
        #  - same client ip OR same username/password combo
        assoc_timediff = 120
        assoc_timediff_sameip = 3600

        previous_conns = (self.session.query(Connection).filter(
            or_(
                and_(Connection.date > (conn.date - assoc_timediff),
                     Connection.user == conn.user,
                     Connection.password == conn.password),
                and_(Connection.date > (conn.date - assoc_timediff_sameip),
                     Connection.ip == conn.ip)),
            Connection.backend_user_id == conn.backend_user_id,
            Connection.id != conn.id).all())

        for prev in previous_conns:
            if network_id == None and prev.network_id != None:
                network_id = prev.network_id
            conn.conns_before.append(prev)

        # Check connection against all tags
        tags = self.session.query(Tag).all()
        for tag in tags:
            json_obj = conn.json(depth=0)
            json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
            if simple_eval(tag.code, names=json_obj) == True:
                self.db.link_conn_tag(conn.id, tag.id)

        # Only create new networks for connections with urls or associtaed conns,
        # to prevent the creation of thousands of networks
        # NOTE: only conns with network == NULL will get their network updated
        #       later so whe should only create a network where we cannot easily
        #       change it later
        haslogin = conn.user != None and conn.user != ""
        if (len(conn.urls) > 0 or
                len(previous_conns) > 0) and network_id == None and haslogin:
            print(" --- create network --- ")
            network_id = self.create_network().id

        # Update network on self
        conn.network_id = network_id

        # Update network on all added Urls
        for url in urls:
            if url.network_id == None:
                url.network_id = network_id

        # Update network on all added Samples
        for sample in samples:
            if sample.network_id == None:
                sample.network_id = network_id

        # Update network on all previous connections withut one
        if network_id != None:
            for prev in previous_conns:
                if prev.network_id == None:
                    prev.network_id = network_id

                    # Update number of first conns on network
                    if len(prev.conns_before) == 0:
                        conn.network.nb_firstconns += 1

        self.session.flush()

        # Check for Malware type
        # 	only if our network exists AND has no malware associated
        if conn.network != None and conn.network.malware == None:
            # Find connections with similar connhash
            similar_conns = (self.session.query(Connection).filter(
                func.length(Connection.connhash) == len(connhash)).all())

            min_sim = 2
            min_conn = None
            for similar in similar_conns:
                if similar.network_id != None:
                    c1 = connhash.decode("hex")
                    c2 = similar.connhash.decode("hex")
                    sim = self.calc_connhash_similiarity(c1, c2)
                    if sim < min_sim and similar.network.malware != None:
                        min_sim = sim
                        min_conn = similar

            # 0.9: 90% or more words in session are equal
            #	think this is probably the same kind of malware
            #	doesn't need to be the same botnet though!
            if min_sim < 0.9:
                conn.network.malware = min_conn.network.malware
            else:
                conn.network.malware = Malware()
                conn.network.malware.name = random.choice(ANIMAL_NAMES)

                self.session.add(conn.network.malware)
                self.session.flush()

        # Update network number of first connections
        if len(previous_conns) == 0 and conn.network_id != None:
            conn.network.nb_firstconns += 1

        return conn.json(depth=1)

    @db_wrapper
    def create_network(self):
        """Create, persist and return a new Network row."""
        net = Network()
        self.session.add(net)
        self.session.flush()
        return net

    def create_url_sample(self, f):
        """Get or create the Url and Sample rows for sample dict *f*,
        propagating network ids between the two. Returns (sample, url);
        sample is None when *f* carries no sha256."""
        url = self.session.query(Url).filter(Url.url == f["url"]).first()
        if url == None:
            url_ip = None
            url_asn = None
            url_country = None

            if self.do_ip_to_asn_resolution:
                url_ip, url_range = self.get_url_info(f["url"])
                if url_range:
                    url_asn = url_range.asn_id
                    url_country = url_range.country

            url = Url(url=f["url"],
                      date=f["date"],
                      ip=url_ip,
                      asn_id=url_asn,
                      country=url_country)
            self.session.add(url)

        if f["sha256"] != None:
            sample = self.session.query(Sample).filter(
                Sample.sha256 == f["sha256"]).first()
            if sample == None:
                # Best-effort Virustotal verdict for new samples.
                result = None
                try:
                    if self.vt != None:
                        vtobj = self.vt.query_hash_sha256(f["sha256"])
                        if vtobj:
                            result = str(vtobj["positives"]) + "/" + str(
                                vtobj["total"]
                            ) + " " + self.vt.get_best_result(vtobj)
                except:
                    pass

                sample = Sample(sha256=f["sha256"],
                                name=f["name"],
                                length=f["length"],
                                date=f["date"],
                                info=f["info"],
                                result=result)
                self.session.add(sample)

            # Propagate the network id in whichever direction is missing.
            if sample.network_id != None and url.network_id == None:
                url.network_id = sample.network_id

            if sample.network_id == None and url.network_id != None:
                sample.network_id = url.network_id
        else:
            sample = None

        url.sample = sample

        return sample, url

    @db_wrapper
    def put_sample(self, data):
        """Store raw sample *data* and forward it to Cuckoo or Virustotal
        (Cuckoo takes precedence when both are enabled)."""
        sha256 = hashlib.sha256(data).hexdigest()
        self.db.put_sample_data(sha256, data)
        if config.get("cuckoo_enabled"):
            self.cuckoo.upload(os.path.join(config.get("sample_dir"), sha256),
                               sha256)
        elif config.get("submit_to_vt"):
            self.vt.upload_file(os.path.join(config.get("sample_dir"), sha256),
                                sha256)

    @db_wrapper
    def update_vt_result(self, sample_sha):
        """Refresh the stored Virustotal verdict for *sample_sha*;
        returns the updated sample JSON or None."""
        sample = self.session.query(Sample).filter(
            Sample.sha256 == sample_sha).first()
        if sample:
            vtobj = self.vt.query_hash_sha256(sample_sha)
            if vtobj:
                sample.result = str(vtobj["positives"]) + "/" + str(
                    vtobj["total"]) + " " + self.vt.get_best_result(vtobj)
                return sample.json(depth=1)
        return None
# ---- Example #11 ----
class ClientController:
    """Receives honeypot client reports and persists them to the database.

    NOTE(review): ``__init__`` sets ``self.db`` and ``self.sess``, but the
    methods below read ``self.session`` and ``self.db`` -- presumably the
    ``@db_wrapper`` decorator injects these per call; confirm against the
    decorator's implementation.
    """

    def __init__(self):
        # VirusTotal client used to enrich samples with scan results.
        self.vt = Virustotal(config["vt_key"])
        self.db = None
        self.sess = None

    def get_asn(self, asn):
        """Return the ASN row as JSON (depth=1), creating it on first sight.

        Falls back to an external lookup (get_asn_info) for unknown ASNs;
        implicitly returns None when that lookup yields nothing.
        """
        asn_obj = self.session.query(ASN).filter(ASN.asn == asn).first()

        if asn_obj:
            return asn_obj.json(depth=1)
        else:
            asn_info = get_asn_info(asn)
            if asn_info:
                asn_obj = ASN(asn=asn,
                              name=asn_info['name'],
                              reg=asn_info['reg'],
                              country=asn_info['country'])
                self.session.add(asn_obj)
                return asn_obj.json(depth=1)

    @db_wrapper
    def put_session(self, session):
        """Store one honeypot session and its URLs.

        Returns the list of URLs whose sample still needs to be fetched
        (new URLs, or known URLs without an associated sample).
        """
        ipinfo = get_ip_info(session["ip"])
        asn = None
        block = None
        country = None

        if ipinfo:
            # Side effect: ensures the ASN row exists before referencing it.
            asn_obj = self.get_asn(ipinfo["asn"])
            asn = ipinfo["asn"]
            block = ipinfo["ipblock"]
            country = ipinfo["country"]

        s_id = self.db.put_conn(session["ip"], session["user"],
                                session["pass"], session["date"],
                                session["text_combined"], asn, block, country)
        req_urls = []

        for url in session["urls"]:
            db_url = self.db.get_url(url).fetchone()
            url_id = 0

            if db_url == None:
                # First time we see this URL: resolve and store it.
                url_ip, url_info = get_url_info(url)
                url_asn = None
                url_country = None

                if url_info:
                    asn_obj_url = self.get_asn(url_info["asn"])
                    url_asn = url_info["asn"]
                    url_country = url_info["country"]

                url_id = self.db.put_url(url, session["date"], url_ip, url_asn,
                                         url_country)
                req_urls.append(url)

            elif db_url["sample"] == None:
                # Known URL but its payload was never captured; request it.
                req_urls.append(url)
                url_id = db_url["id"]

            else:
                # Sample exists already
                # TODO: Check url for oldness
                url_id = db_url["id"]

            self.db.link_conn_url(s_id, url_id)

        return req_urls

    @db_wrapper
    def put_sample_info(self, f):
        """Store metadata for a downloaded sample and link it to its URL.

        ``f`` is a dict with url/sha256/name/length/date/info keys; the VT
        lookup is best-effort -- any failure leaves ``result`` as None.
        """
        url = f["url"]
        url_id = self.db.get_url(url).fetchone()["id"]

        result = None
        try:
            vtobj = self.vt.query_hash_sha256(f["sha256"])
            if vtobj:
                # Result string format: "<positives>/<total> <best name>".
                result = str(vtobj["positives"]) + "/" + str(
                    vtobj["total"]) + " " + self.vt.get_best_result(vtobj)
        except:
            # Deliberate best-effort: VT errors must not block ingestion.
            pass

        sample_id = self.db.put_sample(f["sha256"], f["name"], f["length"],
                                       f["date"], f["info"], result)
        self.db.link_url_sample(url_id, sample_id)
        return f

    @db_wrapper
    def put_sample(self, data):
        """Store raw sample bytes keyed by their SHA-256 digest."""
        sha256 = hashlib.sha256(data).hexdigest()
        self.db.put_sample_data(sha256, data)
예제 #12
0
def table(data):
    """Pretty-print a VirusTotal API response as colorized tables (Python 2).

    Behaviour is driven by the global docopt ``arguments`` dict:
      * report mode  -- print metadata plus per-engine / per-sample tables,
      * scan mode    -- print the scan id, sleep 30s, then fetch the report,
      * --rescan     -- resubmit the resource and output the new report.
    """
    # URL shortening is best-effort: fall back to full permalinks when no
    # goo.gl key is configured.
    try:
        googlkey = readconfig('googl')
        g = Googl(googlkey)
        shorten = True
    except:
        googlkey = None
        shorten = False
    metatable = PrettyTable()
    metafields = collections.OrderedDict()
    if arguments['report']:
        if not arguments['ip'] and not arguments['domain']:
            if arguments['url']:
                metafields['URL'] = data['url']
            elif arguments['file'] or arguments['hash']:
                metafields['MD5'] = data['md5']
                metafields['SHA1'] = data['sha1']
                metafields['SHA256'] = data['sha256']
            # Red when more than half of the engines flagged the resource.
            if int(data['positives']) > (int(data['total']) / 2):
                c = red
            else:
                c = green
            detectionratio = '{0}/{1}'.format(data['positives'],
                                              data['total'])
            metafields['Detection ratio'] = '{0}'.format(detectionratio)
            metafields['Analysis date'] = data['scan_date']
            metafields['Scan id'] = data['scan_id']
            if shorten:
                link = g.shorten(data['permalink'])['id']
            else:
                link = data['permalink']
            metafields['Link'] = link
            for f in metafields:
                col = green
                if f == 'Detection ratio':
                    col = c
                metatable.add_row([colorize(colorize(f, blue), bold),
                                   colorize(str(metafields[f]), col)])
            metatable.align = "l"
            metatable.header = False
            print metatable
            # Per-engine detection table.
            scans = data['scans']
            scanstable = PrettyTable(colorize(colorize(['Engine',
                                                        'Detected',
                                                        'Result',
                                                        'Detail'],
                                              blue), bold))
            for key in scans.keys():
                engine = key
                detected = scans[key]['detected']
                result = scans[key]['result']
                if 'detail' in scans[key]:
                    if shorten:
                        detail = g.shorten(scans[key]['detail'])['id']
                    else:
                        detail = scans[key]['detail']
                else:
                    detail = None
                if detected:
                    scanstable.add_row(colorize([engine,
                                                 detected,
                                                 result,
                                                 detail], red))
                else:
                    scanstable.add_row(colorize([engine,
                                                 detected,
                                                 result,
                                                 detail], green))
            scanstable.align = "l"
            print scanstable
        elif arguments['ip'] or arguments['domain']:
            # IP and domain reports share the resolutions/URLs/samples
            # sections below; headtype selects the per-mode column label.
            if arguments['ip']:
                headtype = 'Hostname'
                headtype2 = 'hostname'
                if 'asn' in data:
                    metafields['AS owner'] = data['as_owner']
                    metafields['ASN'] = data['asn']
                    metafields['Country'] = data['country']
                    for f in metafields:
                        metatable.add_row([colorize(colorize(f, blue), bold),
                                           colorize(str(metafields[f]),
                                                    green)])
                    metatable.align = "l"
                    metatable.header = False
                    print metatable
            elif arguments['domain']:
                headtype = 'IP address'
                headtype2 = 'ip_address'
                cattable = PrettyTable(colorize(colorize(['Categories'],
                                                         blue), bold))
                for c in data['categories']:
                    cattable.add_row([colorize(str(c), green)])
                cattable.align = "l"
                print cattable
                if 'WOT domain info' in data:
                    print 'WOT domain info'
                    w = PrettyTable()
                    for k in data['WOT domain info']:
                        w.add_row([colorize(colorize(str(k), blue), bold),
                                   colorize(str(data['WOT domain info'][k]),
                                   green)])
                    w.align = "l"
                    w.header = False
                    print w
                if 'subdomains' in data:
                    subtable = PrettyTable(colorize(colorize(['Subdomains'],
                                                             blue), bold))
                    for s in data['subdomains']:
                        subtable.add_row([colorize(str(s), green)])
                    subtable.align = "l"
                    print subtable
                whoistable = PrettyTable(colorize(colorize(['Whois lookup'],
                                                           blue), bold))
                whoistable.add_row([data['whois']])
                whoistable.align = "l"
                print whoistable
            if len(data['resolutions']) > 0:
                print 'Resolutions {0}'.format(len(data['resolutions']))
                restable = PrettyTable(colorize(colorize([headtype,
                                                         'Last resolved'],
                                                         blue), bold))
                for ip in data['resolutions']:
                    restable.add_row(colorize([ip[headtype2],
                                               ip['last_resolved']], green))
                restable.align = "l"
                print restable
            if len(data['detected_urls']) > 0:
                print 'URLs {0}'.format(len(data['detected_urls']))
                urltable = PrettyTable(colorize(colorize(['Analysis date',
                                                          'Detection ratio',
                                                          'URL'], blue), bold))
                for u in data['detected_urls']:
                    adate = u['scan_date']
                    positives = u['positives']
                    total = u['total']
                    url = u['url']
                    ratio = '{0}/{1}'.format(positives, total)
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    urltable.add_row(colorize([adate, ratio, url], c))
                urltable.align = "l"
                print urltable
            if 'detected_referrer_samples' in data:
                print 'Detected referrer samples {0}'.format(
                      len(data['detected_referrer_samples']))
                dreftable = PrettyTable(colorize(colorize(['SHA256',
                                                           'Detection ratio'],
                                                 blue), bold))
                for dref in data['detected_referrer_samples']:
                    positives = dref['positives']
                    total = dref['total']
                    ratio = '{0}/{1}'.format(positives, total)
                    shahash = dref['sha256']
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    dreftable.add_row(colorize([shahash, ratio], c))
                dreftable.align = "l"
                print dreftable
            if 'detected_downloaded_samples' in data:
                print 'Detected downloaded samples {0}'.format(
                      len(data['detected_downloaded_samples']))
                ddowntable = PrettyTable(colorize(colorize(['Analysis date',
                                                            'SHA256',
                                                            'Detection ratio'],
                                                  blue), bold))
                for ddown in data['detected_downloaded_samples']:
                    adate = ddown['date']
                    positives = ddown['positives']
                    total = ddown['total']
                    ratio = '{0}/{1}'.format(positives, total)
                    shahash = ddown['sha256']
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    ddowntable.add_row(colorize([adate, shahash, ratio], c))
                ddowntable.align = "l"
                print ddowntable
            if 'detected_communicating_samples' in data:
                print 'Detected communicating samples {0}'.format(
                      len(data['detected_communicating_samples']))
                dcommtable = PrettyTable(colorize(colorize(['Analysis date',
                                                            'SHA256',
                                                            'Detection ratio'],
                                                  blue), bold))
                for dcomm in data['detected_communicating_samples']:
                    adate = dcomm['date']
                    positives = dcomm['positives']
                    total = dcomm['total']
                    ratio = '{0}/{1}'.format(positives, total)
                    shahash = dcomm['sha256']
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    dcommtable.add_row(colorize([adate, shahash, ratio], c))
                dcommtable.align = "l"
                print dcommtable
    elif arguments['scan'] and not arguments['--rescan']:
        if arguments['url']:
            metafields['URL'] = data['url']
        elif arguments['file']:
            metafields['MD5'] = data['md5']
            metafields['SHA1'] = data['sha1']
            metafields['SHA256'] = data['sha256']
        metafields['Scan id'] = data['scan_id']
        if shorten:
            link = g.shorten(data['permalink'])['id']
        else:
            link = data['permalink']
        metafields['Link'] = link
        for f in metafields:
            metatable.add_row([colorize(colorize(f, blue), bold),
                               colorize(str(metafields[f]), green)])
        metatable.align = "l"
        metatable.header = False
        print metatable
        # Give VT time to process the submission, then flip the arguments
        # into report mode and fetch the finished report.
        time.sleep(30)
        arguments['scan'] = False
        arguments['report'] = True
        arguments['<resource>'] = data['scan_id']
        key = readconfig('virustotal')
        vtc = Virustotal(key)
        if arguments['url']:
            output(vtc.urlReport(arguments['<resource>']))
        elif arguments['file']:
            output(vtc.rscReport(arguments['<resource>']))
    elif arguments['--rescan']:
        arguments['scan'] = False
        arguments['report'] = True
        arguments['<resource>'] = data['scan_id']
        key = readconfig('virustotal')
        vtc = Virustotal(key)
        output(vtc.rscReport(arguments['<resource>']))
예제 #13
0
         DB_DEBUG=args.db_debug,
         TASK_REFRESH=args.task_refresh))

# Optional response-cache busting for the Flask app.
if app.config["CACHEBUSTER"]:
    from utils import cache_buster
    app.after_request(cache_buster)

# Shared service clients, created once at module import time.
db_obj = DB(app.config["DBUSER"],
            app.config["DBPASSWORD"],
            app.config["DBHOST"],
            app.config["DBPORT"],
            app.config["DBNAME"],
            debug=app.config["DEBUG"],
            db_debug=app.config["DB_DEBUG"])

v = Virustotal(app.config["VIRUSTOTAL_API_KEY"], debug=debug)

c = Cuckoo(app.config["CUCKOO_API_URL"],
           app.config["CUCKOO_API_USER"],
           app.config["CUCKOO_API_PASS"],
           debug=debug)

s = SSLSite(debug=debug)
# The top-1M site list is fetched over the network; fall back to a cached
# pickle when the download fails so startup still succeeds.
try:
    t = TopSite(debug=debug)
except Exception as e:
    logging.info(
        "error fetching top 1M site list, falling back to pickle method")
    t = TopSite(autoload=False, debug=debug)
    t._load_from_pickle()
예제 #14
0
class ClientController:
    """Persists honeypot sessions, URLs and samples and groups related
    connections into Networks.

    NOTE(review): methods read ``self.session`` and ``self.db``, which
    ``__init__`` does not create -- presumably injected by the
    ``@db_wrapper`` decorator; confirm against its implementation.
    """

    def __init__(self):
        self.session = None
        # VT enrichment is optional; vt stays None when submit_to_vt is off.
        if config.get("submit_to_vt"):
            self.vt = Virustotal(config.get("vt_key", optional=True))
        else:
            self.vt = None
        self.cuckoo = Cuckoo(config)
        self.do_ip_to_asn_resolution = config.get("do_ip_to_asn_resolution",
                                                  optional=True,
                                                  default=True)

    def get_asn(self, asn):
        """Return the ASN row as JSON (depth=1), creating it on first sight.

        Implicitly returns None when the ASN is unknown and the external
        lookup (get_asn_info) fails.
        """
        asn_obj = self.session.query(ASN).filter(ASN.asn == asn).first()

        if asn_obj:
            return asn_obj.json(depth=1)
        else:
            asn_info = get_asn_info(asn)
            if asn_info:
                asn_obj = ASN(asn=asn,
                              name=asn_info['name'],
                              reg=asn_info['reg'],
                              country=asn_info['country'])
                self.session.add(asn_obj)
                return asn_obj.json(depth=1)

    @db_wrapper
    def put_session(self, session):
        """Store one honeypot session: the connection row, its URLs and
        samples, tag matching, and network association. Returns []."""
        ipinfo = None
        asn = None
        block = None
        country = None
        network_id = None

        if self.do_ip_to_asn_resolution:
            ipinfo = get_ip_info(session["ip"])
            if ipinfo:
                # Side effect: ensures the ASN row exists before use.
                asn_obj = self.get_asn(ipinfo["asn"])
                asn = ipinfo["asn"]
                block = ipinfo["ipblock"]
                country = ipinfo["country"]

        # Calculate "hash": a fuzzy fingerprint of the attacker's input --
        # one 16-bit hash per printable input line, hex-encoded.
        # NOTE(review): str += struct.pack(...) and str.encode("hex") are
        # Python-2-only; this block breaks on Python 3.
        connhash = ""
        for event in session["stream"]:
            if event["in"]:
                line = event["data"]
                # Keep printable ASCII only so the hash ignores control
                # characters and whitespace noise.
                line = ''.join(char for char in line
                               if ord(char) < 128 and ord(char) > 32)
                if line != "":
                    linehash = abs(hash(line)) % 0xFFFF
                    connhash += struct.pack("!H", linehash)
        connhash = connhash.encode("hex")

        backend_user = self.session.query(User).filter(
            User.username == session["backend_username"]).first()

        conn = Connection(ip=session["ip"],
                          user=session["user"],
                          date=session["date"],
                          password=session["pass"],
                          stream=json.dumps(session["stream"]),
                          asn_id=asn,
                          ipblock=block,
                          country=country,
                          connhash=connhash,
                          backend_user_id=backend_user.id)

        self.session.add(conn)
        self.session.flush()  # to get id

        samples = []
        urls = []
        for sample_json in session["samples"]:
            sample, url = self.create_url_sample(sample_json)

            # Inherit the first network id seen on any sample or url.
            if network_id == None and sample.network_id != None:
                network_id = sample.network_id

            if network_id == None and url.network_id != None:
                network_id = url.network_id

            conn.urls.append(url)
            samples.append(sample)
            urls.append(url)

        # Find previous connections
        # A connection is associated when:
        #  - same honeypot/user
        #  - connection happened as long as 120s before
        #  - same client ip OR same username/password combo
        assoc_timediff = 120
        previous_conns = (self.session.query(Connection).filter(
            Connection.date > (conn.date - assoc_timediff),
            or_(
                and_(Connection.user == conn.user,
                     Connection.password == conn.password),
                Connection.ip == conn.ip),
            Connection.backend_user_id == conn.backend_user_id,
            Connection.id != conn.id).all())

        for prev in previous_conns:
            if network_id == None and prev.network_id != None:
                network_id = prev.network_id
            conn.conns_before.append(prev)

        # Check connection against all tags
        tags = self.session.query(Tag).all()
        for tag in tags:
            json_obj = conn.json(depth=0)
            json_obj["text_combined"] = filter_ascii(json_obj["text_combined"])
            if simple_eval(tag.code, names=json_obj) == True:
                self.db.link_conn_tag(conn.id, tag.id)

        # Only create new networks for connections with urls or associated
        # conns, to prevent the creation of thousands of networks
        # NOTE: only conns with network == NULL will get their network updated
        #       later so we should only create a network where we cannot easily
        #       change it later
        if (len(conn.urls) > 0
                or len(previous_conns) > 0) and network_id == None:
            network_id = self.create_network().id

        # Update network on self
        conn.network_id = network_id

        # Update network on all added Urls
        for url in urls:
            if url.network_id == None:
                url.network_id = network_id

        # Update network on all added Samples
        for sample in samples:
            if sample.network_id == None:
                sample.network_id = network_id

        # Update network on all previous connections without one
        if network_id != None:
            for prev in previous_conns:
                if prev.network_id == None:
                    prev.network_id = network_id

        self.session.flush()
        return []

    @db_wrapper
    def create_network(self):
        """Create, persist and return a fresh Network row (flushed for id)."""
        net = Network()
        self.session.add(net)
        self.session.flush()
        return net

    def create_url_sample(self, f):
        """Get-or-create the Url and Sample rows for one sample report dict
        ``f`` (keys: url, sha256, name, length, date, info) and link them.

        New samples are enriched with a best-effort VT lookup. Network ids
        are propagated between the pair. Returns (sample, url).
        """
        url = self.session.query(Url).filter(Url.url == f["url"]).first()
        if url == None:
            url_ip = None
            url_asn = None
            url_country = None

            if self.do_ip_to_asn_resolution:
                url_ip, url_info = get_url_info(f["url"])
                if url_info:
                    asn_obj_url = self.get_asn(url_info["asn"])
                    url_asn = url_info["asn"]
                    url_country = url_info["country"]

            url = Url(url=f["url"],
                      date=f["date"],
                      ip=url_ip,
                      asn=url_asn,
                      country=url_country)
            self.session.add(url)

        sample = self.session.query(Sample).filter(
            Sample.sha256 == f["sha256"]).first()
        if sample == None:
            result = None
            try:
                if self.vt != None:
                    vtobj = self.vt.query_hash_sha256(f["sha256"])
                    if vtobj:
                        # "<positives>/<total> <best name>"
                        result = str(vtobj["positives"]) + "/" + str(
                            vtobj["total"]) + " " + self.vt.get_best_result(
                                vtobj)
            except:
                # Deliberate best-effort: VT errors must not block ingestion.
                pass

            sample = Sample(sha256=f["sha256"],
                            name=f["name"],
                            length=f["length"],
                            date=f["date"],
                            info=f["info"],
                            result=result)
            self.session.add(sample)

        # Propagate an existing network id in either direction.
        if sample.network_id != None and url.network_id == None:
            url.network_id = sample.network_id

        if sample.network_id == None and url.network_id != None:
            sample.network_id = url.network_id

        url.sample = sample

        return sample, url

    @db_wrapper
    def put_sample(self, data):
        """Store raw sample bytes under their SHA-256 and submit for analysis
        (Cuckoo when enabled, otherwise VirusTotal when submission is on)."""
        sha256 = hashlib.sha256(data).hexdigest()
        self.db.put_sample_data(sha256, data)
        if config.get("cuckoo_enabled"):
            self.cuckoo.upload(os.path.join(config.get("sample_dir"), sha256),
                               sha256)
        elif config.get("submit_to_vt"):
            self.vt.upload_file(os.path.join(config.get("sample_dir"), sha256),
                                sha256)

    @db_wrapper
    def update_vt_result(self, sample_sha):
        """Re-query VT for a stored sample and refresh its result string.

        Returns the sample as JSON (depth=1) on success, else None.
        """
        sample = self.session.query(Sample).filter(
            Sample.sha256 == sample_sha).first()
        if sample:
            vtobj = self.vt.query_hash_sha256(sample_sha)
            if vtobj:
                sample.result = str(vtobj["positives"]) + "/" + str(
                    vtobj["total"]) + " " + self.vt.get_best_result(vtobj)
                return sample.json(depth=1)
        return None
 # NOTE(review): orphaned __init__ fragment at 1-space indent with no
 # enclosing class visible -- looks like a duplicated scrap of a
 # ClientController variant; confirm whether it should be removed.
 def __init__(self):
     self.session = None
     self.vt = Virustotal(config.get("vt_key"))
     self.cuckoo = Cuckoo(config)
예제 #16
0
def table(data):
    """Pretty-print a VirusTotal API response as colorized tables (Python 2).

    Behaviour is driven by the global docopt ``arguments`` dict:
      * report mode  -- print metadata plus per-engine / per-sample tables,
      * scan mode    -- print the scan id, sleep 30s, then fetch the report,
      * --rescan     -- resubmit the resource and output the new report.
    """
    # URL shortening is best-effort: fall back to full permalinks when no
    # goo.gl key is configured.
    try:
        googlkey = readconfig('googl')
        g = Googl(googlkey)
        shorten = True
    except:
        googlkey = None
        shorten = False
    metatable = PrettyTable()
    metafields = collections.OrderedDict()
    if arguments['report']:
        if not arguments['ip'] and not arguments['domain']:
            if arguments['url']:
                metafields['URL'] = data['url']
            elif arguments['file'] or arguments['hash']:
                metafields['MD5'] = data['md5']
                metafields['SHA1'] = data['sha1']
                metafields['SHA256'] = data['sha256']
            # Red when more than half of the engines flagged the resource.
            if int(data['positives']) > (int(data['total']) / 2):
                c = red
            else:
                c = green
            detectionratio = '{0}/{1}'.format(data['positives'], data['total'])
            metafields['Detection ratio'] = '{0}'.format(detectionratio)
            metafields['Analysis date'] = data['scan_date']
            metafields['Scan id'] = data['scan_id']
            if shorten:
                link = g.shorten(data['permalink'])['id']
            else:
                link = data['permalink']
            metafields['Link'] = link
            for f in metafields:
                col = green
                if f == 'Detection ratio':
                    col = c
                metatable.add_row([
                    colorize(colorize(f, blue), bold),
                    colorize(str(metafields[f]), col)
                ])
            metatable.align = "l"
            metatable.header = False
            print metatable
            # Per-engine detection table.
            scans = data['scans']
            scanstable = PrettyTable(
                colorize(
                    colorize(['Engine', 'Detected', 'Result', 'Detail'], blue),
                    bold))
            for key in scans.keys():
                engine = key
                detected = scans[key]['detected']
                result = scans[key]['result']
                if 'detail' in scans[key]:
                    if shorten:
                        detail = g.shorten(scans[key]['detail'])['id']
                    else:
                        detail = scans[key]['detail']
                else:
                    detail = None
                if detected:
                    scanstable.add_row(
                        colorize([engine, detected, result, detail], red))
                else:
                    scanstable.add_row(
                        colorize([engine, detected, result, detail], green))
            scanstable.align = "l"
            print scanstable
        elif arguments['ip'] or arguments['domain']:
            # IP and domain reports share the resolutions/URLs/samples
            # sections below; headtype selects the per-mode column label.
            if arguments['ip']:
                headtype = 'Hostname'
                headtype2 = 'hostname'
                if 'asn' in data:
                    metafields['AS owner'] = data['as_owner']
                    metafields['ASN'] = data['asn']
                    metafields['Country'] = data['country']
                    for f in metafields:
                        metatable.add_row([
                            colorize(colorize(f, blue), bold),
                            colorize(str(metafields[f]), green)
                        ])
                    metatable.align = "l"
                    metatable.header = False
                    print metatable
            elif arguments['domain']:
                headtype = 'IP address'
                headtype2 = 'ip_address'
                cattable = PrettyTable(
                    colorize(colorize(['Categories'], blue), bold))
                for c in data['categories']:
                    cattable.add_row([colorize(str(c), green)])
                cattable.align = "l"
                print cattable
                if 'WOT domain info' in data:
                    print 'WOT domain info'
                    w = PrettyTable()
                    for k in data['WOT domain info']:
                        w.add_row([
                            colorize(colorize(str(k), blue), bold),
                            colorize(str(data['WOT domain info'][k]), green)
                        ])
                    w.align = "l"
                    w.header = False
                    print w
                if 'subdomains' in data:
                    subtable = PrettyTable(
                        colorize(colorize(['Subdomains'], blue), bold))
                    for s in data['subdomains']:
                        subtable.add_row([colorize(str(s), green)])
                    subtable.align = "l"
                    print subtable
                whoistable = PrettyTable(
                    colorize(colorize(['Whois lookup'], blue), bold))
                whoistable.add_row([data['whois']])
                whoistable.align = "l"
                print whoistable
            if len(data['resolutions']) > 0:
                print 'Resolutions {0}'.format(len(data['resolutions']))
                restable = PrettyTable(
                    colorize(colorize([headtype, 'Last resolved'], blue),
                             bold))
                for ip in data['resolutions']:
                    restable.add_row(
                        colorize([ip[headtype2], ip['last_resolved']], green))
                restable.align = "l"
                print restable
            if len(data['detected_urls']) > 0:
                print 'URLs {0}'.format(len(data['detected_urls']))
                urltable = PrettyTable(
                    colorize(
                        colorize(['Analysis date', 'Detection ratio', 'URL'],
                                 blue), bold))
                for u in data['detected_urls']:
                    adate = u['scan_date']
                    positives = u['positives']
                    total = u['total']
                    url = u['url']
                    ratio = '{0}/{1}'.format(positives, total)
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    urltable.add_row(colorize([adate, ratio, url], c))
                urltable.align = "l"
                print urltable
            if 'detected_referrer_samples' in data:
                print 'Detected referrer samples {0}'.format(
                    len(data['detected_referrer_samples']))
                dreftable = PrettyTable(
                    colorize(colorize(['SHA256', 'Detection ratio'], blue),
                             bold))
                for dref in data['detected_referrer_samples']:
                    positives = dref['positives']
                    total = dref['total']
                    ratio = '{0}/{1}'.format(positives, total)
                    shahash = dref['sha256']
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    dreftable.add_row(colorize([shahash, ratio], c))
                dreftable.align = "l"
                print dreftable
            if 'detected_downloaded_samples' in data:
                print 'Detected downloaded samples {0}'.format(
                    len(data['detected_downloaded_samples']))
                ddowntable = PrettyTable(
                    colorize(
                        colorize(
                            ['Analysis date', 'SHA256', 'Detection ratio'],
                            blue), bold))
                for ddown in data['detected_downloaded_samples']:
                    adate = ddown['date']
                    positives = ddown['positives']
                    total = ddown['total']
                    ratio = '{0}/{1}'.format(positives, total)
                    shahash = ddown['sha256']
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    ddowntable.add_row(colorize([adate, shahash, ratio], c))
                ddowntable.align = "l"
                print ddowntable
            if 'detected_communicating_samples' in data:
                print 'Detected communicating samples {0}'.format(
                    len(data['detected_communicating_samples']))
                dcommtable = PrettyTable(
                    colorize(
                        colorize(
                            ['Analysis date', 'SHA256', 'Detection ratio'],
                            blue), bold))
                for dcomm in data['detected_communicating_samples']:
                    adate = dcomm['date']
                    positives = dcomm['positives']
                    total = dcomm['total']
                    ratio = '{0}/{1}'.format(positives, total)
                    shahash = dcomm['sha256']
                    if int(positives) > (int(total) / 2):
                        c = red
                    else:
                        c = green
                    dcommtable.add_row(colorize([adate, shahash, ratio], c))
                dcommtable.align = "l"
                print dcommtable
    elif arguments['scan'] and not arguments['--rescan']:
        if arguments['url']:
            metafields['URL'] = data['url']
        elif arguments['file']:
            metafields['MD5'] = data['md5']
            metafields['SHA1'] = data['sha1']
            metafields['SHA256'] = data['sha256']
        metafields['Scan id'] = data['scan_id']
        if shorten:
            link = g.shorten(data['permalink'])['id']
        else:
            link = data['permalink']
        metafields['Link'] = link
        for f in metafields:
            metatable.add_row([
                colorize(colorize(f, blue), bold),
                colorize(str(metafields[f]), green)
            ])
        metatable.align = "l"
        metatable.header = False
        print metatable
        # Give VT time to process the submission, then flip the arguments
        # into report mode and fetch the finished report.
        time.sleep(30)
        arguments['scan'] = False
        arguments['report'] = True
        arguments['<resource>'] = data['scan_id']
        key = readconfig('virustotal')
        vtc = Virustotal(key)
        if arguments['url']:
            output(vtc.urlReport(arguments['<resource>']))
        elif arguments['file']:
            output(vtc.rscReport(arguments['<resource>']))
    elif arguments['--rescan']:
        arguments['scan'] = False
        arguments['report'] = True
        arguments['<resource>'] = data['scan_id']
        key = readconfig('virustotal')
        vtc = Virustotal(key)
        output(vtc.rscReport(arguments['<resource>']))
예제 #17
0
 # NOTE(review): orphaned __init__ fragment at 1-space indent with no
 # enclosing class visible -- duplicated scrap of the ClientController
 # constructor; confirm whether it should be removed.
 def __init__(self):
     self.vt = Virustotal(config["vt_key"])
     self.db = None
     self.sess = None
예제 #18
0
from virustotal import Virustotal
from pprint import pprint

# Usage examples for the Virustotal client wrapper.
# Normal initialisation: API key only.
vtotal = Virustotal("Insert API Key Here.")

# NEW as of version 0.0.5: proxy support (second positional argument is a
# requests-style proxies dict).
# Example usage: HTTP(S) proxies.
vtotal = Virustotal("Insert API Key Here.", {
    "http": "http://10.10.1.10:3128",
    "https": "http://10.10.1.10:1080"
})
# Or using SOCKS proxies.
vtotal = Virustotal(
    "Insert API Key Here.", {
        "http": "socks5://user:pass@host:port",
        "https": "socks5://user:pass@host:port"
    })

# NOTE: Check virustotal.py for docstrings containing full parameter descriptions.

# Send a file to Virustotal for analysis.
resp = vtotal.file_scan("./tests.py")  # PATH to file for querying.

# NOTE: This endpoint has been removed from the Public Virustotal API.
# Resend a file to Virustotal for analysis.
# A list containing the resource (SHA256) HASH of the file above.
#resp = vtotal.file_rescan(
#    ["75efd85cf6f8a962fe016787a7f57206ea9263086ee496fc62e3fc56734d4b53"]
#)
## A list containing md5/sha1/sha256 hashes. Can be a combination of any of the three allowed hashes (MAX 25 items).
import os

from util.dbg import dbg
from virustotal import Virustotal
from sampledb import Sampledb

# Module-level clients: VirusTotal for hash lookups, local sample database.
vt = Virustotal()
sdb = Sampledb()

# Engines on vt providing good results, in order of preference.
engines = ["DrWeb", "Kaspersky", "ESET-NOD32"]


def getName(r):
    """Pick a human-readable malware name from a VT report dict ``r``.

    Prefers the verdicts of the preferred ``engines`` (in order), then falls
    back to the first detecting engine in the report. Returns None when the
    report has no scans or no engine detected anything.
    """
    scans = r.get("scans")
    if not scans:
        return None
    # Preferred engines first. Use .get() so an engine that is missing from
    # this particular report no longer raises KeyError (bug in the original,
    # which indexed r["scans"][e] directly).
    for e in engines:
        entry = scans.get(e)
        if entry and entry["detected"]:
            return entry["result"]
    # Fall back to any engine that detected something. items() works on both
    # Python 2 and 3 (the original used the Python-2-only iteritems()).
    for _, entry in scans.items():
        if entry["detected"]:
            return entry["result"]
    return None


#sdb.sql.execute('ALTER TABLE samples ADD COLUMN result TEXT')
#sdb.sql.commit()
for row in sdb.sql.execute(
        'SELECT id, sha256 FROM samples WHERE result is NULL'):
    r = vt.query_hash_sha256(row[1])