Code Example #1
# Imports needed by this excerpt: os from the standard library and MongoClient from
# pymongo. MongoDb is the LO-PHI datastore wrapper defined elsewhere in the project.
import os

from pymongo import MongoClient


def download_screenshots(options, positionals):
    """
        Download all of the screenshots from a MongoDB server
    """
    uri = 'mongodb://' + options.services_host + ':27017/lophi_db'

    print("* Connecting to %s..." % uri)

    # Initialize our database connections
    client = MongoClient(uri)
    DB = MongoDb(uri)

    # Make sure both output directories exist before downloading into them
    ensure_dir(ss_phys)
    ensure_dir(ss_virt)

    # Loop over all of our analyses.
    db = client.lophi_db
    analyses = db.analyses
    for analysis in analyses.find():

        print(analysis['_id'])
        if "output_files" in analysis:
            if "screenshot_final" in analysis['output_files']:
                ss_id = analysis['output_files']['screenshot_final']
                print("Downloading %s..." % ss_id)
                # machine_type 2 goes to the virtual-machine directory as a PPM;
                # everything else goes to the physical-machine directory as a PNG.
                if analysis['machine_type'] == 2:
                    DB.download_file(ss_id,
                                     os.path.join(ss_virt, analysis['_id'] + '.ppm'))
                else:
                    DB.download_file(ss_id,
                                     os.path.join(ss_phys, analysis['_id'] + '.png'))
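
The ss_phys and ss_virt output directories and the ensure_dir helper used above are defined elsewhere in the original script. A minimal sketch of plausible definitions (hypothetical, not taken from the LO-PHI source) might look like this:

import os

# Hypothetical output locations; the real script defines its own paths.
ss_phys = "screenshots/physical"
ss_virt = "screenshots/virtual"


def ensure_dir(path):
    # Create the directory (and any missing parents) if it does not already exist.
    if not os.path.isdir(path):
        os.makedirs(path)
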
Code Example #2
File: get_analyses.py  Project: mit-ll/LO-PHI
def main(args):
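    # Note: in the original get_analyses.py, the imports used below (os, logging,
    # shutil, tarfile, the LO-PHI datastore and MongoDb helpers, and the globals
    # module G) and the module-level "logger" are defined above this function.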

    logging.basicConfig()

    # debug
    if args.debug:
        logger.setLevel(logging.DEBUG)

    db_uri = 'mongodb://' + args.db_host + ':27017/lophi_db'
    DB = MongoDb(db_uri)
    analysis_datastore = datastore.Datastore(db_uri + G.DB_ANALYSES)
    files_datastore = datastore.Datastore(db_uri + '/fs.files')

    results = analysis_datastore.db.collection.find({
        'status': 'COMPLETED',
        'sample': args.sample_id,
        'machine_type': int(args.machine_type)
    })

    logger.info("Number of completed analyses for sample id %s : %d" %
                (args.sample_id, results.count()))

    if not os.path.exists(args.out_dir):
        os.mkdir(args.out_dir)

    for analysis_doc in results:
        analysis_id = analysis_doc['_id']
        logger.info("Downloading files for analysis id %s" % analysis_id)

        outdir_path = os.path.join(args.out_dir, analysis_id)
        if not os.path.exists(outdir_path):
            os.mkdir(outdir_path)
        else:
            logger.info("Analysis directory already exists, skipping.")
            continue

        # write the analysis doc
        analysis_doc_f = open(os.path.join(outdir_path, 'analysis_doc'), 'w')
        analysis_doc_f.write(str(analysis_doc))
        analysis_doc_f.close()

        # grab the disk log
        disk_cap_id = analysis_doc['output_files']['disk_capture']
        disk_cap_url = os.path.join(outdir_path, 'disk.dcap')

        logger.debug("Downloading disk capture log %s" % disk_cap_url)
        files_datastore.download_file(disk_cap_id, disk_cap_url)

        # grab memory snapshots
        clean_memory_dump_id = analysis_doc['output_files']['memory_dump_clean']
        dirty_memory_dump_id = analysis_doc['output_files']['memory_dump_dirty']

        clean_url = os.path.join(outdir_path, 'clean_mem')
        dirty_url = os.path.join(outdir_path, 'dirty_mem')

        logger.debug("Downloading clean memory dump to %s" % clean_url)
        files_datastore.download_file(clean_memory_dump_id, clean_url)

        logger.debug("Downloading dirty memory dump to %s" % dirty_url)
        files_datastore.download_file(dirty_memory_dump_id, dirty_url)

        screenshot1 = os.path.join(outdir_path, 'screenshot_interm')
        screenshot1_id = analysis_doc['output_files']['screenshot']
        screenshot2 = os.path.join(outdir_path, 'screenshot_final')
        screenshot2_id = analysis_doc['output_files']['screenshot_final']

        # machine_type arrives as a command-line string, so normalize before comparing
        if int(args.machine_type) == 2:
            DB.download_file(screenshot1_id, screenshot1 + '.ppm')
            DB.download_file(screenshot2_id, screenshot2 + '.ppm')
        else:
            DB.download_file(screenshot1_id, screenshot1 + '.png')
            DB.download_file(screenshot2_id, screenshot2 + '.png')

        # unpack memory snapshots
        if tarfile.is_tarfile(clean_url):
            logger.debug("Unpacking %s" % clean_url)
            clean_path_out = os.path.join(outdir_path, "sut_memory_clean.mfd")
            clean_tar = tarfile.open(clean_url)
            clean_tar.extractall(outdir_path)
            clean_tar.close()

            # Walk down the nested lophi/tmp/<dir>/<file> path left by the extraction
            p = os.path.join(outdir_path, 'lophi', 'tmp')
            p = os.path.join(p, os.listdir(p)[0])
            p = os.path.join(p, os.listdir(p)[0])

            logger.debug("Moving %s to %s" % (p, clean_path_out))
            shutil.move(p, clean_path_out)
            p = os.path.join(outdir_path, 'lophi')
            shutil.rmtree(p)

        if tarfile.is_tarfile(dirty_url):
            logger.debug("Unpacking %s" % dirty_url)
            dirty_path_out = os.path.join(outdir_path, "sut_memory_dirty.mfd")
            dirty_tar = tarfile.open(dirty_url)
            dirty_tar.extractall(outdir_path)
            dirty_tar.close()

            # Walk down the nested lophi/tmp/<dir>/<file> path left by the extraction
            p = os.path.join(outdir_path, 'lophi', 'tmp')
            p = os.path.join(p, os.listdir(p)[0])
            p = os.path.join(p, os.listdir(p)[0])

            logger.debug("Moving %s to %s" % (p, dirty_path_out))
            shutil.move(p, dirty_path_out)
            p = os.path.join(outdir_path, 'lophi')
            shutil.rmtree(p)
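
Example #2's main() expects an args namespace carrying db_host, sample_id, machine_type, out_dir, and debug attributes. A minimal command-line wrapper along these lines (hypothetical flag names that simply mirror the attributes the function reads) could drive it:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Download artifacts for completed LO-PHI analyses of a sample.")
    parser.add_argument('--db_host', default='localhost',
                        help="MongoDB host serving lophi_db")
    parser.add_argument('--sample_id', required=True,
                        help="Sample id whose completed analyses should be fetched")
    parser.add_argument('--machine_type', required=True,
                        help="Machine type value recorded in the analysis documents")
    parser.add_argument('--out_dir', default='analyses',
                        help="Directory to download analysis artifacts into")
    parser.add_argument('--debug', action='store_true',
                        help="Enable debug logging")
    main(parser.parse_args())
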
Code Example #3
def main(args):

    logging.basicConfig()

    # debug
    if args.debug:
        logger.setLevel(logging.DEBUG)

    db_uri = 'mongodb://'+args.db_host+':27017/lophi_db'
    DB = MongoDb(db_uri)
    analysis_datastore = datastore.Datastore(db_uri+G.DB_ANALYSES)
    files_datastore = datastore.Datastore(db_uri+'/fs.files')

    results = analysis_datastore.db.collection.find({'status':'COMPLETED',
                                                     "sample":args.sample_id,
                                                     'machine_type':int(args.machine_type)})

    logger.info("Number of completed analyses for sample id %s : %d" % (args.sample_id, results.count()))

    if not os.path.exists(args.out_dir):
        os.mkdir(args.out_dir)

    for analysis_doc in results:
        analysis_id = analysis_doc['_id']
        logger.info("Downloading files for analysis id %s" % analysis_id)

        outdir_path = os.path.join(args.out_dir, analysis_id)
        if not os.path.exists(outdir_path):
            os.mkdir(outdir_path)
        else:
            logger.info("Analysis directory already exists, skipping.")
            continue

        # write the analysis doc
        analysis_doc_f = open(os.path.join(outdir_path, 'analysis_doc'), 'w')
        analysis_doc_f.write(str(analysis_doc))
        analysis_doc_f.close()

        # grab the disk log
        disk_cap_id = analysis_doc['output_files']['disk_capture']
        disk_cap_url = os.path.join(outdir_path, 'disk.dcap')

        logger.debug("Downloading disk capture log %s" % disk_cap_url)
        files_datastore.download_file(disk_cap_id, disk_cap_url)

        # grab memory snapshots
        clean_memory_dump_id = analysis_doc['output_files']['memory_dump_clean']
        dirty_memory_dump_id = analysis_doc['output_files']['memory_dump_dirty']

        clean_url = os.path.join(outdir_path, 'clean_mem')
        dirty_url = os.path.join(outdir_path, 'dirty_mem')

        logger.debug("Downloading clean memory dump to %s" % clean_url)
        files_datastore.download_file(clean_memory_dump_id, clean_url)

        logger.debug("Downloading dirty memory dump to %s" % dirty_url)
        files_datastore.download_file(dirty_memory_dump_id, dirty_url)

        screenshot1 = os.path.join(outdir_path, 'screenshot_interm')
        screenshot1_id = analysis_doc['output_files']['screenshot']
        screenshot2 = os.path.join(outdir_path, 'screenshot_final')
        screenshot2_id = analysis_doc['output_files']['screenshot_final']

        # machine_type arrives as a command-line string, so normalize before comparing
        if int(args.machine_type) == 2:
            DB.download_file(screenshot1_id, screenshot1+'.ppm')
            DB.download_file(screenshot2_id, screenshot2+'.ppm')
        else:
            DB.download_file(screenshot1_id, screenshot1+'.png')
            DB.download_file(screenshot2_id, screenshot2+'.png')

        # unpack memory snapshots
        if tarfile.is_tarfile(clean_url):
            logger.debug("Unpacking %s" % clean_url)
            clean_path_out = os.path.join(outdir_path, "sut_memory_clean.mfd")
            clean_tar = tarfile.open(clean_url)
            clean_tar.extractall(outdir_path)
            clean_tar.close()

            # Walk down the nested lophi/tmp/<dir>/<file> path left by the extraction
            p = os.path.join(outdir_path, 'lophi', 'tmp')
            p = os.path.join(p, os.listdir(p)[0])
            p = os.path.join(p, os.listdir(p)[0])

            logger.debug("Moving %s to %s" % (p, clean_path_out))
            shutil.move(p, clean_path_out)
            p = os.path.join(outdir_path, 'lophi')
            shutil.rmtree(p)

        if tarfile.is_tarfile(dirty_url):
            logger.debug("Unpacking %s" % dirty_url)
            dirty_path_out = os.path.join(outdir_path, "sut_memory_dirty.mfd")
            dirty_tar = tarfile.open(dirty_url)
            dirty_tar.extractall(outdir_path)
            dirty_tar.close()

            # Walk down the nested lophi/tmp/<dir>/<file> path left by the extraction
            p = os.path.join(outdir_path, 'lophi', 'tmp')
            p = os.path.join(p, os.listdir(p)[0])
            p = os.path.join(p, os.listdir(p)[0])

            logger.debug("Moving %s to %s" % (p, dirty_path_out))
            shutil.move(p, dirty_path_out)
            p = os.path.join(outdir_path, 'lophi')
            shutil.rmtree(p)