Example #1
0
    def __init__(self):
        """Initialise the application: open the database and wire up routes."""
        super().__init__()

        # One shared connection used by all request handlers.
        self.conn = connect_db()

        # (registrar, URL pattern, handler) triples; registration order is
        # preserved so the catch-all static route stays last.
        route_table = [
            (self.get,    '/api/log',                              self.list_logs),
            (self.get,    '/api/log/<id_:int>',                    self.get_log),
            (self.delete, '/api/log/<id_:int>',                    self.del_log),
            (self.get,    '/api/plot',                             self.list_plots),
            (self.post,   '/api/plot',                             self.new_plot),
            (self.get,    '/api/plot/<id_:int>',                   self.get_plot),
            (self.delete, '/api/plot/<id_:int>',                   self.del_plot),
            (self.get,    '/api/plot/<plot:int>/series',           self.list_series),
            (self.post,   '/api/plot/<plot:int>/series',           self.new_series),
            (self.get,    '/api/plot/<plot:int>/series/<id_:int>', self.get_series),
            (self.delete, '/api/plot/<plot:int>/series/<id_:int>', self.del_series),
            (self.put,    '/api/plot/<plot:int>/series/<id_:int>', self.update_series),
            (self.route,  '/',                                     self.root),
            (self.route,  '/plot',                                 self.plot),
            (self.route,  '/<path:path>',                          self.static),
        ]
        for register, pattern, handler in route_table:
            register(pattern, callback=handler)
    def handle_image_files(self, casedir, basedir, dbname="acquire_files.db", ehashfname=""):
        """Import evidence images recorded in a previously acquired database.

        Reads evidence_sources, partitions and file_groups from the sqlite
        database at ``basedir/dbname`` and re-inserts them through this
        object's insert_* helpers.

        casedir    -- unused here; kept for interface compatibility
        basedir    -- directory containing the source database
        dbname     -- source database file name
        ehashfname -- hash-file name forwarded to insert_evidence_source
        """
        # No source database means no images were added to the evidence
        # list.  (The original probed with open() -- leaking the file
        # descriptor -- and a bare except that swallowed every error.)
        if not os.path.isfile(os.path.join(basedir, dbname)):
            return

        # conn/cursor read the source database; only selects are performed,
        # so no commit is needed -- but the connection is now closed when done.
        (conn, cursor) = common.connect_db(basedir, dbname)
        try:
            cursor.execute("select filename, id from evidence_sources")
            imgs = cursor.fetchall()

            # each image file; self.img_filename is deliberately an instance
            # attribute -- other methods may read the current file name
            for (self.img_filename, orig_id) in imgs:

                evi_id = self.insert_evidence_source(self.img_filename, 0, ehashfname)

                cursor.execute("select number, offset, id from partitions where evidence_file_id=?", [orig_id])
                parts = cursor.fetchall()

                # each partition in the image
                for (number, offset, part_id) in parts:

                    new_part_id = self.insert_partition(number, offset, evi_id)

                    cursor.execute("select group_name,id from file_groups where partition_id=?", [part_id])
                    groups = cursor.fetchall()

                    self.insert_groups(groups, new_part_id, basedir, cursor)
        finally:
            conn.close()
Example #3
0
    def db_ops(self):
        """Open acquire_files.db and create its schema on first use."""
        (self.conn, self.cursor) = common.connect_db(self.store_dir,
                                                     "acquire_files.db")

        # The evidence_sources table doubles as the "schema already
        # initialized" marker.
        self.cursor.execute(
            "select sql from sqlite_master where type='table' and name=?",
            ["evidence_sources"])
        if self.cursor.fetchall():
            return  # tables already exist -- nothing to do

        schema = (
            "evidence_sources (filename text,   id integer primary key asc)",
            "partitions       (number int, offset int, evidence_file_id int, id integer primary key asc)",
            "file_groups      (group_name text, partition_id int, id integer primary key asc)",
            "reg_type         (type_name text, file_group_id int, id integer primary key asc)",
            "rp_groups        (rpname text, reg_type_id int, id integer primary key asc)",
            "registry_files   (filename text,  mtime text, reg_type_id int, file_id int, file_type int, id integer primary key asc)",
        )
        for ddl in schema:
            self.cursor.execute("create table " + ddl)

        self.conn.commit()
Example #4
0
def main():
    """Register a log file in the database, exiting if already present."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--comment', default='')
    arg_parser.add_argument('log')
    opts = arg_parser.parse_args()

    log_id = gen_id()
    log_path = os.path.abspath(opts.log)

    conn = connect_db()
    try:
        # `with conn` commits on success, rolls back on error.
        with conn:
            conn.execute(r'INSERT INTO logs VALUES(?,?,?)',
                         (log_id, log_path, opts.comment))
    except sqlite3.IntegrityError:
        # Path is the primary key; a duplicate insert means it's registered.
        sys.exit('{:s} is already registered'.format(log_path))
    def db_ops(self):
        """Connect to acquire_files.db, creating the schema if missing."""
        (self.conn, self.cursor) = common.connect_db(self.store_dir, "acquire_files.db")

        # Presence of evidence_sources tells us whether the schema exists.
        self.cursor.execute("select sql from sqlite_master where type='table' and name=?", ["evidence_sources"])

        if not self.cursor.fetchall():
            # First run: build every table.
            for ddl in ("evidence_sources (filename text,   id integer primary key asc)",
                        "partitions       (number int, offset int, evidence_file_id int, id integer primary key asc)",
                        "file_groups      (group_name text, partition_id int, id integer primary key asc)",
                        "reg_type         (type_name text, file_group_id int, id integer primary key asc)",
                        "rp_groups        (rpname text, reg_type_id int, id integer primary key asc)",
                        "registry_files   (filename text,  mtime text, reg_type_id int, file_id int, file_type int, id integer primary key asc)"):
                self.cursor.execute("create table " + ddl)

            self.conn.commit()
Example #6
0
import sys
import common as c
import json
import math
from Event import Event

def main(uid):
    """Look up and print the group for *uid* via Event.get_group."""
    # print(...) call form is valid and behaves identically under both
    # Python 2 and 3; the original `print x` statement is a syntax error
    # on Python 3.
    print(Event.get_group(uid))

# Entry point: expects exactly one argument, the uid to look up.
# (Tabs normalized to 4-space indents; print statement converted to the
# print(...) call form, which works under both Python 2 and 3.)
if __name__ == '__main__':
    if len(sys.argv) == 2:
        c.connect_db()
        main(sys.argv[1])
    else:
        print('Usage: python get_group.py <uid>')
Example #7
0
    'epic':
    frozenset(['epic']),
    'genre':
    frozenset([
        'rpg', 'role playing', 'fps', 'first person shooter', 'simulation',
        'sim', 'racing', 'sport', 'sports', 'adventure', 'action', 'puzzle',
        'tbs', 'turn based stragegy', 'rts', 'realtime strategy',
        'real time strategy', 'roguelike', 'visual novel', 'platformer',
        'shooter', 'horror', 'fighting', 'platform', 'shmup', 'beat em up',
        'pinball', 'sandbox', 'tower defense', 'mmorpg'
    ]),
    'roguelike':
    frozenset(['roguelike'])
}

# Open the projects database and hand out dedicated cursors for testing,
# writing and iteration.
db_connection = common.connect_db()
# NOTE(review): db_test and db_write are created *before* row_factory is
# assigned below; sqlite3 cursors inherit the connection's row_factory at
# creation time, so these two yield plain tuples while db_iterate yields
# sqlite3.Row objects -- confirm this ordering is intentional.
db_test = db_connection.cursor()
db_write = db_connection.cursor()
db_connection.row_factory = sqlite3.Row
db_iterate = db_connection.cursor()

# Count of projects processed so far (used by the loop below).
progress = 0
db_iterate.execute('SELECT * FROM projects')
for project in db_iterate:
    print(u'Processing {0}'.format(project['title']))
    aggregate = []
    original_aggregate = []
    counts = {'epic': 0, 'zombie': 0, 'roguelike': 0, 'names': 0, 'genre': 0}

    test_seen = {
        'epic': set(),
Example #8
0
import common

db_connection = common.connect_db()

# Drop every table created by the matcher scripts.  A loop replaces six
# copy-pasted execute calls; the statement text is unchanged, so this
# still raises sqlite3.OperationalError if a table is already gone.
for _table in ('names', 'match_names', 'match_roguelike',
               'match_epic', 'match_genre', 'match_zombie'):
    db_connection.execute('DROP TABLE ' + _table)

# DDL autocommits in Python's sqlite3, so commit() is a harmless no-op
# here, but it makes the intent explicit; the connection is now closed
# instead of leaking until interpreter exit.
db_connection.commit()
db_connection.close()

Example #9
0
 def setUp(self):
     """Connect to the database and persist every fixture event."""
     c.connect_db()
     for event in test_events:
         save_event.main(json.dumps(event))
Example #10
0
# Percent and total projects with feature over per-feature threshold
# Projects with feature over per-feature threshold per time period (20 periods)
# Correlation of each feature to epic feature, based on binary incidence
#   Binary because: Low incidence levels may be noise, and I don't expect any incidence to go above 4-5.
#   Any present feature may be indicative of a bad writeup/game, but I want to know if the way they are bad is related (correlated).
# Coincidence of each feature to epic feature
# Projects vs # features present, to see how many projects have no features

import datetime
from scipy.stats.stats import pearsonr
import pygal
import sqlite3

import common

# Open the projects database; separate cursors for ad-hoc tests and for
# the main iteration loop.
db = common.connect_db()
# NOTE(review): db_test is created *before* row_factory is assigned below;
# sqlite3 cursors inherit the connection's row_factory when created, so
# db_test returns plain tuples while db_iterate returns sqlite3.Row --
# confirm this ordering is intentional.
db_test = db.cursor()
db.row_factory = sqlite3.Row
db_iterate = db.cursor()

def format_time(stamp):
    return datetime.datetime.fromtimestamp(stamp).strftime('(%Y) %m-%d')

# Summarise the dataset: total project count plus the date range covered.
total, oldest, newest = tuple(db.execute('SELECT COUNT(1), MIN(date), MAX(date) FROM projects').fetchone())
print('Total projects: {0}'.format(total))
print('Oldest project: {0}'.format(format_time(oldest)))
print('Newest project: {0}'.format(format_time(newest)))

features = {
    'epic': { 
        'threshold': 1, 
Example #11
0
# Percent and total projects with feature over per-feature threshold
# Projects with feature over per-feature threshold per time period (20 periods)
# Correlation of each feature to epic feature, based on binary incidence
#   Binary because: Low incidence levels may be noise, and I don't expect any incidence to go above 4-5.
#   Any present feature may be indicative of a bad writeup/game, but I want to know if the way they are bad is related (correlated).
# Coincidence of each feature to epic feature
# Projects vs # features present, to see how many projects have no features

import datetime
from scipy.stats.stats import pearsonr
import pygal
import sqlite3

import common

# Open the projects database; separate cursors for ad-hoc tests and for
# the main iteration loop.
db = common.connect_db()
# NOTE(review): db_test is created *before* row_factory is assigned below;
# sqlite3 cursors inherit the connection's row_factory when created, so
# db_test returns plain tuples while db_iterate returns sqlite3.Row --
# confirm this ordering is intentional.
db_test = db.cursor()
db.row_factory = sqlite3.Row
db_iterate = db.cursor()


def format_time(stamp):
    return datetime.datetime.fromtimestamp(stamp).strftime("(%Y) %m-%d")


# Summarise the dataset: total project count plus the date range covered.
total, oldest, newest = tuple(db.execute("SELECT COUNT(1), MIN(date), MAX(date) FROM projects").fetchone())
print("Total projects: {0}".format(total))
print("Oldest project: {0}".format(format_time(oldest)))
print("Newest project: {0}".format(format_time(newest)))

features = {