Example #1
def create_subset_db(file_index_fn,
                     subset_file_index_fn,
                     processed=True,
                     extension=".hdf5"):
    """ From a main DB, create a subset DB of the main DB, by only extracting
    the hdf5 files. If 'processed' input argument is indicated (as True
    or False), only the processed or non processed hdf5 files will be
    added to the freshly created DB"""

    if os.path.exists(subset_file_index_fn):
        os.remove(subset_file_index_fn)

    directory = os.path.dirname(file_index_fn) + "/"
    subset_file_index_fn = directory + subset_file_index_fn

    file_index_db = TinyDB(file_index_fn,
                           storage=CachingMiddleware(JSONStorage))
    subset_file_index_db = TinyDB(subset_file_index_fn,
                                  storage=CachingMiddleware(JSONStorage))
    subset_file_index_db.purge()
    files = Query()

    if processed is True or processed is False:
        query_cmd = ((files.extension == extension) &
                     (files.processed == processed))
    else:
        query_cmd = (files.extension == extension)
    records = file_index_db.search(query_cmd)
    subset_file_index_db.insert_multiple(records)
    file_index_db.close()
    return subset_file_index_db
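
A minimal usage sketch for the function above; the index file names are hypothetical, and the caller closes the returned DB so the CachingMiddleware cache is flushed to disk:

subset_db = create_subset_db("/data/index.json",        # hypothetical main index file
                             "index_processed.json",    # subset index to create
                             processed=True)
print(len(subset_db))   # number of processed .hdf5 records copied over
subset_db.close()       # flush the CachingMiddleware cache to disk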
Example #2
class DB():
    def __init__(self, db_path):
        self.db = TinyDB(db_path)

    def reset(self):
        # Dont release
        self.db.purge()

    def get(self, indexing_id):
        RootObj = Query()


        if not isinstance(indexing_id, str):
            indexing_id = str(indexing_id)

        objs = self.db.search(RootObj.ttrpg_id == indexing_id)
        if objs == []:
            return None
        # print(f"** Got {type(self)} Settings **", objs[0])
        return objs[0]

    def save(self, indexing_id, info):
        
        if not isinstance(indexing_id, str):
            indexing_id = str(indexing_id)

        assert(isinstance(info, dict))
        
        info["ttrpg_id"] = indexing_id

        if self.get(indexing_id) is None:
            self.db.insert(info)
        else:
            # Without a condition, update() would modify every document;
            # restrict it to the record with the matching ttrpg_id.
            self.db.update(info, Query().ttrpg_id == indexing_id)
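
A brief usage sketch of the wrapper above, with a hypothetical settings.json path:

db = DB("settings.json")            # hypothetical database file
db.save(42, {"name": "Alice"})      # stored under ttrpg_id == "42"
print(db.get(42))                   # -> {'name': 'Alice', 'ttrpg_id': '42'}
db.reset()                          # purge() drops every document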
Example #3
class LoggerDB():
    def __init__(self, db_name):
        self.db_name = db_name
        self.db_path = "../DataRepository"
        self.label_list = []

    def connectDB(self):
        try:
            self.db = TinyDB(self.db_path + self.db_name)
        except Exception as e:
            print("Cannot connect to database:", e)

    def setLabelList(self, label_list):
        self.label_list = label_list

    def insert(self, data_list):
        temp_data = dict(zip(self.label_list, data_list))
        self.db.insert(temp_data)

    def purge(self):
        self.db.purge()

    def query(self):
        return self.db.search((Query()["Num"].test(lambda s: s >= 1)))

    def getLabels(self):
        if not len(self.db) <= 0:
            self.records = self.db.all()
            return self.records[0].keys()
        else:
            return []
Example #4
class ScrapingWikipediaPipeline(object):

    ## when this class is used (new instance), certain actions take place
    def __init__(self):
        ## the basic thing to do is to create a connection to the database
        self.create_connection()

    def create_connection(self):
        ## connect to the database, or create it if it does not exist;
        ## the extra parameters (sort_keys etc.) just beautify the JSON file
        self.db = TinyDB('../wikipedia2_db.json',
                         sort_keys=True,
                         indent=4,
                         separators=(',', ': '))
        ## This will delete all documents. Use to ensure new connections
        ## start with a clean database.
        self.db.purge()

    def store_db(self, item):
        ## the spider sends the parsed item
        ## which is then inserted in the database
        self.db.insert({'complete_text': item['complete_text']})

    def process_item(self, item, spider):
        ## the actual insertion into the database takes place here
        self.store_db(item)
        return item
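
For the pipeline above to actually run, Scrapy has to be told about it in the project's settings.py; the module path below is hypothetical and depends on the project layout:

# settings.py (hypothetical project/module names)
ITEM_PIPELINES = {
    'scraping_wikipedia.pipelines.ScrapingWikipediaPipeline': 300,
}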
Example #5
class Widget:
    def __init__(self):
        self.db = TinyDB('./app/db/data.json')

    def get_widgets(self):
        widgets = self.db.all()
        return jsonify(widgets)

    def get_widget(self, name):
        widget = self.db.search(where('name') == name)
        return jsonify(widget)

    def create_widget(self, data):
        self.db.insert(data)
        return self.get_widget(data['name'])

    def upsert_widget(self, name, data):
        Widget = Query()
        self.db.upsert(data, Widget.name == name)
        return self.get_widget(data['name'])

    def delete_widget(self, name):
        self.db.remove(where('name') == name)
        return 'Removed Widget with name %s' % name

    def purge(self):
        self.db.purge()
        return 'Purged database'
Example #6
 def test_simple(self):
     # This should create the graph
     # ModuleA ---(core::complex::ProtoBar)---> ModuleB
     # ModuleB ---(core::complex::ProtoFoo)---> ModuleA
     modules = [
         opalbuilder.OpalModule(path=self.complex_module_file,
                                name="core::complex::ModuleA",
                                inputs=[("core::complex::ProtoFoo",
                                         "proto_foo")],
                                outputs=[("core::complex::ProtoBar",
                                          "proto_bar")]),
         opalbuilder.OpalModule(path=self.complex_module_file,
                                name="core::complex::ModuleB",
                                inputs=[("core::complex::ProtoBar",
                                         "proto_bar")],
                                outputs=[("core::complex::ProtoFoo",
                                          "proto_foo")])
     ]
     correct_graph = {
         (modules[0], "core::complex::ProtoBar"): [modules[1]],
         (modules[1], "core::complex::ProtoFoo"): [modules[0]],
     }
     db = TinyDB('test/db.json')
     db.purge()
     graph = opalbuilder.CalculateConnections(modules, tinydb=db)
     db.close()
     self.assertEqual(graph, correct_graph)
     opalbuilder.VisualizeGraph(graph, "test/test_simple.gv")
Example #7
def delete():
    if request.method == 'GET':
        # linux
        # db = TinyDB('/pythonscript/db.json')
        db = TinyDB('C:/Users/sitas/Desktop/Database_Maid/db.json')
        db.purge()
        return jsonify({"Delete": "OK"})
Example #8
def fillDB():
    db = TinyDB('db.json')
    db.purge()
    friends = api.followers_ids()
    count = 0
    bar = progressbar.ProgressBar(redirect_stdout=True, max_value=len(friends))
    for id in friends:
        user = api.get_user(id)
        db.insert({
            'id': user.id,
            'location': user.location,
            'followers_count': user.followers_count,
            'statuses_count': user.statuses_count,
            'friends_count': user.friends_count,
            'screen_name': user.screen_name,
            'lang': user.lang,
            'following': user.following,
            'time_zone': user.time_zone,
            'created_at': user.created_at,
            'default_user': user.default_profile,
            'default_profile_image': user.default_profile_image
        })
        count = count + 1
        time.sleep(0.1)
        bar.update(count)
Example #9
class Posts:
    def __init__(self):
        self.posts = TinyDB('posts.json')
        self.posts.purge()

    def create(self, seller_name, seller_rating, prod_name, price):
        post = {'seller_name': seller_name,
                'prod_name': prod_name,
                'price': price,
                'time': str(datetime.datetime.utcnow()),
                'seller_rating': seller_rating,
                'id': str(uuid.uuid4())
                }
        self.posts.insert(post)

    def delete(self, seller_name=None, prod_name=None):
        if prod_name:
            self.posts.remove(where('prod_name') == prod_name)
        if seller_name:
            self.posts.remove(where('seller_name') == seller_name)

    def fetch_all(self):
        return self.posts.all()

    def fetch_feeds(self, seller_name):
        feeds = self.posts.search(where('seller_name') == seller_name)
        return feeds

    def clear_all(self):
        self.posts.purge()
Example #10
class DBCache(Singleton):

    _status = False

    def __init__(self, dbFile=None):
        if not self._status:
            self._status = True
            Singleton.__init__(self, "DBache")

            if (dbFile == None):
                dbFile = 'db_cache.json'

            self._dbHandle = TinyDB(dbFile)
            self._tableHandle = self._dbHandle.table("questions_table")
            self._dbfile = dbFile
            self._dbHandle.purge()
            print "INSERT DB"
            quizzes = json.loads(open('quizzes/sample_qns').read())
            self._tableHandle.insert(quizzes)

    @property
    def dbHandle(self):
        return self._dbHandle

    @property
    def tableHandle(self):
        return self._tableHandle

    @property
    def dbFile(self):
        return self._dbFile
Example #11
class Seller:
    def __init__(self):
        self.sellers = TinyDB('sellers.json')
        self.posts = posts
        self.sellers.purge()

    def create(self, name, rating):
        seller = {'name': name,
                  'id': str(uuid.uuid4()),
                  'rating': rating
                  }
        self.sellers.insert(seller)

    def delete(self, name):
        self.sellers.remove(where('name') == name)
        self.posts.delete(seller_name=name)

    def publish(self, seller_name, prod_name, price):
        result = self.sellers.search(where('name') == seller_name)[0]
        seller_rating = result['rating']
        self.posts.create(seller_name, seller_rating, prod_name, price)

    def delete_post(self, seller_name, prod_name):
        self.posts.delete(seller_name, prod_name)

    def fetch_all(self):
        return self.sellers.all()

    def clear_all(self):
        self.sellers.purge()
Example #12
def write_db():
    db = TinyDB("myfile.json")
    db.purge()

    for job in get_monster():
        db.insert(job)

    for job in get_total_jobs():
        db.insert(job)
Example #13
def init(dbpath=None, test=False):
    if test:
        path_db_asset = Path.home() / Path(".gofixit/asset_test.json")
        path_db_request = Path.home() / Path(".gofixit/request_test.json")

        db_asset = TinyDB(path_db_asset)
        db_asset.purge()
        db_asset = GoFixItDB(db_asset)

        db_request = TinyDB(path_db_request)
        db_request.purge()
        db_request = GoFixItDB(db_request)

        c = Controller(db_asset=db_asset,
                       db_request=db_request,
                       view=ViewTabulate())
        c.add_asset("The House")
        c.add_asset("The House2")
        # print(c.view_list_assets())
        # print()
        c.add_request(
            asset_name="The House",
            request_name="Mouse traps",
            due_by=pendulum.now().add(weeks=2),
            recurrence_period=timedelta(weeks=4),
        )
        c.add_request(
            asset_name="The House",
            request_name="Rat traps",
            due_by=pendulum.now(),
            recurrence_period=timedelta(weeks=4),
        )
        c.add_request(
            asset_name="The House2",
            request_name="Mouse traps2",
            due_by=pendulum.now().add(weeks=2),
            recurrence_period=timedelta(weeks=4),
        )
        # print(c.view_list_requests())
        # print()
        # c.view_create_request()
        # c.view_list_asset_requests()
        return c
    else:
        path_db_asset = Path.home() / Path(".gofixit/asset.json")
        path_db_request = Path.home() / Path(".gofixit/request.json")

        db_asset = TinyDB(path_db_asset)
        db_asset = GoFixItDB(db_asset)

        db_request = TinyDB(path_db_request)
        db_request = GoFixItDB(db_request)

        return Controller(db_asset=db_asset,
                          db_request=db_request,
                          view=ViewTabulate())
Example #14
def post_data():
    blue = request.form.get('blue')
    green = request.form.get('green')
    red = request.form.get('red')

    db = TinyDB('db.json')
    db.purge()  # TODO: dont purge
    db.insert({'red': red, 'green': green, 'blue': blue})

    return Response(response="ok", status="200")
Example #15
class AbstractRepository(ABC):
    def __init__(self):
        self.db = TinyDB('./db.json')

    def close(self):
        self.db.close()

    def delete_repo(self):
        self.db.purge_tables()
        self.db.purge()
Example #16
def SpidInit():
    if not os.path.exists("DBFiles"):
        os.makedirs("DBFiles")
    dbQ = TinyDB('DBFiles/DatabaseQueue.json')
    dbQ.purge()
    process = CrawlerProcess({
        'USER_AGENT': 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)'
    })
    process.crawl(Spider)
    process.start()
Example #17
class Test_006_Modify_Not_existing_data_by_valid_query_Function(unittest.TestCase):
	def setUp(self):
		self.db = TinyDB('db.json')

	def tearDown(self):
		self.db.purge()
		self.db.all()

	def test_simple_modify_not_exist(self):
		print("case 6 modify Non-existing data by valid query")
		result=self.db.update({'int': 10}, where('Name') == 'Wendy')
		self.assertEqual(result,None)
Example #18
class Test_007_Delete_Not_existing_data_by_valid_query_Function(unittest.TestCase):
	def setUp(self):
		self.db = TinyDB('db.json')

	def tearDown(self):
		self.db.purge()
		self.db.all()

	def test_simple_delete_not_exist(self):
		print("case 7 Delete Non-existing data by valid query")
		result=self.db.remove(where('Name') == 'Wendy')
		self.assertEqual(result,None)	
Example #19
class Test_005_Search_Not_existing_data_by_valid_query_Function(unittest.TestCase):
	def setUp(self):
		self.db = TinyDB('db.json')

	def tearDown(self):
		self.db.purge()
		self.db.all()

	def test_simple_search_not_exist(self):
		print("case 5 search Non-existing data by valid query")
		result=self.db.search(where('Name') == 'Wendy')
		self.assertEqual(result,[])
Example #20
class Test_001_Insert_by_valid_query_Function(unittest.TestCase):
	def setUp(self):
		self.db = TinyDB('db.json')

	def tearDown(self):
		self.db.purge()
		self.db.all()

	def test_simple_insert_valid_exist(self):
		print("case 1 insert data by valid query")
		self.db.insert({'Name': 'Greg', 'Email': '*****@*****.**', 'int' : 1, 'char':1})
		result=self.db.search(where('Name') == 'Greg')
		self.assertEqual(result,[{'Name': 'Greg', 'Email': '*****@*****.**', 'int' : 1, 'char':1}])
Example #21
class TinyDBGateway(AbstractJSONStorageGateway):
    def __init__(self, file_path: str, table_name: str = "_default") -> None:
        self.table = TinyDB(file_path).table(table_name)

    def create(self, data: dict, max_retries: int = 10) -> dict:
        with transaction(self.table) as tr:
            while max_retries > 0:
                uuid = uuid4()
                if not self.table.contains(where('uuid') == str(uuid)):
                    data.update(uuid=str(uuid))
                    tr.insert(data)
                    return data
                else:
                    max_retries -= 1
            raise StorageError('could not set unique UUID')

    def list_(self) -> list:
        return self.table.all()

    def retrieve(self, uuid: str) -> dict:
        record = self.table.get(where('uuid') == uuid)
        if record:
            return record
        else:
            raise NoResultFound('object does not exist')

    def update(self, uuid: str, data: dict):
        with transaction(self.table) as tr:
            record = self.table.get(where('uuid') == uuid)
            if record:
                tr.update(data, where('uuid') == uuid)
            else:
                raise NoResultFound('object does not exist')

    def delete(self, uuid: str):
        with transaction(self.table) as tr:
            record = self.table.get(where('uuid') == uuid)
            if record:
                tr.remove(where('uuid') == uuid)
            else:
                raise NoResultFound('object does not exist')

    def purge(self):
        self.table.purge()

    def search(self, conditions: dict):
        return self.table.search(
            reduce(lambda x, y: x & y,
                   [where(k) == v for k, v in conditions.items()]))
Example #22
    def make_dep_json(self):
        """
        Get the dependencies of all packages installed and cache the result in a json.
        :return: a tinydb database
        """
        deptree = subprocess.getoutput(
            'pipdeptree -j'
        )  # run pipdeptree (python module) on the terminal: outputs json
        pack_json = json.loads(deptree)  # load json to python environment

        pack_db = TinyDB("pack_db.json")
        pack_db.purge()  # clears the database on every call, avoiding duplicate package entries
        pack_db.insert_multiple(pack_json)
        return pack_db
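
A small usage sketch for the helper above, assuming pipdeptree is installed; the owner object and package name are hypothetical:

from tinydb import Query

pack_db = analyzer.make_dep_json()      # 'analyzer' is a hypothetical instance of the class above
Pkg = Query()
hits = pack_db.search(Pkg.package.key == 'numpy')   # pipdeptree's JSON nests metadata under 'package'
print(hits[0]['dependencies'] if hits else 'numpy not installed')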
Example #23
def recreate_db():
    # Base
    import uuid

    # Flask
    from flask import Flask, jsonify, request
    from flask_cors import CORS

    # DB
    from tinydb import TinyDB, Query

    db = TinyDB('./database.json')
    db.purge()
    db.insert_multiple([{
        'id': uuid.uuid4().hex,
        'name': 'Jack Kerouac',
        'email': '*****@*****.**',
        'address': 'Park Drive,Boston, MA',
        'lat': 42.341590,
        'long': -71.097740,
        'request': 'Lack of diapers in my area, can anyone help?',
        'requestType': 'shopping',
        'needHelp': False,
        'canHelp': False,
    }, {
        'id': uuid.uuid4().hex,
        'name': 'J. K. Rowling',
        'email': '*****@*****.**',
        'address': '23 Aldie St., Allston, MA 02134',
        'lat': 42.358960,
        'long': -71.135920,
        'request': 'Can someone watch my kid from 2-3pm tomorrow?',
        'requestType': 'inHouseHelp',
        'needHelp': False,
        'canHelp': False
    }, {
        'id': uuid.uuid4().hex,
        'name': 'Ben B',
        'email': '*****@*****.**',
        'address': '1000 Commonwealth Ave., Boston, MA 02135',
        'lat': 42.349420,
        'long': -71.132920,
        'request':
        'I need a baby sitter this Friday (3/20/2020) from 1-2pm/ Is anyone available?',
        'requestType': 'inHouseHelp',
        'needHelp': False,
        'canHelp': False
    }])
Example #24
def convert(semester: str):
    """The first command line argument should be the name of the dataset to
    convert (e.g. S19, F17). The name of the input JSON file and output db file
    will be constructed in a systematic way from the dataset name"""

    inPath = datasets.datasetFilename(semester)
    outPath = datasets.databaseFilename(semester)

    with open(inPath, "r", encoding="utf-8") as inFile:
        data = json.load(inFile)

    db = TinyDB(outPath, encoding="utf-8")
    # Delete existing records
    db.purge()
    for d in data:
        db.insert(d)
Example #25
class JakeDB():
    def __init__(self, path):
        self.db = TinyDB(path)

    def insert(self, name, **kwargs):
        # name must unique
        if self.exists(name):
            return -1
        doc = {'name': name}
        for key, value in kwargs.items():
            doc[key] = value
        return self.db.insert(doc)

    def get(self, name):
        return self.db.search(Query().name == name)

    def delete(self, name):
        return self.db.remove(Query().name == name)

    def update(self, name, **kwargs):
        self.delete(name)
        self.insert(name, **kwargs)

    def exists(self, name):
        if len(self.get(name)) == 0:
            return False
        return True

    def get_all(self):
        return self.db.all()

    def delete_all(self):
        return self.db.purge()
Example #26
def update_db(new_results: List[Dict], db: TinyDB, ids: List[Dict],
              article: Query) -> None:
    for result in new_results:
        # if sub present, update it, else insert it
        db.upsert(result, article.sub_id == result["sub_id"])

    # copy all db
    all_db = db.all()
    logger.warning('DB original length: %s', len(all_db))
    # remove old db entry not present in the last search
    filtered_db = [x for x in all_db if x in ids]
    db.purge()
    for db_id in filtered_db:
        # reinitialize db
        db.insert(db_id)
    logger.warning('DB final length: %s', len(db.all()))
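
A hedged wiring sketch for the sync helper above; the file name, fields, and id list are illustrative only:

db = TinyDB('articles.json')          # illustrative file name
article = Query()
new_results = [{'sub_id': 'abc123', 'title': 'First post'}]
ids = new_results                     # entries seen in the latest search; anything else is dropped
update_db(new_results, db, ids, article)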
Example #27
async def update_ranking():
    await client.wait_until_ready()
    while not client.is_closed:
        #Loop through servers
        for server in client.servers:
            updating = True
            #print("update_ranking: "+server.id)
            logger.info('Updating ranking %s', server.id)
            #print(server.member_count)
            db_activity = TinyDB('data/activity.json', storage=serialization)
            Activity = Query()
            result = db_activity.search(Activity.server == server.id)
            #Fill ranking table with authors message count
            db_ranking = TinyDB('data/ranking-' + server.id + '.json',
                                storage=serialization)
            db_ranking.purge()
            for r in result:
                Ranking = Query()
                if (db_ranking.contains(Ranking.author == r['author'])):
                    message_type = r['type']
                    db_ranking.update(increment('count'),
                                      Ranking.author == r['author'])
                    db_ranking.update(
                        add('points', get_points(r['date'], message_type)),
                        Ranking.author == r['author'])
                    db_ranking.update({'last': r['date']},
                                      Ranking.author == r['author'])
                else:
                    message_type = r['type']
                    db_ranking.insert({
                        'author': r['author'],
                        'count': 1,
                        'last': r['date'],
                        'points': get_points(r['date'], message_type)
                    })

            logger.info('Updated ranking %s for %s members', server.id,
                        len(db_ranking))
            db_ranking.close()
            db_activity.close()
            updating = False
        #Wait 300 seconds between update cycles to avoid problems
        await asyncio.sleep(300)
Example #28
def main(url):
    global total_added
    db = TinyDB("db-example.json")
    db.purge()

    # while url:
    print("Web Page: ", url)
    soup = soup_process(url, db)
    # nextlink = soup.find("link", rel="next")
    #
    # url = False
    # if (nextlink):
    #     url = nextlink['href']

    print("Added ", total_added)

    make_excel(db)
Example #29
def add_table(Coin_symbol, Tag, table_name, kuerzel, quelle, close_array,
              volume_array, volume_avrage, close_avrage):
    if len(close_array) == len(volume_array):
        db_kalender = TinyDB('aa_Tabellen/' + table_name + '.json')
        db_administartor = TinyDB('aa_Tabellen/administartor.json')
        set_count = db_administartor.count(where('symbol') == Coin_symbol)
        if set_count == 0:
            db_administartor.insert({'symbol': Coin_symbol,
                                     'volume_avrage': volume_avrage,
                                     'close_avrage': close_avrage})
        db_kalender.purge()
        for i in np.arange(len(close_array)):
            db_kalender.insert({"aa_Tag": Tag[i], "kuerzel": Coin_symbol,
                                "fullName": Coin_symbol,
                                "close": close_array[i],
                                "volume": volume_array[i]})
    else:
        win32api.MessageBox(0, "error g69j: array lengths differ " +
                            str(len(close_array)) + " " + str(len(volume_array)),
                            'Insert error')
    if len(close_array) == len(db_kalender):
        # win32api.MessageBox(0, "table entry added successfully", '!')
        print(str(len(close_array)) + " " + Coin_symbol +
              " table entry added successfully " + str(len(db_administartor)))
    else:
        win32api.MessageBox(0, Coin_symbol + " close_array(" +
                            str(len(close_array)) + ") table_sets(" +
                            str(len(db_kalender)) + ") do not match", '!')
Example #30
def filter_file_index(file_index_db, date=None, sample=None, energy=None,
                      query=None):
    files_query = Query()
    temp_db = TinyDB(storage=MemoryStorage)
    query_cmds = []
    if date:
        query_cmds.append(files_query.date == date)
    if sample:
        query_cmds.append(files_query.sample == sample)
    if energy:
        query_cmds.append(files_query.energy == energy)
    for query_cmd in query_cmds:
        if query is not None:
            query_cmd &= query
        records = file_index_db.search(query_cmd)
        temp_db.purge()
        temp_db.insert_multiple(records)
    return temp_db
Example #31
class Test_008_Insert_exits_data_Function(unittest.TestCase):

	def setUp(self):
		self.db = TinyDB('db.json')

	def tearDown(self):
		self.db.purge()
		self.db.all()

	def test_simple_insert_by_query(self):
		print("case 8 can insert existing data")
		self.db.insert({'Name': 'Yingyu Wu', 'Email': '*****@*****.**', 'int' : 1, 'char':1})
		self.db.insert({'Name': 'Yingyu Wu', 'Email': '*****@*****.**', 'int' : 1, 'char':1})
		result_array = self.db.search(where('Name') == 'Yingyu Wu')
		num = len(result_array)
		#print (result_array)
		#print("search one key,get %d result" %(num))
		self.assertEqual(2,num)
Example #32
File: add.py Project: gabeduke/add
def main(args):

    db = TinyDB('{}.json'.format(args.t))

    if args.c:
        db.purge()
        print('({}): clear'.format(args.t))
        return

    if args.r:
        print('({}): read'.format(args.t))
        for item in db:
            print(item.get('name'))
        return

    print('({}): update\n{}'.format(args.t, args.name))
    for i in args.name:
        db.insert({'name': i})
Example #33
class FolderManager:
    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, FOLDER_DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def add_folder(self, file_name):
        if not self.folder_exists(file_name):
            entry = {'file_name': file_name}
            self._db.insert(entry)

    def get_all_entries(self):
        return self._db.all()

    def folder_exists(self, file_name):
        """ checks if a folder has been added """
        entries = self._db.search(where('file_name') == file_name)
        if entries:
            return True
        else:
            return False

    def remove_element(self, file_name):
        self._db.remove(where('file_name') == file_name)

    def get_file_names(self):
        """ returns all the file names of folders that the user has added """
        file_names = []
        for entry in self._db.all():
            file_names.append(entry['file_name'])
        return file_names

    def get_folder_by_name(self, expected_name):
        """ get documents by the specified property """
        entry = self._db.get(where('file_name') == expected_name)
        return entry

    def clear_all(self):
        self._db.purge()
Example #35
    def _input_handler(inp, connect4_board):

        inp = inp.split(" ")
        if inp[0] == "save" and len(inp) == 2:

            if isinstance(connect4_board.player1, NetworkPlayer) or isinstance(
                connect4_board.player2, NetworkPlayer
            ):

                connect4_board.delete_board_from_stdout()
                print(Fore.RED + "You cannot save a network game." + Fore.RESET)
                connect4_board.print_board()

                return False

            save_file = TinyDB(inp[1], storage=CachingMiddleware(JSONStorage))
            save_file.purge()

            with open("{}.player".format(hash(connect4_board.player1)), "wb") as p1:
                pickle.dump(connect4_board.player1, p1)
            with open("{}.player".format(hash(connect4_board.player2)), "wb") as p2:
                pickle.dump(connect4_board.player2, p2)

            save_file.insert(
                {
                    "fname": inp[1],
                    "board": connect4_board.current_grid_state.tolist(),
                    "current_player": connect4_board.current_player.no,
                    "player1": "{}.player".format(hash(connect4_board.player1)),
                    "player2": "{}.player".format(hash(connect4_board.player2)),
                }
            )
            save_file.close()

            return True

        elif inp[0] in ("exit", "logout", "end", "forfeit"):

            exit(Fore.GREEN + "Sorry to see you go." + Fore.RESET)

            return True

        return False
Example #36
def loadTinyDB():

    global db

    # Check if DB is up to date
    if os.path.getmtime('data2.json') > os.path.getmtime('data2.yml'):
        return (1)  # return, nothing to do
    pp = pprint.PrettyPrinter(indent=2)
    db = TinyDB('data2.json')
    # Clear tinyDB
    db.purge()

    # Open the YAML source files
    f = open('data2.yml')
    d = yaml.load_all(f)
    for i in d:
        pp.pprint(i)
        db.insert(i)
    db.all()
Example #37
def test_cutom_mapping_type_with_json(tmpdir):
    from tinydb.database import Mapping

    class CustomDocument(Mapping):
        def __init__(self, data):
            self.data = data

        def __getitem__(self, key):
            return self.data[key]

        def __iter__(self):
            return iter(self.data)

        def __len__(self):
            return len(self.data)

    # Insert
    db = TinyDB(str(tmpdir.join('test.db')))
    db.purge()
    db.insert(CustomDocument({'int': 1, 'char': 'a'}))
    assert db.count(where('int') == 1) == 1

    # Insert multiple
    db.insert_multiple([
        CustomDocument({'int': 2, 'char': 'a'}),
        CustomDocument({'int': 3, 'char': 'a'})
    ])
    assert db.count(where('int') == 1) == 1
    assert db.count(where('int') == 2) == 1
    assert db.count(where('int') == 3) == 1

    # Write back
    doc_id = db.get(where('int') == 3).doc_id
    db.write_back([CustomDocument({'int': 4, 'char': 'a'})], [doc_id])
    assert db.count(where('int') == 3) == 0
    assert db.count(where('int') == 4) == 1
Example #38
# -*- coding: utf-8 -*-
from tinydb import TinyDB, Query

db = TinyDB('db/dataset_category.json')
db.purge()
db.insert({'text': 'ストーリーについて', 'label': 1})
db.insert({'text': 'あらすじについて知りたい', 'label': 1})
db.insert({'text': 'あらすじ', 'label': 1})
db.insert({'text': '話について', 'label': 1})
db.insert({'text': 'どんな話だっけ', 'label': 1})
db.insert({'text': 'どんなストーリーだったか', 'label': 1})
db.insert({'text': 'ストーリー', 'label': 1})
db.insert({'text': '各話のあらすじ', 'label': 1})
db.insert({'text': 'どんな話か', 'label': 1})
db.insert({'text': '登場人物', 'label': 2})
db.insert({'text': '人物について', 'label': 2})
db.insert({'text': 'キャラクター', 'label': 2})
db.insert({'text': 'キャラについて', 'label': 2})
db.insert({'text': '登場する人', 'label': 2})
db.insert({'text': '主人公', 'label': 2})
db.insert({'text': '人', 'label': 2})
db.insert({'text': '人について', 'label': 2})
db.insert({'text': 'キャラ', 'label': 2})
db.insert({'text': '登場人物について', 'label': 2})
db.insert({'text': 'どんな人', 'label': 2})

#print db.all()

db = TinyDB('db/dataset_answer.json')
db.purge()
db.insert({'category_id': 1, 'text': 'りょうについて', 'label': 1})
Example #39
class DocumentManager:
    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def doc_exists(self, file_name, title):
        entries = self._db.search((where('file_name') == file_name) & (where('name') == title))
        if entries:
            return True
        else:
            return False

    def is_doc_new(self, file_name):
        file_name_exists = self._db.search(where('file_name') == file_name)
        if not file_name_exists:
            return True
        return False

    ''' receives a translation file and checks if there are corresponding source files'''
    def is_translation(self, file_name, title, matched_files, actions):
        ''' check if the file is a translation file'''

        for myFile in matched_files:
            relative_path = actions.norm_path(myFile)
            myFileTitle = os.path.basename(relative_path)

            ''' only compare the file being checked against source files that have already been added '''
            entry = self._db.get(where("file_name") == relative_path)
            if entry:
                ''' check the source file's download codes to see if the file being checked is a translation file '''
                downloads = self.get_doc_downloads(relative_path)
                if downloads:
                    for d in downloads:
                        ''' append the download code to the source file for comparison '''
                        temp = myFileTitle.split(".")
                        newString = temp[0]+"."+ d +"."+temp[1]
                        if newString == title:
                            return True

        return False

    ''' receives a source file and finds the source files associated with it '''
    #def delete_local_translations(self, file_name, path, actions):


    def is_doc_modified(self, file_name, path):
        entry = self._db.get(where('file_name') == file_name)
        full_path = os.path.join(path, file_name)
        last_modified = os.stat(full_path).st_mtime
        if entry and entry['added'] < last_modified and entry['last_mod'] < last_modified:
            return True
        return False

    def add_document(self, title, create_date, doc_id, sys_mtime, last_mod, file_name):
        entry = {'name': title, 'added': create_date, 'id': doc_id,
                 'sys_last_mod': sys_mtime, 'last_mod': last_mod, 'file_name': file_name,
                 'downloaded': []}
        self._db.insert(entry)

    def update_document(self, field, new_val, doc_id):
        if type(new_val) is list:
            self._db.update(_update_entry_list(field, new_val), where('id') == doc_id)
        else:
            if type(new_val) is set:
                new_val = list(new_val)
            self._db.update({field: new_val}, where('id') == doc_id)

    def get_doc_by_prop(self, prop, expected_value):
        """ get documents by the specified property """
        entry = self._db.get(where(prop) == expected_value)
        return entry

    def get_all_entries(self):
        return self._db.all()

    def get_doc_ids(self):
        """ returns all the ids of documents that the user has added """
        doc_ids = []
        for entry in self._db.all():
            doc_ids.append(entry['id'])
        return doc_ids

    def get_file_names(self):
        """ returns all the file names of documents that the user has added """
        file_names = []
        for entry in self._db.all():
            file_names.append(entry['file_name'])
        return file_names

    def get_names(self):
        """ returns all the names of documents that the user has added """
        file_names = []
        for entry in self._db.all():
            file_names.append(entry['name'])
        return file_names

    def get_doc_name(self, file_name):
        """ returns the file name of a document for a given file path """
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            return entry['name']
        else:
            return None

    def get_doc_locales(self, file_name):
        """ returns the target locales of a document for a given file """
        locales = []
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            locales.append(entry['locales'])

        return locales

    def get_doc_downloads(self, file_name):
        """ returns all the downloaded translations for a given file """
        entry = self._db.get(where("file_name") == file_name)
        if entry:
            downloads = entry['downloaded']
            return downloads

    def remove_element(self, doc_id):
        self._db.remove(where('id') == doc_id)

    def clear_prop(self, doc_id, prop):
        """ Clear specified property of a document according to its type """
        entry = self._db.get(where('id') == doc_id)
        if isinstance(entry[prop],str):
            self.update_document(prop,"",doc_id)
        elif isinstance(entry[prop],int):
            self.update_document(prop,0,doc_id)
        elif isinstance(entry[prop],list):
            self.update_document(prop,[],doc_id)
        elif isinstance(entry[prop],dict):
            self.update_document(prop,{},doc_id)

    def remove_element_in_prop(self, doc_id, prop, element):
        doc_prop = self.get_doc_by_prop('id', doc_id)[prop]
        if element in doc_prop:
            doc_prop.remove(element)
        self.update_document(prop, doc_prop, doc_id)

    def add_element_to_prop(self, doc_id, prop, element):
        doc_prop = self.get_doc_by_prop('id',doc_id)[prop]
        if element not in doc_prop:
            doc_prop.append(element)
        self.update_document(prop, doc_prop, doc_id)

    def clear_all(self):
        self._db.purge()
Example #40
class TinyDbWrapper(object):
    def __init__(self, db_name, purge = False):
        self.db = TinyDB(db_name)
        if purge:
            self.db.purge()
Example #41
class DataStore:
    """
    Network layer storage for payload received and sent by aDTN.
    Payload retrieved from the datastore are chosen according to a fairness heuristic in order to give least
    popular objects in the network a chance to spread.
    """
    def __init__(self, db_filename=DEFAULT_DATABASE_FN, size_threshold=None):
        """
        Initialize data store.
        :param size_threshold: maximum storage size, in number of data objects
        :param db_filename: name of the file where the data is stored
        """
        self.size_threshold = size_threshold
        path = Path(db_filename).parent
        if not path.exists():
            path.mkdir(parents=True)
        self.db = TinyDB(db_filename + ".json")
        self.stats = self.db.table('stats')
        self.data = self.db.table('messages')
        self.lock = RLock()

    def add_object(self, data):
        """
        Attempt to insert a data object into the store. If it does not exist, it gets initialized. Otherwise the
        statistics are updated by increasing the receive count and the time of the last reception if the message has
        not been flagged as deleted.
        :param data: data object to store
        """
        idx = hash_string(data)
        now = int(time.time())
        with self.lock:
            Stats = Query()
            res = self.stats.search(Stats.idx == idx)
            if len(res) == 0:
                self.data.insert({'idx': idx, 'content': data})
                self.stats.insert({'idx': idx,
                                   'first_seen': now,
                                   'receive_count': 0,
                                   'send_count': 0,
                                   'last_received': None,
                                   'last_sent': None,
                                   'deleted': False})
                log_debug("Data object created: {}".format(data))
            else:
                deleted = res[0]['deleted']
                if deleted:
                    log_debug("Received deleted data object: {}".format(data))
                self.stats.update({'last_received': now}, Stats.idx == idx)
                self.stats.update(increment('receive_count'), Stats.idx == idx)
                log_debug("Data object updated: {}".format(data))

    def get_data(self):
        """
        Retrieve the data objects sorted by increasing popularity, namely in increasing receive_count, then send_count
        and finally the last time they were sent by the current aDTN node.
        :return: data objects sorted by increasing popularity.
        """
        with self.lock:
            Stats = Query()
            stats = self.stats.search(Stats.deleted == False)
            res = sorted(stats, key=lambda x: (x['receive_count'], x['send_count'], x['last_sent']))[:10]
            now = int(time.time())
            objects = []
            for r in res:
                idx = r['idx']
                Objects = Query()
                obj = self.data.search(Objects.idx == idx)[0]['content']
                objects.append(obj)
                self.stats.update({'last_sent': now}, Objects.idx == idx)
                self.stats.update(increment('send_count'), Objects.idx == idx)
        return objects

    def delete_data(self, object_id):
        """
        Delete a data object given its ID.
        :param object_id: ID of the data object to delete.
        """
        with self.lock:
            Stats = Query()
            Message = Query()
            res = self.stats.search(Stats.idx == object_id)
            self.stats.update({'deleted': True}, Stats.idx == object_id)
            record = self.data.get(Message.idx == object_id)
            if record is not None:
                self.data.remove(eids=[record.eid])
                log_debug("Deleted message: {}".format(object_id))
            else:
                log_debug("No data to delete: {}".format(object_id))

    def list_objects(self):
        """
        Print a list of data objects preceded by its object ID.
        """
        with self.lock:
            objects = self.data.all()
            for obj in objects:
                print("{}\t{}".format(obj['idx'], obj['content']))

    def wipe(self):
        """
        Empty the data store.
        """
        with self.lock:
            self.stats.purge()
            self.data.purge()
            self.db.purge()
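
A short, hypothetical usage sketch for the store above; hash_string, log_debug, increment and DEFAULT_DATABASE_FN are assumed to come from the surrounding aDTN module:

store = DataStore(db_filename="store/adtn_data", size_threshold=1000)
store.add_object("hello world")   # first insertion creates the stats row for the object
batch = store.get_data()          # up to 10 least-popular objects, marked as sent
store.wipe()                      # purge() empties the stats, messages and default tables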
Example #42
 def save(self, db_path):
     db = TinyDB(db_path)
     db.purge()
     db.insert({'root': self._data_to_store()})
Example #43
class TweetCollector(object):
    def __init__(self, agencies, token, token_secret, consumer, consumer_secret):
        self.agencies = agencies
        self.access_token = token
        self.access_token_secret = token_secret
        self.consumer_key = consumer
        self.consumer_secret = consumer_secret
        self._refresh = 10 * 60
        self._tweets = TinyDB('./tweets.json')
        self._tweets.purge()
        self._auth = tweepy.OAuthHandler(self.consumer_key, self.consumer_secret)
        self._auth.set_access_token(self.access_token, self.access_token_secret)
        self._api = tweepy.API(self._auth)

        thread = threading.Thread(target=self.run, args=())
        thread.daemon = True
        thread.start()

    def _status(self, handle):
        try:
            user = self._api.get_user(handle)
        except tweepy.TweepError as e:
            print('Tweeter server error ', e.response, ' for handle: ', handle)
            return []

        if hasattr(user, 'status'):
            return user.status
        else:
            return []

    @staticmethod
    def _process(status):
        clean_status = re.sub(r'\w+:\/{2}[\d\w-]+(\.[\d\w-]+)*(?:(?:\/[^\s/]*))*', '', status._json['text'],
                               flags=re.MULTILINE)

        clean_status = re.sub('@[\w.]+', '', clean_status, flags=re.MULTILINE)

        tokenized_docs = word_tokenize(clean_status)

        regex = re.compile('[%s]' % re.escape(string.punctuation))

        tokenized_docs_no_punctuation = []

        for token in tokenized_docs:
            new_token = regex.sub(u'', token)
            if not new_token == u'':
                if new_token not in stopwords.words('english') and new_token != 'RT' and new_token != '...':
                    tokenized_docs_no_punctuation.append(new_token)

        status._json['tokens'] = tokenized_docs_no_punctuation

        return status

    def _single_execute(self):
        print(datetime.now().time())
        for agency in self.agencies.search(where('handle')):
            tweeter_handle = agency['handle']
            status = self._status(tweeter_handle)
            if status:
                status_p = self._process(status)
                if not self._tweets.search(where('id') == status_p._json['id']) or not self._tweets.all():
                    self._tweets.insert(status_p._json)
            else:
                continue

    def run(self):
        while True:
            self._single_execute()
            time.sleep(self._refresh)
Example #44
class xlibris(object):
    def __init__(self, directory = 'xlibris/'):
        self.directory = directory
        if not os.path.exists(self.directory):
            os.mkdir(self.directory)
    def new(self,dbname):
        '''Creates a new database assigns it to a current database instance to be used by all the other functions
            Usage:-
            new('database name') or new database_name
        '''
        self.db = TinyDB(self.directory + dbname + '.json')
        print 'New database {} created at {}'.format(dbname, self.directory + dbname + '.json' )
    def connect(self,name):
        '''Connect to an existing database for updating/Query
            Usage:-
            connect('name') or connect name
            where 'name' is the name of the existing database'''

        self.db = TinyDB(self.directory + name + '.json')
        print 'Connected to {}'.format(name)
    def display(self):
        try:
            print tabulate(_concat(self.db.all()), headers='keys', tablefmt="simple")
            print "\n"
            print self.count()
        except AttributeError:
            print '''No database Connected,
             to see a list of available databases use list_db
             or to make a new database use new'''

    def add(self, ISBN):
        '''Add books to the current working database
            Usage:-
            add(ISBN) or add ISBN'''
        if _doesexist(self.db, ISBN) == False:
            try:
                bookData = meta(ISBN)
                bookData = _cleanify(bookData)
                bookData['Date Added'] = _today()
                self.db.insert(bookData)
                print 'ISBN {} inserted'.format(ISBN)
            except:
                print 'ISBN {} not found. Please add details manually- '.format(ISBN)
                self.madd()
        else:
            print 'Book Already Exists'
    def madd(self):
        bookData = {}
        bookData['Authors'] = raw_input('Authors Name: ')
        bookData['ISBN'] = raw_input('ISBN: ')
        bookData['Language'] = raw_input('Language: ')
        bookData['Publisher'] = raw_input('Publisher: ')
        bookData['Title'] = raw_input('Title: ')
        bookData['Year'] = raw_input('Year: ')
        bookData['Date Added'] = _today()
        self.db.insert(bookData)
    def search(self, keyword):
        NewSearch = Query()
        title = self.db.search(NewSearch.Title == keyword)
        auth = self.db.search(NewSearch.Authors == keyword)
        pub = self.db.search(NewSearch.Publisher == keyword)
        isbn = self.db.search(NewSearch.ISBN == keyword)
        ttable = [title, auth, pub, isbn]

        for i in ttable:
            if i:
                print 'Matches Found for {} \n'.format(keyword)
                print tabulate(_concat(i), headers='keys', tablefmt="fancy_grid")
    def _blkadd(self, ISBNlist):
        with tqdm(ISBNlist) as pbar:
            for i in pbar:
                pbar.set_description("Adding %s "%i)
                self.add(i)
                pbar.update(1/len(ISBNlist)*100)
    def add_from_file(self, filename):
        with open(filename, 'rb') as f:
            raw = reader(f)
            final = list(raw)
        for i in range(len(final)):
            final[i] = str(final[i][0])
        self._blkadd(final)
        print 'Done'
    def change_title(self, isbn):
        tmp = Query()
        def change(field):
            def transform(element):
                element[field] = raw_input('Enter Title ')
            return transform
        title = self.db.search(tmp.ISBN == isbn)[0]
        print 'Change title of :- {}'.format(title['Title'])
        self.db.update(change('Title'), tmp.ISBN == isbn )
        print 'Entry Updated'
    def change_author(self, isbn):
        tmp = Query()
        def change(field):
            def transform(element):
                element[field] = raw_input('Enter Author ')
            return transform
        title = self.db.search(tmp.ISBN == isbn)[0]
        print 'Change author of :- {}'.format(title['Title'])
        self.db.update(change('Authors'), tmp.ISBN == isbn )
        print 'Entry Updated'
    def change_publisher(self, isbn):
        tmp = Query()
        def change(field):
            def transform(element):
                element[field] = raw_input('Enter Publisher ')
            return transform
        title = self.db.search(tmp.ISBN == isbn)[0]
        print 'Change Publisher of :- {}'.format(title['Title'])
        self.db.update(change('Publisher'), tmp.ISBN == isbn )
        print 'Entry Updated'
    def write_to_file(self, filename):
        try:
            data = tabulate(_concat(self.db.all()), headers='keys', tablefmt="simple")
        except AttributeError:
            print 'No database Connected'
        f = open('xlibris/' + filename + '.txt', 'w')
        f.write(data.encode('utf8'))
        f.write('\n'.encode('utf8'))
        f.write('--------------------'.encode('utf8'))
        f.write('\n'.encode('utf8'))
        f.write(self.count().encode('utf8'))
        f.close()
        print 'Written to {}'.format('xlibris/' + filename + '.txt')
    def purge_current(self):
        self.db.purge()
    def remove(self, isbn):
        tmp = Query()
        resp = raw_input('Delete \n {} \n ? (y/n)'.format(tabulate(self.db.search(tmp.ISBN == isbn), headers='keys')))
        resp = resp.lower()
        if resp == 'y':
            for i in ['Publisher', 'Title', 'Authors', 'Year', 'Date Added', 'Language', 'ISBN']:
                self.db.update(delete(i), tmp.ISBN == isbn)
            print 'Deleted'
        elif resp == 'n':
            print 'Spared'
    def lookup(self, keyword):
        data = _concat(self.db.all())
        title = data.pop('Title')
        auth = data.pop('Authors')
        choices = title + auth
        searchKey = process.extractBests(keyword, choices)
        for i in searchKey:
            if i[1] >= 90:
                self.search(i[0])
    def count(self):
        listisbn = _concat(self.db.all())
        listisbn = listisbn.pop('ISBN')
        return "Total {} books.".format(len(listisbn))
Example #45
def cleanup( dbName ):
   db = TinyDB( dbName )
   db.purge()
Example #46
class DocumentManager:
    def __init__(self, path):
        self.db_file = os.path.join(path, CONF_DIR, DB_FN)
        self._db = TinyDB(self.db_file)

    def open_db(self):
        self._db = TinyDB(self.db_file)

    def close_db(self):
        self._db.close()

    def doc_exists(self, file_name, title):
        entries = self._db.search((where('file_name') == file_name) & (where('name') == title))
        if entries:
            return True
        else:
            return False

    def is_doc_new(self, file_name):
        file_name_exists = self._db.search(where('file_name') == file_name)
        if not file_name_exists:
            return True
        return False

    def is_doc_modified(self, file_name, path):
        entry = self._db.get(where('file_name') == file_name)
        full_path = os.path.join(path, file_name)
        last_modified = os.stat(full_path).st_mtime
        if entry and entry['added'] < last_modified and entry['last_mod'] < last_modified:
            return True
        return False

    def add_document(self, title, create_date, doc_id, sys_mtime, last_mod, file_name):
        entry = {'name': title, 'added': create_date, 'id': doc_id,
                 'sys_last_mod': sys_mtime, 'last_mod': last_mod, 'file_name': file_name,
                 'downloaded': []}
        self._db.insert(entry)

    def update_document(self, field, new_val, doc_id):
        if type(new_val) is list:
            self._db.update(_update_entry_list(field, new_val), where('id') == doc_id)
        else:
            if type(new_val) is set:
                new_val = list(new_val)
            self._db.update({field: new_val}, where('id') == doc_id)

    def get_doc_by_prop(self, prop, expected_value):
        """ get documents by the specified property """
        entry = self._db.get(where(prop) == expected_value)
        return entry

    def get_all_entries(self):
        return self._db.all()

    def get_doc_ids(self):
        """ returns all the ids of documents that user has added """
        doc_ids = []
        for entry in self._db.all():
            doc_ids.append(entry['id'])
        return doc_ids

    def remove_element(self, doc_id):
        self._db.remove(where('id') == doc_id)

    def clear_all(self):
        self._db.purge()
Example #47
def seed():
    db = TinyDB('db/dataset.json')
    db.purge()
    db.insert({'employee_id': 6, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 0, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 6, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 6, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 0, 'day_of_week':1, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':1, 'attend': 0})
    db.insert({'employee_id': 6, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 6, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 0, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':2, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':2, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 3, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 6, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':3, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':3, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 0, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 2, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 4, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 5, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 0, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 1, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 2, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 3, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 4, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 5, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 6, 'day_of_week':4, 'attend': 1})
    db.insert({'employee_id': 0, 'day_of_week':4, 'attend': 0})
    db.insert({'employee_id': 1, 'day_of_week':4, 'attend': 0})