Example #1
class DB:
    def __init__(self,db_path):
        self.db = TinyDB(db_path)

    def add(self, data):
        # Only add it if you can't find it
        Track = Query()
        if not self.db.get(Track.display_id == data['display_id']):
            return self.db.insert(data)

    def searchById(self, video_id):
        Track = Query()
        return self.db.get(Track.display_id == video_id)

    def search(self, text):
        pattern = re.compile(text,re.IGNORECASE)
        def test(txt):
            return pattern.search(txt)

        Track = Query()
        q = Track.title.test(test) | Track.description.test(test)
        return self.db.search(q)

    def all(self):
        return self.db.all()
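
A minimal usage sketch for the wrapper above, assuming 'from tinydb import TinyDB, Query' and 'import re' are present at module level; the file name and track fields are illustrative, not part of the original example.

db = DB('tracks.json')  # hypothetical database file
db.add({'display_id': 'abc123', 'title': 'Lo-fi mix', 'description': 'two hours of beats'})
db.add({'display_id': 'abc123', 'title': 'Lo-fi mix', 'description': 'duplicate'})  # skipped: same display_id

print(db.searchById('abc123'))   # the stored document, or None if missing
print(db.search('lo-fi'))        # case-insensitive regex match on title or description
print(len(db.all()))             # 1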
Example #2
def test_serialisation_of_pandas_dataframe(tmpdir):
    from sacred.observers.tinydb_hashfs import (DataFrameSerializer,
                                                SeriesSerializer)
    from tinydb_serialization import SerializationMiddleware

    import numpy as np
    import pandas as pd

    # Setup Serialisation object for non list/dict objects
    serialization_store = SerializationMiddleware()
    serialization_store.register_serializer(DataFrameSerializer(),
                                            'TinyDataFrame')
    serialization_store.register_serializer(SeriesSerializer(),
                                            'TinySeries')

    db = TinyDB(os.path.join(tmpdir.strpath, 'metadata.json'),
                storage=serialization_store)

    df = pd.DataFrame(np.eye(3), columns=list('ABC'))
    series = pd.Series(np.ones(5))

    document = {
        'foo': 'bar',
        'some_dataframe': df,
        'nested': {
            'ones': series
        }
    }

    db.insert(document)
    returned_doc = db.all()[0]

    assert returned_doc['foo'] == 'bar'
    assert (returned_doc['some_dataframe'] == df).all().all()
    assert (returned_doc['nested']['ones'] == series).all()
Example #3
    def AddUser(self, username, chatid):
        my_pokemon = [0] * 152 # Matching arr index to pokemon index (0 is disregarded)

        db = TinyDB('users.json')
        db.insert({'username': username, 'chatid': chatid, 'pokemon': my_pokemon})
        
        pass # RETURN: check bool
Example #4
def dragon_greet():
    print("_______________________________________________________________\n")
    time = datetime.datetime.now().time()

    global user_full_name
    global user_prefix
    global config_file

    command = "getent passwd $LOGNAME | cut -d: -f5 | cut -d, -f1"
    user_full_name = os.popen(command).read()
    user_full_name = user_full_name[:-1]  # .decode("utf8")
    home = expanduser("~")
    config_file = TinyDB(home + '/.dragonfire_config.json')
    callme_config = config_file.search(Query().datatype == 'callme')
    if callme_config:
        user_prefix = callme_config[0]['title']
    else:
        gender_config = config_file.search(Query().datatype == 'gender')
        if gender_config:
            user_prefix = GENDER_PREFIX[gender_config[0]['gender']]
        else:
            gender = Classifier.gender(user_full_name.split(' ', 1)[0])
            config_file.insert({'datatype': 'gender', 'gender': gender})
            user_prefix = GENDER_PREFIX[gender]

    if time < datetime.time(12):
        time_of_day = "morning"
    elif datetime.time(12) < time < datetime.time(18):
        time_of_day = "afternoon"
    else:
        time_of_day = "evening"
    userin.execute(["echo"], "To activate say 'Dragonfire!' or 'Wake Up!'")
    userin.say(" ".join(["Good", time_of_day, user_prefix]))
Example #5
def test_json_readwrite(tmpdir):
    """
    Regression test for issue #1
    """
    path = str(tmpdir.join('test.db'))

    # Create TinyDB instance
    db = TinyDB(path, storage=JSONStorage)

    item = {'name': 'A very long entry'}
    item2 = {'name': 'A short one'}

    get = lambda s: db.get(where('name') == s)

    db.insert(item)
    assert get('A very long entry') == item

    db.remove(where('name') == 'A very long entry')
    assert get('A very long entry') is None

    db.insert(item2)
    assert get('A short one') == item2

    db.remove(where('name') == 'A short one')
    assert get('A short one') is None
Example #6
def download(name,force=False):
    db=TinyDB(path_db_)
    temp = Query()
    data=requests.get("https://raw.githubusercontent.com/PyThaiNLP/pythainlp-corpus/master/db.json")
    data_json=data.json()
    if name in list(data_json.keys()):
        temp_name=data_json[name]
        print("Download : "+name)
        if len(db.search(temp.name==name))==0:
            print(name+" "+temp_name['version'])
            download_(temp_name['download'],temp_name['file_name'])
            db.insert({'name': name, 'version': temp_name['version'],'file':temp_name['file_name']})
        else:
            if len(db.search((temp.name == name) & (temp.version == temp_name['version']))) == 0:
                print("have update")
                print("from "+name+" "+db.search(temp.name==name)[0]['version']+" update to "+name+" "+temp_name['version'])
                yes_no="y"
                if force==False:
                    yes_no=str(input("y or n : ")).lower()
                if "y"==yes_no:
                    download_(temp_name['download'],temp_name['file_name'])
                    db.update({'version':temp_name['version']},temp.name==name)
            else:
                print("re-download")
                print("from "+name+" "+db.search(temp.name==name)[0]['version']+" update to "+name+" "+temp_name['version'])
                yes_no="y"
                if force==False:
                    yes_no=str(input("y or n : ")).lower()
                if "y"==yes_no:
                    download_(temp_name['download'],temp_name['file_name'])
                    db.update({'version':temp_name['version']},temp.name==name)
    db.close()
Example #7
class pcDB:
    def __init__(self, table="default"):
        # TODO: set path to the JSON file, e.g. '/path/to/db.json'
        path = ''
        self.table = table
        self.db = TinyDB(path).table(table)

    def insert(self, _dict):
        '''
        Insert a single document (dict) into the table.
        '''
        self.db.insert(_dict)
        # db.insert({'int': 1, 'char': 'a'})
        # db.insert({'int': 1, 'char': 'b'})

    def getAll(self):
        '''
        Takes no parameters; returns all documents in the table.
        '''
        return self.db.all()
#
# from tinydb.storages import JSONStorage
# from tinydb.middlewares import CachingMiddleware
# db = TinyDB('/path/to/db.json', storage=CachingMiddleware(JSONStorage))
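
A short runnable sketch of the CachingMiddleware setup hinted at in the comments above; the file path is illustrative. CachingMiddleware keeps writes in memory and flushes them to the JSON file when the database is closed (or its write cache fills up).

from tinydb import TinyDB, Query
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware

db = TinyDB('/tmp/cached_db.json', storage=CachingMiddleware(JSONStorage))
db.insert({'int': 1, 'char': 'a'})
db.insert({'int': 1, 'char': 'b'})
print(db.search(Query().char == 'a'))
db.close()  # flushes cached writes to disk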
Example #8
def test_serialisation_of_numpy_ndarray(tmpdir):
    from sacred.observers.tinydb_hashfs import NdArraySerializer
    from tinydb_serialization import SerializationMiddleware
    import numpy as np

    # Setup Serialisation object for non list/dict objects
    serialization_store = SerializationMiddleware()
    serialization_store.register_serializer(NdArraySerializer(), 'TinyArray')

    db = TinyDB(os.path.join(tmpdir.strpath, 'metadata.json'),
                storage=serialization_store)

    eye_mat = np.eye(3)
    ones_array = np.ones(5)

    document = {
        'foo': 'bar',
        'some_array': eye_mat,
        'nested': {
            'ones': ones_array
        }
    }

    db.insert(document)
    returned_doc = db.all()[0]

    assert returned_doc['foo'] == 'bar'
    assert (returned_doc['some_array'] == eye_mat).all()
    assert (returned_doc['nested']['ones'] == ones_array).all()
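
The serialization tests above rely on ready-made serializers shipped with sacred. For reference, a minimal custom serializer for tinydb_serialization follows the same pattern: subclass Serializer, set OBJ_CLASS to the handled type, and implement encode/decode. This is a sketch based on the tinydb_serialization documentation, not part of the examples above; the class name, file path, and date format are illustrative.

from datetime import datetime

from tinydb import TinyDB
from tinydb_serialization import Serializer, SerializationMiddleware


class MyDateTimeSerializer(Serializer):
    OBJ_CLASS = datetime  # the type this serializer handles

    def encode(self, obj):
        # store datetimes as strings inside the JSON file
        return obj.strftime('%Y-%m-%dT%H:%M:%S')

    def decode(self, s):
        return datetime.strptime(s, '%Y-%m-%dT%H:%M:%S')


serialization = SerializationMiddleware()
serialization.register_serializer(MyDateTimeSerializer(), 'TinyDate')
db = TinyDB('/tmp/serialized_db.json', storage=serialization)
db.insert({'created': datetime(2000, 1, 1, 12, 0, 0)})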
Example #9
def crawl(sr=0, er=3):
    archive = dict()
    url = "https://community.topcoder.com/tc?module=ProblemArchive&sr=%d&er=%d" % (sr, er)
    print("requesting seed page...")
    r = requests.get(url)
    html = h.unescape(r.content.decode('utf-8'))
    doc = pq(html)
    for i in doc('table.paddingTable2').eq(2).children()[3:]:
        round_name = pq(i).children().eq(2).find('a').text()
        sub_url = pq(i).children().eq(2).find('a').attr.href
        if sub_url is not None:
            rid = sub_url.split('rd=')[-1]
            archive[round_name] = {'rid': rid, 'round': round_name}
    db = TinyDB("data/db.json")
    tot = len(archive.values())
    cur = 0
    prob_cnt = 0
    for k in archive.values():
        problems = crawl_round(k['rid'], k['round'])
        print('parse result:')
        for p in problems:
            for pk, pv in p.items():
                print("%-15s:   %s" % (pk, pv))
            prob_cnt += 1
            q = Query()
            if not db.search(q.name == p['name']):
                print('>>>>>>> insert problem: %s' % p['name'])
                db.insert(p)
            print('-' * 10)
        cur += 1
        print('*' * 10, 'finish', k['round'], ',tot rounds:', tot, 'cur round:', cur, 'round problems:', len(problems), '*' * 10)
    print('done, total round: %d, total problems: %d' % (cur, prob_cnt))
Example #10
def insert_test(db_file='db.json'):
    db = TinyDB(db_file)
    db.insert({
        'name': 'Aman Verma',
        'items': 1,
        'contact': 7890701597
    })
Example #11
def process_and_add_one(pdf_path):
    pdf_name = pdf_path.split("/")
    pdf_name = pdf_name[-1]
    directory = pdf_path[0 : -len(pdf_name)]
    stripped_name = pdf_name[0:-4]
    title_path = title_dir + "/" + stripped_name + ".xml"
    extract_title(pdf_path, title_path)
    # check if title extraction worked, otherwise stop with this one
    tf = open(title_path, "r")
    txml = tf.read()
    if txml == "title extraction failed":
        return None

    # build dictionary with info we've got
    tf = open(title_path, "r")
    txml = tf.read()
    txml = txml.split(">")
    title = "title not found"
    for line in txml:
        if "</title" in line:
            title = line[0:-7]
            print(title)
            break

    # save nice text version of title
    txt_name_path = title_path[0:-4] + ".txt"
    ftxt = open(txt_name_path, "a")
    ftxt.write(title)
    if title == "title not found":
        return None

    # if title was found, get DOI from it
    currDOI = get_DOI_from_title(title)
    # open/create tiny db
    db = TinyDB(db_loc)
    # make sure the paper isn't in the db already
    paper = Query()
    gotit = db.search(paper.ownDOI == currDOI)
    if gotit:
        return currDOI

    text_path_xml = text_dir + "/" + stripped_name + ".xml"
    text_path_txt = text_dir + "/" + stripped_name + ".txt"
    if not extract_text(pdf_path, text_path_xml, text_path_txt):
        print ("text extraction failed")
        return None

    # only extract bibtex if you don't have it already, because this is the long part
    # TODO: Return before doing bib extraction
    bib_path = bib_dir + "/" + stripped_name + ".bib"
    if not extract_bibtex(pdf_path, bib_path):
        print ("caught in the new code")
        return None

    refDOIs = get_ref_list_DOIs(bib_path)

    new_dict = {"ownDOI": currDOI, "refDOIs": refDOIs, "filename": stripped_name}
    db.insert(new_dict)
    return currDOI
Example #12
def test_gc(tmpdir):
    # See https://github.com/msiemens/tinydb/issues/92
    path = str(tmpdir.join('db.json'))
    table = TinyDB(path).table('foo')
    table.insert({'something': 'else'})
    table.insert({'int': 13})
    assert len(table.search(where('int') == 13)) == 1
    assert table.all() == [{'something': 'else'}, {'int': 13}]
Example #13
def write(message, args):
    # message.reply('I can understand hi or HI!')
    # react with thumb up emoji
    #message.react('+1')
    db = TinyDB('db.json')
    db.insert({'value': args})
    print(args)
    db.close()
Example #14
def test_json_kwargs(tmpdir):
    db_file = tmpdir.join('test.db')
    db = TinyDB(str(db_file), sort_keys=True, indent=4, separators=(',', ': '))

    # Write contents
    db.insert({'b': 1})
    db.insert({'a': 1})

    assert db_file.read() == '''{
    "_default": {
        "1": {
            "b": 1
        },
        "2": {
            "a": 1
        }
    }
}'''
Example #15
    def obj_func_range(self, func, bounds=[(0.8, 1.2), (0.8, 1.2)], x_r=6,
                       args=(), comps=None, vary='rs'):
        import numpy
        from tinydb import TinyDB
        import logging

        x_range = numpy.linspace(bounds[0][0], bounds[0][1], x_r)
        y_range = numpy.linspace(bounds[1][0], bounds[1][1], x_r)
        xg, yg = numpy.meshgrid(x_range, y_range)
        func_r = numpy.zeros((x_r, x_r))
        func_ed = numpy.zeros((x_r, x_r))
        func_es = numpy.zeros((x_r, x_r))
        func_eph = numpy.zeros((x_r, x_r))
        func_ee = numpy.zeros((x_r, x_r))
        func_ex = numpy.zeros((x_r, x_r))

        for i in range(xg.shape[0]):
            for j in range(yg.shape[0]):
                X = [xg[i, j], yg[i, j]]  # [x_1, x_2]

                f_out = func(X, *args)  # Scalar outputs
                func_r[i, j] = numpy.float64(f_out)
                func_ed[i, j] = numpy.float64(self.Epsilon_d)
                func_es[i, j] = numpy.float64(self.Epsilon_s)
                func_eph[i, j] = numpy.float64(self.Epsilon_ph)
                func_ee[i, j] = numpy.float64(self.Epsilon_e)
                func_ex[i, j] = numpy.float64(self.Epsilon_x)
                if False:
                    print('self.Epsilon_d = {}'.format(self.Epsilon_d))
                    print('self.Epsilon_s = {}'.format(self.Epsilon_s))
                    print('self.Epsilon_ph = {}'.format(self.Epsilon_ph))
                    print('self.Epsilon_e = {}'.format(self.Epsilon_e))
                    print('self.Epsilon_x = {}'.format(self.Epsilon_x))

        # Save results

        plot_kwargs = {'func_r' : func_r,
                       'xg' : xg,
                       'yg' : yg,
                       'func_ed' : func_ed,
                       'func_es' : func_es,
                       'func_eph' : func_eph,
                       'func_ee' : func_ee,
                       'func_ex' : func_ex,
                       }
        if False:
            # TODO: Find an easy way to store multidimensional numpy arrays
            db = TinyDB('.db/p_obj_db.json')
            db_store = {'dtype' : 'p_obj_r',
                        'comps' : comps,
                        'bounds' : bounds,
                        'x_r' : x_r,
                        'vary' : vary,
                        'plot_kwargs' : plot_kwargs}
            db.insert(db_store)
        return plot_kwargs
Example #16
def test_serializer_nondestructive(tmpdir):
    path = str(tmpdir.join('db.json'))

    serializer = SerializationMiddleware(JSONStorage)
    serializer.register_serializer(DateTimeSerializer(), 'TinyDate')
    db = TinyDB(path, storage=serializer)

    data = {'date': datetime.utcnow(), 'int': 3}
    data_before = dict(data)  # implicitly copy
    db.insert(data)
    assert data == data_before
Example #17
def get_all():
  db = TinyDB('db/coffee.json')
  for i in links:
    print(i)
    s = i.split('/')
    flav = s[len(s)-1]
    temp = get_json(i)
    print(flav)
    print(temp)
    for i in temp:
      db.insert(i)
Example #18
def generate_customer_records(seed_first_names, seed_last_names, count=320, file_name='db.json', min_items=1,
                              max_items=50):
    db = TinyDB(file_name)

    names = [name for name in generate_names(seed_first_names, seed_last_names)]  # Generate the names
    random.shuffle(names)  # Shuffle to make them look "more" random

    for i in range(count):
        db.insert({
            'name': names[random.randint(0, len(names) - 1)],
            'items': random.randint(min_items, max_items),
            'contact': generate_phone_num()
        })
Example #19
def insertEntry( dbName, customer_id, rating=None, comment=None ):
   db = TinyDB( dbName )
   #dateTime = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
   entry = \
         {
            'customer_id' : customer_id,
            'rating' : rating,
            'stars' : starRating( rating ),
            'comment' : comment,
            'date' : time.time()
         }
   #if not contains( db, customer_id ):
   db.insert( entry )
Example #20
def test_serializer(tmpdir):
    path = str(tmpdir.join('db.json'))

    serializer = SerializationMiddleware(JSONStorage)
    serializer.register_serializer(DateTimeSerializer(), 'TinyDate')
    db = TinyDB(path, storage=serializer)

    date = datetime(2000, 1, 1, 12, 0, 0)

    db.insert({'date': date})
    db.insert({'int': 2})
    assert db.count(where('date') == date) == 1
    assert db.count(where('int') == 2) == 1
Example #21
def test_delete(tmpdir):
    path = str(tmpdir.join('db.json'))

    db = TinyDB(path, ensure_ascii=False)
    q = Query()
    db.insert({'network': {'id': '114', 'name': 'ok', 'rpc': 'dac',
                           'ticker': 'mkay'}})
    assert db.search(q.network.id == '114') == [
        {'network': {'id': '114', 'name': 'ok', 'rpc': 'dac',
                     'ticker': 'mkay'}}
    ]
    db.remove(q.network.id == '114')
    assert db.search(q.network.id == '114') == []
Example #22
class Test_001_Insert_by_valid_query_Function(unittest.TestCase):
	def setUp(self):
		self.db = TinyDB('db.json')

	def tearDown(self):
		self.db.purge()
		self.db.all()

	def test_simple_insert_valid_exist(self):
		print("case 1 insert data by valid query")
		self.db.insert({'Name': 'Greg', 'Email': '*****@*****.**', 'int' : 1, 'char':1})
		result=self.db.search(where('Name') == 'Greg')
		self.assertEqual(result,[{'Name': 'Greg', 'Email': '*****@*****.**', 'int' : 1, 'char':1}])
Example #23
class TinyDBStore(object):
    def __init__(self):
        self.drafts_db = TinyDB('event_drafts.json')
        self.events_db = TinyDB('events.json')

    # Drafts
    def contains_draft(self, user_id):
        return self.drafts_db.contains(Query().user_id == user_id)

    def new_draft(self, user_id):
        if self.contains_draft(user_id):
            self.drafts_db.remove(Query().user_id == user_id)

        self.drafts_db.insert({
            'user_id': user_id,
            'current_field': 0,
            'event': {}
        })

    def update_draft(self, user_id, event, current_field):
        self.drafts_db.update({
            'user_id': user_id,
            'current_field': current_field,
            'event': event
        }, Query().user_id == user_id)

    def get_draft(self, user_id):
        return self.drafts_db.get(Query().user_id == user_id)

    def remove_draft(self, user_id):
        self.drafts_db.remove(Query().user_id == user_id)

    # Events
    def insert_event(self, event):
        event_id = self.events_db.insert(event)
        event['id'] = event_id
        return event

    def update_event(self, event):
        self.events_db.update(event, eids=[event.eid])

    def remove_event(self, event):
        self.events_db.remove(eids=[event['id']])

    def get_events(self, user_id, name=None):
        if name:
            return self.events_db.search((Query().user_id == user_id) & (Query().name.test(lambda v: name in v)))
        return self.events_db.search(Query().user_id == user_id)

    def get_event(self, event_id):
        return self.events_db.get(eid=int(event_id))
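
A brief usage sketch for the store above, assuming it runs in a directory where event_drafts.json and events.json can be created; the user ID and event fields are illustrative.

store = TinyDBStore()

store.new_draft(42)
store.update_draft(42, {'name': 'Standup', 'place': 'Room 1'}, current_field=2)
draft = store.get_draft(42)

event = store.insert_event({'user_id': 42, 'name': 'Standup', 'place': 'Room 1'})
print(store.get_events(42, name='Stand'))
store.remove_event(event)
store.remove_draft(42)

Note that the eid/eids keywords used by update_event, remove_event and get_event belong to the TinyDB 3.x API; TinyDB 4 renamed them to doc_id/doc_ids.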
Example #24
def write_prediction_cache(
        region_id,
        type_id,
        prediction_data,
        cache_path=CACHE_PATH,
        db_filename='prophet.json',
        logger=logging.getLogger('publicAPI')
):
    """update tinydb latest prediction

    Args:
        region_id (int): EVE Online region ID
        type_id (int): EVE Online type ID
        prediction_data (:obj:`pandas.DataFrame`): data to write to cache
        cache_path (str, optional): path to caches
        db_filename (str, optional): name of tinydb

    Returns:
        None

    """
    logger.info('--caching result')
    utc_today = datetime.utcnow().strftime('%Y-%m-%d')

    prediction_db = TinyDB(path.join(cache_path, db_filename))

    ## clear previous cache ##
    prediction_db.remove(
        (Query().cache_date <= utc_today) &
        (Query().region_id == region_id) &
        (Query().type_id == type_id)
    )

    ## Prepare new entry for cache ##
    cleaned_data = prediction_data.to_json(
        date_format='iso',
        orient='records'
        )
    data = {
        'cache_date': utc_today,
        'region_id': region_id,
        'type_id': type_id,
        'lastWrite': datetime.utcnow().timestamp(),
        'prediction':cleaned_data
    }
    logger.debug(data)
    prediction_db.insert(data)

    prediction_db.close()
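
A hedged usage sketch for the cache writer above; the region and type IDs and the DataFrame columns are illustrative, and it assumes the module-level imports the function relies on (TinyDB, Query, datetime, os.path) are in place.

import tempfile

import pandas as pd

prediction_data = pd.DataFrame({
    'date': pd.to_datetime(['2020-01-01', '2020-01-02']),
    'avgPrice': [5.0, 5.2],
})

write_prediction_cache(
    region_id=10000002,   # illustrative region ID
    type_id=34,           # illustrative type ID
    prediction_data=prediction_data,
    cache_path=tempfile.mkdtemp(),
)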
Example #25
class ListCache(object):

    DB_FILE = 'listing_db.json'
    DB_TTL = timedelta(hours=12)

    def __init__(self):
        self.db = TinyDB(os.path.join(os.path.join(os.getcwd(), os.path.dirname(__file__)), ListCache.DB_FILE))

    @property
    def db(self):
        return self._db

    @db.setter
    def db(self, db):
        self._db = db

    def listing_in_cache(self, listing):
        lquery = Query()
        return self.db.contains(lquery.hsh == listing.hsh)

    def retrieve_listing(self, listing):
        lquery = Query()
        list_dict = self.db.get(lquery.hsh == listing.hsh)
        return Listing.from_dict(list_dict)

    def insert_listing(self, listing):
        if self.listing_in_cache(listing):
            self.update_listing(listing)
        else:
            list_dict = listing.as_dict()
            list_dict['last_updated'] = datetime.now().isoformat()
            list_dict['hsh'] = listing.hsh
            self.db.insert(list_dict)

    def remove_listing(self, listing):
        lquery = Query()
        self.db.remove(lquery.hsh == listing.hsh)

    def update_listing(self, listing):
        lquery = Query()
        if self.listing_in_cache(listing):
            self.remove_listing(listing)
        self.insert_listing(listing)

    def remove_old_listings(self):
        list_ar = self.db.all()
        for listing in list_ar:
            if datetime.strptime(listing['last_updated'], '%Y-%m-%dT%H:%M:%S.%f') < datetime.now() - ListCache.DB_TTL:
                self.remove_listing(Listing.from_dict(listing))
Example #26
def create_database():
    """Create and initialize the database object

    Returns the TinyDB object that will be used to store data
    """
    db = TinyDB(storage=MemoryStorage)

    db.insert(
        {
            "type": "waypoints",
            "waypoints": {}
        }
    )

    return db
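
A short sketch of working with the in-memory database returned above, assuming 'from tinydb import TinyDB, Query' and 'from tinydb.storages import MemoryStorage' in the same module; the waypoint data is illustrative.

q = Query()
db = create_database()

# Set the 'waypoints' field of the single document created above.
db.update({'waypoints': {'home': {'lat': 41.88, 'lon': -87.63}}}, q.type == 'waypoints')
print(db.get(q.type == 'waypoints')['waypoints'])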
Example #27
def test_caching_json_write(tmpdir):
    path = str(tmpdir.join('test.db'))

    db = TinyDB(path, storage=CachingMiddleware(JSONStorage))

    db.insert({'key': 'value'})

    db.close()

    # Verify database filesize
    statinfo = os.stat(path)
    assert statinfo.st_size != 0

    # Assert JSON file has been closed
    assert db._storage._handle.closed
Example #28
File: clean.py Project: labase/eica
    def new_use_once_only_clean_db_with_merged_sessions_and_prognostics(self):
        new_data_base = TinyDB(JSONDBOUT)
        prognostics = self.parse_prognostics()
        users_with_demographics = list(set(n for n in self.new_find_all_users_names()))
        new_user_list = [dict(
            jogada=self.new_list_play_data_adjusted_with_delta(user),
            **prognostics[user.lower()],
            **self.new_find_inconsistent_users_ids(user)
        ) for user in users_with_demographics]
        for user in new_user_list:
            print({key: val if key != "jogada" else val[0] for key, val in user.items()})
            if not new_data_base.search(self.query.user == user["user"]):
                new_data_base.insert(user)

        return new_user_list
Example #29
def process_new_entries(entries, changelog_db: TinyDB):
    """Processes new entries:
    If there are not any new entries, do nothing, else write them in file and send email.
    It also saves name of file with changes to db for its better managing.
    """
    changelog = datetime.now().strftime('%y%m%d%H') + "_changelog.txt"
    if not len(entries) == 0:
        logging.info("Found " + str(len(entries)) + " new files.")
        with open(changelog, 'w') as file_out:
            for entry in entries:
                file_out.write(entry['path'] + '\n')
        send_mail(changelog)
        changelog_db.insert({'path': changelog, 'datetime': datetime.now()})
    else:
        logging.info("No new files found.")
Example #30
def test_serializer_recursive(tmpdir):
    path = str(tmpdir.join('db.json'))

    serializer = SerializationMiddleware(JSONStorage)
    serializer.register_serializer(DateTimeSerializer(), 'TinyDate')
    db = TinyDB(path, storage=serializer)

    date = datetime(2000, 1, 1, 12, 0, 0)
    datenow = datetime.utcnow()
    dates = [{'date': date, 'hp': 100}, {'date': datenow, 'hp': 1}]
    data = {'dates': dates, 'int': 10}
    db.insert(data)
    db.insert({'int': 2})
    assert db.count(where('dates').any(where('date') == date)) == 1
    assert db.count(where('int') == 2) == 1
Example #31
            'recordCreated': issued,
            'recordUpdated': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
            'type': type_,
            'title': title,
            'description': description,
            'contactPoint': contact,
            'associations': links,
            'externalId': identifier,
            'themes': themes,
            'extent': {
                'spatial': {
                    'bbox': [[bbox]],
                    'crs': bbox_crs
                },
                'temporal': {
                    'interval': [te_begin, te_end],
                    'trs':
                    'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian'  # noqa
                }
            },
            '_metadata-anytext': _anytext
        }
    }

    try:
        res = db.insert(json_record)
        print('Metadata record {} loaded with internal id {}'.format(
            xml_file, res))
    except Exception as err:
        print(err)
Example #32
class Measurements:
    """ Take network measurements """

    def __init__(self, args, nmxperimentctrl):
        self.nma = nmxperimentctrl
        if self.nma.conf is None:
            log.error("No toml configuration.")
            sys.exit(1)  # was os.exit(1), which doesn't exist; requires "import sys"
        self.results = {}
        self.quiet = args.quiet

        self.sites = list(self.nma.conf['reference_site_dict'].keys())
        self.labels = self.nma.conf['reference_site_dict']

        if self.nma.conf['databases']['tinydb_enable']:
            try:
                Path(Path.cwd().joinpath(self.nma.conf['databases']['tinydb_path'])).mkdir(parents=True, exist_ok=True)
                speedtest_json = Path.cwd().joinpath(self.nma.conf['databases']['tinydb_path'], 'speedtest.json')
                seen_devices_json = Path.cwd().joinpath(self.nma.conf['databases']['tinydb_path'], 'seen_devices.json')

                if not speedtest_json.exists():
                    speedtest_json.touch()
                if not seen_devices_json.exists():
                    seen_devices_json.touch()

                self.speed_db = TinyDB(speedtest_json)
                self.dev_db = TinyDB(seen_devices_json)

                log.info("using {0}".format(speedtest_json))
                log.info("using {0}".format(seen_devices_json))

            except Exception as e:
                log.error("tinydb continuing without tinydb ({0} / {1})".
                    format(self.nma.conf['databases']['tinydb_path'], "{}".format(e)))
                self.nma.conf['databases']['tinydb_enable'] = False
        else:
            log.info("tinydb disabled")

        if self.nma.conf['databases']['tinydb_enable'] and (len(self.speed_db.all()) == 0):
            self.speed_db.insert(
                {'download': 600, 'upload': 50, 'test': False})

        if not self.quiet:
            print("\n --- NETWORK MEASUREMENTS ---")

    def update_sites(self, sites):
        """
        Method updates sites to visit during latency sites. Takes text file
        containing websites as input
        """

        self.labels = {}

        with open(sites) as f:
            for line in f:
                (website, label) = line.split()
                self.labels[website] = label

        self.sites = list(self.labels.keys())

    def update_max_speed(self, measured_down, measured_up):
        """
        Method updates the maximum measured upload and download bandwidth
        from either the iperf3 test or the ookla speed test. This value
        is used as the guess for the iperf3 test
        """

        max_speed = self.speed_db.all()
        max_down = max(measured_down, max_speed[0]['download'])
        max_up = max(measured_up, max_speed[0]['upload'])

        if max_speed[0]['test']:
            self.speed_db.update({'download': max_down})
            self.speed_db.update({'upload': max_up})
        else:
            self.speed_db.update({'download': measured_down})
            self.speed_db.update({'upload': measured_up})

        self.speed_db.update({'test': True})

    def speed(self, run_test):
        """ Test runs Ookla Speed test """

        if not run_test:
            return

        output = Popen('speedtest --json', shell=True, stdout=PIPE)
        res, _ = output.communicate()
        res_json = json.loads(res)

        download_speed = res_json["download"] / 1e6
        upload_speed = res_json["upload"] / 1e6

        self.update_max_speed(float(download_speed), float(upload_speed))

        self.results["speedtest_download"] = download_speed
        self.results["speedtest_upload"] = upload_speed

        if not self.quiet:
            print('\n --- Ookla speed tests ---')
            print(f'Download: {download_speed} Mb/s')
            print(f'Upload:   {upload_speed} Mb/s')
        return res_json  # "test_results" was undefined; return the parsed speedtest output

    def ping_latency(self, run_test):
        """
        Method records ping latency to self.sites
        """

        if not run_test:
            return

        ping_res = None

        for site in self.sites:
            ping_cmd = "ping -i {:.2f} -c {:d} -w {:d} {:s}".format(
                0.25, 10, 5, site)
            ping_res = Popen(ping_cmd, shell=True,
                             stdout=PIPE).stdout.read().decode('utf-8')

            ping_pkt_loss = float(re.findall(', ([0-9.]*)% packet loss',
                                             ping_res, re.MULTILINE)[0])

            ping_rtt_ms = re.findall(
                'rtt [a-z/]* = ([0-9.]*)/([0-9.]*)/([0-9.]*)/([0-9.]*) ms'
                , ping_res)[0]

            ping_rtt_ms = [float(v) for v in ping_rtt_ms]

            label = self.labels[site]

            self.results[label + "_packet_loss_pct"] = ping_pkt_loss
            self.results[label + "_rtt_min_ms"] = ping_rtt_ms[0]
            self.results[label + "_rtt_max_ms"] = ping_rtt_ms[2]
            self.results[label + "_rtt_avg_ms"] = ping_rtt_ms[1]
            self.results[label + "_rtt_mdev_ms"] = ping_rtt_ms[3]

            if not self.quiet:
                print(f'\n --- {label} ping latency ---')
                print(f'Packet Loss: {ping_pkt_loss}%')
                print(f'Average RTT: {ping_rtt_ms[0]} (ms)')
                print(f'Minimum RTT: {ping_rtt_ms[1]} (ms)')
                print(f'Maximum RTT: {ping_rtt_ms[2]} (ms)')
                print(f'RTT Std Dev: {ping_rtt_ms[3]} (ms)')
        
        return ping_res

    def latency_under_load(self, run_test, client, port):
        """
        Method records ping latency under load to self.sites_load
        """

        if not run_test: return
        if not client:   return

        if 'targets' in self.nma.conf['latency_under_load']:
            targets = self.nma.conf['latency_under_load']['targets']
        else:
            return

        ping_res = None

        for upload in [True, False]:

            ul_dl = "ul" if upload else "dl"

            load = "/usr/local/src/nm-exp-active-netrics/bin/iperf3.sh -c {} -p {} -i 0 -t 10 {}"\
                       .format(client, port, "" if upload else "-R" )

            load += " > /dev/null & sleep 2 && echo starting ping && "

            for site in targets:

                ping_cmd = "ping -i 0.25 -c 10 -w 5 {:s}".format(site)
                
                start = time.time()
                ping_res = Popen(load + ping_cmd, shell=True,
                                 stdout=PIPE).stdout.read().decode('utf-8')

                ping_pkt_loss = float(re.findall(', ([0-9.]*)% packet loss',
                                                 ping_res, re.MULTILINE)[0])

                ping_rtt_ms = re.findall(
                    'rtt [a-z/]* = ([0-9.]*)/([0-9.]*)/([0-9.]*)/([0-9.]*) ms'
                    , ping_res)[0]

                ping_rtt_ms = [float(v) for v in ping_rtt_ms]

                label = self.labels[site]

                self.results[f"{label}_packet_loss_pct_under_{ul_dl}"] = ping_pkt_loss
                self.results[f"{label}_rtt_min_ms_under_{ul_dl}"] = ping_rtt_ms[0]
                self.results[f"{label}_rtt_max_ms_under_{ul_dl}"] = ping_rtt_ms[2]
                self.results[f"{label}_rtt_avg_ms_under_{ul_dl}"] = ping_rtt_ms[1]
                self.results[f"{label}_rtt_mdev_ms_under_{ul_dl}"] = ping_rtt_ms[3]

                if not self.quiet:
                    print(f'\n --- {label} ping latency under load ---')
                    print(f'Packet Loss Under Load: {ping_pkt_loss}%')
                    print(f'Average RTT Under Load: {ping_rtt_ms[1]} (ms)')
                    print(f'Minimum RTT Under Load: {ping_rtt_ms[0]} (ms)')
                    print(f'Maximum RTT Under Load: {ping_rtt_ms[2]} (ms)')
                    print(f'RTT Std Dev Under Load: {ping_rtt_ms[3]} (ms)')

                if (time.time() - start) < 11:
                    time.sleep(11 - (time.time() - start))
        
        return ping_res

    def dns_latency(self, run_test):
        """
        Method records dig latency for each site in self.sites
        """

        if not run_test:
            return

        dig_res = None

        target = '8.8.8.8'

        if 'target' in self.nma.conf['dns_latency'].keys():
            target = self.nma.conf['dns_latency']['target']

        dig_delays = []

        for site in self.sites:
            dig_cmd = f'dig @{target} {site}'
            dig_res = Popen(dig_cmd, shell=True,
                            stdout=PIPE).stdout.read().decode('utf-8')

            dig_res_qt = re.findall('Query time: ([0-9]*) msec',
                                 dig_res, re.MULTILINE)[0]
            dig_delays.append(int(dig_res_qt))

        self.results["dns_query_avg_ms"] = sum(dig_delays) / len(dig_delays)
        self.results["dns_query_max_ms"] = max(dig_delays)

        if not self.quiet:
            print(f'\n --- DNS Delays (n = {len(dig_delays)}) ---')
            print(f'Avg DNS Query Time: {self.results["dns_query_avg_ms"]} ms')
            print(f'Max DNS Query Time: {self.results["dns_query_max_ms"]} ms')

        return dig_res

    def hops_to_backbone(self, run_test):
        """
        Method counts hops to 'ibone', the backbone node that all data
        leaving Chicago travels through
        """

        if not run_test:
            return

        tr_res = None

        target = 'www.google.com'

        if 'target' in self.nma.conf['hops_to_backbone']:
            target = self.nma.conf['hops_to_backbone']['target']

        tr_cmd = 'traceroute -m 15 -N 32 -w3 {0} | grep -m 1 ibone'.format(target)
        tr_res = Popen(tr_cmd, shell=True,
                       stdout=PIPE).stdout.read().decode('utf-8')

        tr_res_s = tr_res.strip().split(" ")

        if len(tr_res_s):
            hops = int(tr_res_s[0])
        else:
            hops = -1

        self.results["hops_to_backbone"] = hops

        if not self.quiet:
            print('\n --- Hops to Backbone ---')
            print(f'Hops: {self.results["hops_to_backbone"]}')
        
        return tr_res

    def hops_to_target(self, site):
        """
        Method counts the number of hops to the target site
        """

        if not site:
            return

        tr_res = None

        target = 'www.google.com'

        if 'target' in self.nma.conf['hops_to_target']:
            target = self.nma.conf['hops_to_target']['target']

        tr_cmd = f'traceroute -m 20 -q 5 -w 2 {target} | tail -1 | awk "{{print $1}}"'
        tr_res = Popen(tr_cmd, shell=True,
                       stdout=PIPE).stdout.read().decode('utf-8')

        tr_res_s = tr_res.strip().split(" ")

        hops = -1

        if len(tr_res_s):
            hops = int(tr_res_s[0])

        label = self.labels[target]

        self.results[f'hops_to_{label}'] = hops

        if not self.quiet:
            print('\n --- Hops to Target ---')
            print("Hops to {}: {}".format(target,
                                          self.results[f'hops_to_{label}']))
        return tr_res

    def connected_devices_arp(self, run_test):
        """
        Method counts the number of active devices on the network.
        """


        if not run_test:
            return

        res = {}

        ts = int(time.time())

        route_cmd = "ip r | grep /24 | awk '{print $1;}'"
        subnet = Popen(route_cmd, shell=True,
                       stdout=PIPE).stdout.read().decode('utf-8')

        nmap_cmd = f'nmap --unprivileged -sn {subnet}'
        Popen(nmap_cmd, shell=True, stdout=PIPE)

        arp_cmd = ("/usr/sbin/arp -i eth0 -n | grep : |"
                   "grep -v '_gateway' | tr -s ' ' | "
                   "cut -f3 -d' ' | sort | uniq")

        arp_res = Popen(arp_cmd, shell=True,
                        stdout=PIPE).stdout.read().decode('utf-8')
        res['arp'] = arp_res

        devices = set(arp_res.strip().split("\n"))
        active_devices = [[dev, ts, 1] for dev in devices]

        for device in active_devices:
            if self.dev_db.contains(where('mac_addr') == device[0]):
                self.dev_db.update(increment("n"),
                                   where('mac_addr') == device[0])
                self.dev_db.update(tdb_set('last_seen', device[1]),
                                   where('mac_addr') == device[0])
            else:
                self.dev_db.insert({'mac_addr': device[0],
                                    'last_seen': device[1],
                                    'n': device[2]})

        print(self.dev_db.all())
        ndev_past_day = len(self.dev_db.search(
            where('last_seen') > (ts - 86400)))
        ndev_past_week = len(self.dev_db.search(
            where('last_seen') > (ts - 86400*7)))

        print(ndev_past_day)
        self.results["devices_active"] = len(active_devices)
        self.results["devices_total"] = self.dev_db.count(where('n') >= 1)
        self.results["devices_1day"] = ndev_past_day
        self.results["devices_1week"] = ndev_past_week

        if not self.quiet:
            print('\n --- Number of Devices ---')
            print(f'Number of active devices: '
                  f'{self.results["devices_active"]}')
            print(f'Number of total devices: '
                  f'{self.results["devices_total"]}')
            print(f'Number of devices in last 1 day:'
                  f' {self.results["devices_1day"]}')
            print(f'Number of devices in last week:'
                  f' {self.results["devices_1week"]}')
        return res

    def iperf3_bandwidth(self, client, port):
        """
        Method for recorded results of iperf3 bandwidth tests
        """

        if not client:
            return

        iperf_res = None

        if self.nma.conf['databases']['tinydb_enable']:
            speed = self.speed_db.all()

        measured_bw = {'upload': 0, 'download': 0}
        measured_jitter = {'upload': 0, 'download': 0}

        for direction, value in measured_bw.items():
            reverse = False

            bandwidth = 0

            if self.nma.conf['databases']['tinydb_enable']:
                bandwidth = speed[0][direction] + 10
                if direction == 'download':
                    bandwidth += 40
                    reverse = True

            iperf_cmd = "/usr/local/src/nm-exp-active-netrics/bin/iperf3.sh -c {} -p {} -u -i 0 -b {}M {} | awk 'NR=={}'"\
                .format(client, port, bandwidth,
                        '-R' if reverse else "", 10 if reverse else 8)
            iperf_res = Popen(iperf_cmd, shell=True,
                              stdout=PIPE).stdout.read().decode('utf-8')

            measured_bw[direction] = iperf_res.split()[6]
            measured_jitter[direction] = iperf_res.split()[8]

            self.results[f'iperf_udp_{direction}'] = float(
                measured_bw[direction])
            self.results[f'iperf_udp_{direction}_jitter_ms'] = float(
                measured_jitter[direction])

            if not self.quiet:
                if direction == 'upload':
                    print('\n --- iperf Bandwidth and Jitter ---')
                print(f'{direction} bandwidth: {measured_bw[direction]} Mb/s')
                print(f'{direction} jitter: {measured_jitter[direction]} ms')

        if self.nma.conf['databases']['tinydb_enable']:
            self.update_max_speed(float(measured_bw['download']),
                              float(measured_bw['upload']))
        return iperf_res


    def tshark_eth_consumption(self, run_test, dur = 60):

        if not run_test:
            return

        local_ip_cmd   = "ifconfig eth0 | grep -oP '(?<=inet\s)\d+(\.\d+){3}'"
        gateway_ip_cmd = "ip r | grep default | cut -f3 -d' '"

        loc_ip = Popen(local_ip_cmd,   shell = True, stdout = PIPE).stdout.read().decode('utf-8').strip()
        gw_ip  = Popen(gateway_ip_cmd, shell = True, stdout = PIPE).stdout.read().decode('utf-8').strip()

        cap_filter = f"not broadcast and not multicast and not (ip src {loc_ip} or ip dst {loc_ip} or ip src {gw_ip} or ip dst {gw_ip})"

        tshark_cmd = f'tshark -f "{cap_filter}" -i eth0 -a duration:{dur} -Q -z conv,ip -z io,stat,{dur*2}'
        tshark_res = Popen(tshark_cmd, shell = True, stdout = PIPE).stdout.read().decode('utf-8')

        duration = float(re.findall("Duration: ([0-9.]*) secs", tshark_res, re.MULTILINE)[0])

        columns = ["A", "B", "BA_fr", "BA_bytes", "AB_fr", "AB_bytes", "tot_fr", "to_bytes", "start", "duration"]

        tshark_conv = re.findall('(.*<->.*)', tshark_res, re.MULTILINE)
        tshark_list = [re.sub("<->", "", l).split() for l in tshark_conv]

        tshark_conn = [{c : conn[ci] for ci, c in enumerate(columns)}
                       for conn in tshark_list]

        dl, ul = 0, 0
        for conn in tshark_conn:
            if "192.168" in conn["A"]:
                dn, up = "BA", "AB"
            else:
                dn, up = "AB", "BA"

            dl += float(conn[f"{dn}_bytes"])
            ul += float(conn[f"{up}_bytes"])


        # Converts bytes to Mbps
        self.results["consumption_download"] = dl * 8 / 1e6 / duration
        self.results["consumption_upload"]   = ul * 8 / 1e6 / duration

        return tshark_res
Example #33
class EnvDb:
    def __init__(self, db_path):
        from tinydb import TinyDB
        self.entries = []
        self.db = TinyDB(db_path)
        # There is one main problem with querying the way it is done here: the content of the DB is only loaded
        # once, so the DB is not used as a live database and the object must be re-instantiated to pick up new entries.
        self._load_entries()

    def _load_entries(self):
        self.entries = []
        # load everything into memory
        for db_entry in self.db:
            try:
                self.entries.append(EnvDbEntry.from_config(db_entry))
            except Exception as e:
                logger.warning(
                    "Could not load entry with cli path {0} due to: {1}. "
                    "Skipping...".format(str(db_entry), str(e)))

    def get_entry_by_model(self,
                           model_name,
                           only_most_recent=True,
                           only_valid=False):
        # iterate over all the entries and select the ones where the model_name is part of one of the listed models
        # For checking split the model_name by "/" as well as the env-compatible model names and then check equality.
        # Select the one with the most recent timestamp
        norm_name = lambda x: x.lstrip("/").rstrip("/")
        norm_model_name = norm_name(model_name)
        query_model_tk_len = len(norm_model_name.split("/"))
        sel_entries = {}
        for entry in self.get_all(only_valid=only_valid):
            pre_sel = [m for m in entry.compatible_models if model_name in m]
            sel = [
                m for m in pre_sel
                if "/".join(norm_name(m).split("/")[:query_model_tk_len]) ==
                norm_model_name
            ]
            if len(sel) != 0:
                sel_entries[entry.timestamp] = entry
        ordered_entries = OrderedDict([
            (k, sel_entries[k]) for k in sorted(list(sel_entries.keys()))
        ][::-1])

        if only_most_recent:
            if len(ordered_entries) == 0:
                return None
            else:
                return list(ordered_entries.values())[0]
        else:
            return list(ordered_entries.values())

    def get_all_unfinished(self):
        unfinished = []
        for e in self.entries:
            if not e.successful or e.cli_path is None or not os.path.exists(
                    e.cli_path):
                unfinished.append(e)
        return unfinished

    def db_remove_unfinished(self):
        [self.remove(e) for e in self.get_all_unfinished()]

    def get_all(self, only_valid=False):
        entries = self.entries
        if only_valid:
            invalid = self.get_all_unfinished()
            entries = [e for e in entries if e not in invalid]
        return entries

    def remove(self, entry):
        self.entries = [e for e in self.entries if e != entry]

    def append(self, entry):
        self.entries.append(entry)

    def save(self):
        self.db.truncate()
        for entry in self.entries:
            self.db.insert(entry.get_config())

    def __del__(self):
        self.db.close()
Example #34
 def add_record(self, name='', email='', tags='', addressLine1='', addressLine2='', city='', state='', zip_code=''):
     db = TinyDB(self.dbfilename)
     return db.insert({'name': name, 'email': email, 'tags': tags, 'addressLine1': addressLine1, 'addressLine2': addressLine2, 'city': city, 'state': state, 'zip_code': zip_code})
Example #35
def sendQadmin(form):
    db = TinyDB("QueryAdminStd.json")
    db.insert({'ID': ll.ID(),'Query':txt_query.get()})
    form.destroy()
    stdFrom()
Example #36
def download(name: str,
             force: bool = False,
             url: str = None,
             version: str = None) -> bool:
    """
    Download corpus.

    The available corpus names can be seen in this file:
    https://github.com/PyThaiNLP/pythainlp-corpus/blob/master/db.json

    :param str name: corpus name
    :param bool force: force download
    :param str url: URL of the corpus catalog
    :param str version: Version of the corpus
    :return: **True** if the corpus is found and succesfully downloaded.
             Otherwise, it returns **False**.
    :rtype: bool

    :Example:
    ::

        from pythainlp.corpus import download

        download('wiki_lm_lstm', force=True)
        # output:
        # Corpus: wiki_lm_lstm
        # - Downloading: wiki_lm_lstm 0.1
        # thwiki_lm.pth:  26%|██▌       | 114k/434k [00:00<00:00, 690kB/s]

    By default, downloaded corpus and model will be saved in
    ``$HOME/pythainlp-data/``
    (e.g. ``/Users/bact/pythainlp-data/wiki_lm_lstm.pth``).
    """
    if not url:
        url = corpus_db_url()

    corpus_db = get_corpus_db(url)
    if not corpus_db:
        print(f"Cannot download corpus catalog from: {url}")
        return False

    corpus_db = corpus_db.json()

    # check if corpus is available
    if name in list(corpus_db.keys()):
        local_db = TinyDB(corpus_db_path())
        query = Query()

        corpus = corpus_db[name.lower()]
        print("Corpus:", name)
        if version is None:
            version = corpus["latest_version"]
        corpus_versions = corpus["versions"][version]
        file_name = corpus_versions["filename"]
        found = local_db.search((query.name == name)
                                & (query.version == version))

        # If not found in local, download
        if force or not found:
            print(f"- Downloading: {name} {version}")
            _download(
                corpus_versions["download_url"],
                file_name,
            )
            _check_hash(
                file_name,
                corpus_versions["md5"],
            )

            if found:
                local_db.update({"version": version}, query.name == name)
            else:
                local_db.insert({
                    "name": name,
                    "version": version,
                    "filename": file_name
                })
        else:
            if local_db.search((query.name == name)
                               & (query.version == version)):
                # Already has the same version
                print("- Already up to date.")
            else:
                # Has the corpus but different version
                current_ver = local_db.search(query.name == name)[0]["version"]
                print(f"- Existing version: {current_ver}")
                print(f"- New version available: {version}")
                print("- Use download(data_name, force=True) to update")

        local_db.close()
        return True

    print("Corpus not found:", name)
    return False
Example #37
 print("MIDItoOBS made by lebaston100.de")
 print("!!MAKE SURE OBS IS RUNNING OR THIS SCRIPT WILL CRASH!!")
 print("Select Midi Device")
 deviceList = mido.get_input_names()
 counter = 0
 for device in deviceList:
     print("%s: %s" % (counter, device))
     counter += 1
 input_select = int(input("Select 0-%s: " % str(len(deviceList) - 1)))
 if input_select in range(0, len(deviceList)):
     print("You selected: %s (%s)" %
           (str(input_select), deviceList[input_select]))
     result = db.search(Query().value == deviceList[input_select])
     if result:
         db.remove(Query().type == "device")
         db.insert({"type": "device", "value": deviceList[input_select]})
     else:
         db.insert({"type": "device", "value": deviceList[input_select]})
     try:
         midiport = mido.open_input(deviceList[input_select])
     except:
         print("The midi device might be used by another application.")
         print(
             "Please close the device in the other application and restart this script."
         )
         time.sleep(8)
         sys.exit()
     atexit.register(exitScript)
     print("Please press key or move fader/knob on midi controller")
     mainLoop()
 else:
Example #38
class PRRLATinyDB:
    '''
    Helper class for simplifying interactions with the TinyDB instance.
    '''
    def __init__(self, path):
        self.db = TinyDB(path)

    def show_collections(self, institution_keys=None):
        '''
        Prints a list of collections from the database.

        If a list of institution keys is specified (with values found in the 
        `institution_key` column in the database), then show collections of 
        only those institutions. Otherwise, show all collections in the 
        database.

        Args:
          institution_keys: a list of values found in the `institution_key` 
              column in the database

        Returns:
          None
        '''
        # TODO: add parameter to choose which fields to print, or to print data in tabular form
        if (institution_keys is None):
            print(dumps(self.db.all(), indent=4))
        else:
            Row = Query()
            results = []
            for institution_key in institution_keys:
                results += self.db.search(
                    Row.institution_key == institution_key)
            print(dumps(results, indent=4))

    def import_collections(self,
                           resourcesync_sourcedescription,
                           oaipmh_endpoint,
                           collection_keys=None,
                           institution_name=None,
                           resource_dir='resourcesync',
                           overwrite=False):
        '''
        Adds an institution's ResourceSync-able collections to the database.

        If `collection_keys` is specified, then add to the database only
        the collections specified by that list. Otherwise, add all collections 
        to the database.

        Args:
          resourcesync_sourcedescription: a ResourceSync SourceDescription URL
              see https://www.openarchives.org/rs/1.1/resourcesync#SourceDesc
          oaipmh_endpoint: a OAI-PMH base URL
              see https://www.openarchives.org/OAI/openarchivesprotocol.html#Identify
          collection_keys: a list of collection keys specifying an
              exclusive list of collections to add to the database
          institution_name: human-readable name of the institution which
              should be used instead of its OAI-PMH repositoryName
          resource_dir: path to the local directory to store copies of the
              synced resources to, relative to the home directory "~"
          overwrite: whether or not to overwrite rows in the database that
              match the `collection_key` and `institution_key`

        Returns:
          None
        '''
        rs_soup = BeautifulSoup(
            get(resourcesync_sourcedescription).content, 'xml')
        capabilitylist_urls = [a.string for a in rs_soup.find_all('loc')]

        sickle = Sickle(oaipmh_endpoint)
        sets = sickle.ListSets()
        identify = sickle.Identify()

        set_spec_to_name = {z.setSpec: z.setName for z in sets}
        url_map_from = '/'.join(oaipmh_endpoint.split(sep='/')[:-1]) + '/'

        i_name = institution_name if institution_name is not None else identify.repositoryName

        has_capability = lambda c, tag: tag.md is not None and 'capability' in tag.md.attrs and tag.md[
            'capability'] == c

        for capabilitylist_url in capabilitylist_urls:

            # For now, get setSpec from the path component of the CapabilityList URL (which may have percent-encoded characters)
            set_spec = urllib.parse.unquote(
                urllib.parse.urlparse(capabilitylist_url).path.split(
                    sep='/')[2])

            # If a subset of collections is specified, only add collections that belong to it. Otherwise, add all collections.
            if collection_keys is None or (collection_keys is not None
                                           and set_spec in collection_keys):

                r_soup = BeautifulSoup(get(capabilitylist_url).content, 'xml')

                # ResourceList should always exist, but if it doesn't, log it and skip this collection
                try:
                    resourcelist_url = r_soup.find(
                        functools.partial(has_capability,
                                          'resourcelist')).loc.string
                except AttributeError:
                    # TODO: log it
                    continue

                # If no ChangeList exists yet, that's ok; predict what its URL will be
                try:
                    changelist_url = r_soup.find(
                        functools.partial(has_capability,
                                          'changelist')).loc.string
                except AttributeError:
                    changelist_url = '/'.join(
                        resourcelist_url.split(sep='/')[:-1] +
                        ['changelist_0000.xml'])

                print(
                    self.__collection_identifier(i_name,
                                                 identify.repositoryIdentifier,
                                                 set_spec_to_name[set_spec],
                                                 set_spec))

                # We can add the collection to the database now
                # TODO: catch exceptions
                self.__insert_or_update(identify.repositoryIdentifier, i_name,
                                        set_spec, set_spec_to_name[set_spec],
                                        resourcelist_url, changelist_url,
                                        url_map_from, resource_dir, overwrite)

    def remove_collections(self, institution_key, collection_keys=None):
        '''
        Removes collections of a given institution from the database.

        If a list of collection keys is specified (with values found in the 
        `collection_key` column in the database), then remove only those 
        collections. Otherwise, remove all of the institution's collections.

        Args:
          institution_key: a value found in the `institution_key` column in 
              the database
          collection_keys: a list of values found in the `collection_key` 
              column in the database

        Returns:
          None
        '''
        # TODO: print collections that we remove
        Row = Query()
        if collection_keys is None:
            self.db.remove(Row.institution_key == institution_key)
        else:
            for collection_key in collection_keys:
                self.db.remove((Row.institution_key == institution_key)
                               & (Row.collection_key == collection_key))

    def __insert_or_update(self,
                           institution_key,
                           institution_name,
                           collection_key,
                           collection_name,
                           resourcelist_uri,
                           changelist_uri,
                           url_map_from,
                           resource_dir='resourcesync',
                           overwrite=False):
        '''
        Adds or updates a single row in the database.

        This method should normally be called only by `import_collections`.

        Args:
          institution_key: a machine-readable name for an institution
          institution_name: a human-readable name for an institution
          collection_key: a machine-readable name for a collection
          collection_name: a human-readable name for a collection
          resourcelist_uri: a URL for a ResourceSync ResourceList
          changelist_uri: a URL for a ResourceSync ChangeList
          url_map_from: the leading part of a Resource's URL to cut off in 
              order to map the URL to a local filename
          resource_dir: path to the local directory to store copies of the 
              synced resources to, relative to the home directory "~"
          overwrite: whether or not to overwrite rows in the database that 
              match the `collection_key` and `institution_key`

        Returns:
          None
        '''
        # TODO: change *_uri parameters to *_url
        # TODO: throw errors when warranted
        Row = Query()
        if not self.db.contains((Row.institution_key == institution_key)
                                & (Row.collection_key == collection_key)):
            # NOTE: if either `collection_key` or `institution_key` change for any given collection,
            # the filesystem location of the saved files will also change,
            # since resources are saved under the path `file_path_map_to`/`institution_key`/`collection_key`.

            # TODO: keep track of potential stale directories so they can be manually deleted.
            # This could involve logging calls to `add` where a row in the DB doesn't match the `institution_key` and `collection_key` parameters,
            # but DOES match either 1) both the `institution_name` and `collection_name` parameters, or 2) one of the URI parameters.

            self.db.insert({
                'institution_key': institution_key,
                'institution_name': institution_name,
                'collection_key': collection_key,
                'collection_name': collection_name,
                'resourcelist_uri': resourcelist_uri,
                'changelist_uri': changelist_uri,
                'url_map_from': url_map_from,
                'file_path_map_to': resource_dir,
                'new': True
            })
        elif overwrite:

            # TODO: if `file_path_map_to` changes, then we need to do a baseline synchronization again,
            # because that means the files will change location on the filesystem.
            # However, `file_path_map_to` should not be changed once chosen.
            self.db.update(
                {
                    'institution_key': institution_key,
                    'institution_name': institution_name,
                    'collection_key': collection_key,
                    'collection_name': collection_name,
                    'resourcelist_uri': resourcelist_uri,
                    'changelist_uri': changelist_uri,
                    'url_map_from': url_map_from,
                    'file_path_map_to': resource_dir,
                }, (Row.institution_key == institution_key)
                & (Row.collection_key == collection_key))
        else:
            # If row already exists and we don't want to overwrite, no-op.
            # TODO: log
            pass

    def __collection_identifier(self, repository_name, repository_identifier,
                                set_name, set_identifier):
        return repository_name + ' (' + repository_identifier + ') : ' + set_name + ' (' + set_identifier + ')'
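Note that the compound conditions in the class above must be combined with TinyDB's `&`/`|` operators; Python's `and` would simply evaluate the left-hand Query's truthiness and return only one of the two conditions, silently dropping the other. A minimal sketch of the difference, with made-up key values:

from tinydb import TinyDB, Query

db = TinyDB('collections_demo.json')
Row = Query()

# Correct: parenthesised conditions joined with & form a single compound query
db.remove((Row.institution_key == 'demo-inst') & (Row.collection_key == 'demo-coll'))

# Wrong: `and` returns only the right-hand condition, so institution_key is ignored
# db.remove(Row.institution_key == 'demo-inst' and Row.collection_key == 'demo-coll')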
Example #39
0
class Learner():
    """Class to provide the learning ability.
    """
    def __init__(self, nlp):
        """Initialization method of :class:`dragonfire.learn.Learner` class.

        Args:
            nlp:  :mod:`spacy` model instance.
        """

        self.pronouns = collections.OrderedDict(
        )  # Create an ordered dictionary
        self.pronouns["I"] = "YOU"
        self.pronouns["ME"] = "YOU"
        self.pronouns["MY"] = "YOUR"
        self.pronouns["MINE"] = "YOURS"
        self.pronouns["MYSELF"] = "YOURSELF"
        self.pronouns["OUR"] = "YOUR"
        self.pronouns["OURS"] = "YOURS"
        self.pronouns["OURSELVES"] = "YOURSELVES"
        self.pronouns["WE"] = "YOU"
        self.pronouns["US"] = "YOU"
        self.inv_pronouns = collections.OrderedDict(
        )  # Create an ordered dictionary
        self.inv_pronouns["YOU"] = "I"
        self.inv_pronouns["YOUR"] = "MY"
        self.inv_pronouns["YOURS"] = "MINE"
        self.inv_pronouns["YOURSELF"] = "MYSELF"
        self.inv_pronouns["YOURSELVES"] = "OURSELVES"

        self.auxiliaries = collections.OrderedDict(
        )  # Create an ordered dictionary
        self.auxiliaries["AM"] = "ARE"
        self.auxiliaries["'M"] = " ARE"
        self.auxiliaries["WAS"] = "WERE"
        self.inv_auxiliaries = collections.OrderedDict(
        )  # Create an ordered dictionary
        self.inv_auxiliaries["ARE"] = "AM"
        self.inv_auxiliaries["WERE"] = "WAS"

        home = expanduser("~")  # Get the home directory of the user
        self.db = TinyDB(
            home + '/.dragonfire_db.json'
        )  # This is where we store the database; /home/USERNAME/.dragonfire_db.json
        self.nlp = nlp  # Load en_core_web_sm, English, 50 MB, default model
        self.is_server = False
        self.db_session = None

    def respond(self, com, is_server=False, user_id=None):
        """Method to respond the user's input/command using learning ability.

        Args:
            com (str):  User's command.

        Keyword Args:
            is_server (bool):   Is Dragonfire running as an API server?
            user_id (int):      User's ID.

        Returns:
            str:  Response.

        .. note::

            Entry function for :class:`Learner` class. Dragonfire calls only this function. It does not handle TTS.

        """

        self.is_server = is_server
        is_public = True
        com = self.clean(com)
        doc = self.nlp(
            com
        )  # Command(user's speech) must be decoded from utf-8 to unicode because spaCy only supports unicode strings, self.nlp() handles all parsing
        subject = [
        ]  # subject list (subjects here usually are; I'M, YOU, HE, SHE, IT, etc.)
        types = []  # types of the previous noun phrases
        types.append("")
        for np in doc.noun_chunks:  # Iterate over the noun phrases(chunks) TODO: Cover 'dobj' also; doc = nlp(u'DESCRIBE THE SUN') >>> (u'THE SUN', u'SUN', u'dobj', u'DESCRIBE')
            types.append(np.root.dep_)
            np_text, is_public = self.detect_pronoun(np.text)
            # print("IS_PUBLIC: ", is_public)
            # Purpose of this if statement is completing possessive form of nouns
            if np.root.dep_ == 'pobj' and types[
                    -2] == 'nsubj':  # if it's an object of a preposition and the previous noun phrase's type was nsubj(nominal subject) then (it's purpose is capturing subject like MY PLACE OF BIRTH)
                subject.append(
                    np.root.head.text
                )  # append the parent text from syntactic relations tree (example: while nsubj is 'MY PLACE', np.root.head.text is 'OF')
                subject.append(
                    np_text
                )  # append the text of this noun phrase (example: while nsubj is 'MY PLACE', np.text is 'BIRTH')
            if np.root.dep_ == 'nsubj' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in [
                    'WDT', 'WP', 'WP$', 'WRB'
            ]:  # if it's a nsubj(nominal subject) ("wh-" words can be considered as nsubj(nominal subject) but they are out of scope.  This is why we are excluding them.)
                subject.append(np_text)  # append the text of this noun phrase
            if np.root.dep_ == 'attr' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in [
                    'WDT', 'WP', 'WP$', 'WRB'
            ]:  # if it's an attribute and the previous noun phrase's type was not nsubj(nominal subject)
                subject.append(np_text)  # append the text of this noun phrase
            if np.root.dep_ == 'dobj' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in [
                    'WDT', 'WP', 'WP$', 'WRB'
            ]:  # if it's a dobj(direct object) and the previous noun phrase's type was not nsubj(nominal subject)
                subject.append(np_text)  # append the text of this noun phrase
        subject = [x.strip() for x in subject]
        subject = ' '.join(subject)  # concatenate all noun phrases found
        if subject:  # if the subject is not empty
            if subject.upper(
            ) in self.inv_pronouns:  # skip the learning ability if the user is talking about Dragonfire itself
                return ""
            wh_found = False
            for word in doc:  # iterate over the each word in the given command(user's speech)
                if word.tag_ in [
                        'WDT', 'WP', 'WP$', 'WRB'
                ]:  # check if there is a "wh-" question (we are determining that if it's a question or not, so only accepting questions with "wh-" form)
                    wh_found = True
            if wh_found:  # if that's a question
                straight = self.db_get(
                    subject, is_public=is_public,
                    user_id=user_id)  # get the answer from the database
                if straight is None:
                    return self.db_get(
                        subject,
                        is_public=is_public,
                        user_id=user_id,
                        invert=True)  # if nothing found then invert
                return straight
            else:
                verb_found = False
                verbtense = None  # verbtense is the am/is/are of the main sentence
                clause = []  # is the information that we need to acknowledge
                verbs = []
                for word in doc:
                    # print(word.text, word.lemma_, word.pos_, word.tag_, word.dep_, word.shape_, word.is_alpha, word.is_stop)
                    if verb_found:  # collect all the words that come after the first verb; they form the clause
                        if word.pos_ != 'PUNCT':  # exclude punctuations
                            clause.append(word.text)
                    if word.pos_ == 'VERB' and word.is_stop and not verb_found:  # if it's a (stop-word) verb and no verb has been found yet
                        verb_found = True  # verb is found
                        verbtense = word.text  # append it to verbtense
                    if word.pos_ == 'VERB':
                        verbs.append(word.text)
                clause = [x for x in clause]
                clause = ' '.join(clause).strip()  # concatenate the clause

                # keywords to order get and remove operations on the database
                if any(verb in verbs for verb in self.upper_capitalize(
                    ["forget", "remove", "delete", "update"])):
                    if self.is_server and is_public:
                        return "I cannot forget a general fact."
                    return self.db_delete(
                        subject, is_public=is_public, user_id=user_id
                    )  # if there is a record about the subject in the database then remove that record and...

                if any(verb in verbs for verb in self.upper_capitalize(
                    ["define", "explain", "tell", "describe"])):
                    return self.db_get(subject,
                                       is_public=is_public,
                                       user_id=user_id)

                if verbtense:
                    return self.db_upsert(
                        subject,
                        verbtense,
                        clause,
                        com,
                        is_public=is_public,
                        user_id=user_id)  # set the record to the database

    def db_get(self, subject, invert=False, is_public=True, user_id=None):
        """Function to get a record from the database.

        Args:
            subject (str):  Subject that extracted from the user's input/command.

        Keyword Args:
            invert (bool):      Is it invert mode? (swap subject and clause)
            is_public (int):    Is it a public record? (non-user specific)
            user_id (int):      User's ID.

        Returns:
            str:  Response.
        """

        if self.is_server:
            try:
                if invert:
                    fact = self.db_session.query(Fact).filter(
                        Fact.clause == subject, Fact.user_id == user_id,
                        Fact.is_public == is_public).order_by(
                            Fact.counter.desc()).first()
                else:
                    fact = self.db_session.query(Fact).filter(
                        Fact.subject == subject, Fact.user_id == user_id,
                        Fact.is_public == is_public).order_by(
                            Fact.counter.desc()).first()
                answer = fact.subject + ' ' + fact.verbtense + ' ' + fact.clause
                return self.mirror(answer)
            except NoResultFound:
                return None
        else:
            if invert:
                result = self.db.search(
                    Query().clause == subject
                )  # make a database search by giving subject string (inverted)
            else:
                result = self.db.search(
                    Query().subject ==
                    subject)  # make a database search by giving subject string
            if result:  # if there is a result
                dictionary = {}
                for row in result:  # iterate over the rows of the result
                    if row['verbtense'] not in dictionary:  # if the verbtense is not in the keys of the dictionary
                        dictionary[row['verbtense']] = []  # then add it
                    if row['clause'] not in dictionary[row[
                            'verbtense']]:  # if the clause is not in the value like; dictionary['is']
                        dictionary[row['verbtense']].append(
                            row['clause'])  # then append the clause
                if invert:
                    answer = row[
                        'subject']  # in WHO questions subject is actually the clause so we learn the subject from db
                else:
                    answer = subject  # the answer we will return
                first_verbtense = False
                for key, value in dictionary.items(
                ):  # iterate over the dictionary defined and assigned on above
                    if not first_verbtense:  # if the first verbtense has not been appended yet
                        answer += ' ' + str(
                            key)  # concatenate with a whitespace
                        first_verbtense = True
                    else:
                        answer += ', ' + str(
                            key
                        )  # otherwise concatenate with a comma + whitespace
                    first_clause = False
                    for clause in value:  # iterate over the clauses of the key
                        if not first_clause:  # if the first clause has not been appended yet
                            answer += ' ' + clause  # concatenate with a whitespace
                            first_clause = True
                        else:
                            answer += ' and ' + clause  # otherwise concatenate with ' AND '
                return self.mirror(
                    answer)  # mirror the answer (for example: I'M to YOU ARE)
            else:
                return None  # if there is no result return None

    def db_upsert(self,
                  subject,
                  verbtense,
                  clause,
                  com,
                  is_public=True,
                  user_id=None):
        """Function to insert(or update) a record to the database.

        Args:
            subject (str):      Subject that extracted from the user's input/command.
            verbtense (str):    The am/is/are in the user's input/command.
            clause (str):       Clause that contains the fact.
            com (str):          User's command.

        Keyword Args:
            invert (bool):      Is it invert mode? (swap subject and clause)
            is_public (int):    Is it a public record? (non-user specific)
            user_id (int):      User's ID.

        Returns:
            str:  Response.
        """

        if self.is_server:
            fact = self.db_session.query(Fact).filter(
                Fact.subject == subject, Fact.verbtense == verbtense,
                Fact.clause == clause, Fact.user_id == user_id,
                Fact.is_public == is_public).one_or_none()
            if not fact:
                new_fact = Fact(subject=subject,
                                verbtense=verbtense,
                                clause=clause,
                                user_id=user_id,
                                is_public=is_public)
                self.db_session.add(new_fact)
                self.db_session.commit()
            else:
                fact.counter += 1
                self.db_session.commit()
        else:
            if not self.db.search(
                (Query().subject == subject) & (Query().verbtense == verbtense)
                    & (Query().clause == clause)
            ):  # if there is no exact record in the database then
                self.db.insert({
                    'subject': subject,
                    'verbtense': verbtense,
                    'clause': clause
                })  # insert the given data
        return "OK, I get it. " + self.mirror(
            com)  # mirror the command(user's speech) and return it to say

    def db_delete(self, subject, is_public=True, user_id=None):
        """Function to delete a record from the database.

        Args:
            subject (str):  Subject that extracted from the user's input/command.

        Keyword Args:
            is_public (int):    Is it a public record? (non-user specific)
            user_id (int):      User's ID.

        Returns:
            str: Response.
        """

        if self.is_server:
            if not is_public and user_id:
                fact = self.db_session.query(Fact).filter(
                    Fact.subject == subject,
                    Fact.user_id == user_id).one_or_none()
                if not fact:
                    return "I don't even know anything about " + self.mirror(
                        subject)
                else:
                    fact.delete()
                    self.db_session.commit()
                    return "OK, I forgot everything I know about " + self.mirror(
                        subject)
            else:
                return "I cannot forget a general fact about " + self.mirror(
                    subject)
        else:
            if self.db.remove(Query().subject == self.fix_pronoun(subject)):
                return "OK, I forgot everything I know about " + self.mirror(
                    subject)
            else:
                return "I don't even know anything about " + self.mirror(
                    subject)

    def mirror(self, answer):
        """Function to mirror the answer (for example: I'M to YOU ARE).

        Args:
            answer (str):  Prepared answer that just before the actual return of :func:`respond` method.

        Returns:
            str:  Response.
        """

        result = []
        types = []
        types.append("")
        doc = self.nlp(answer)
        for token in doc:
            types.append(token.lemma_)
            if token.lemma_ == "-PRON-":  # if it's a pronoun, mirror it
                if token.text.upper() in self.pronouns:
                    result.append(
                        self.pronouns[token.text.upper()].lower().strip())
                    continue
                if token.text.upper() in self.inv_pronouns:
                    result.append(
                        self.inv_pronouns[token.text.upper()].lower().strip())
                    continue
            if (token.lemma_ == "be" or token.dep_ == "aux") and types[
                    -2] == "-PRON-":  # if it's an auxiliary that comes right after a pronoun, mirror it
                if token.text.upper() in self.auxiliaries:
                    result.append(
                        self.auxiliaries[token.text.upper()].lower().strip())
                    continue
                if token.text.upper() in self.inv_auxiliaries:
                    result.append(self.inv_auxiliaries[
                        token.text.upper()].lower().strip())
                    continue
            result.append(token.text.strip())
        for i in range(len(result)):
            if result[i] == "i":
                result[i] = "I"
        result = ' '.join(result)  # concatenate the result
        return result.replace(
            " '", "'")  # fix for situations like "I 'AM", "YOU 'LL"

    def fix_pronoun(self,
                    subject):  # TODO: Extend the context of this function
        """Pronoun fixer to handle situations like YOU and YOURSELF.

        Args:
            subject (str):  Subject that extracted from the user's input/command.

        Returns:
            str:  Response.
        """

        if subject == "yourself":
            return "you"
        elif subject == "Yourself":
            return "You"
        elif subject == "YOURSELF":
            return "YOU"
        else:
            return subject

    def detect_pronoun(self, noun_chunk):
        """Determine whether user is talking about himself/herself or some other entity.

        Args:
            noun_chunk (str):  Noun phrase.

        Returns:
            ((str), (bool)):  Detected pronoun and boolean value depends on the detection.
        """

        np_text = ""
        is_public = True
        doc = self.nlp(noun_chunk)
        for token in doc:
            if token.lemma_ == "-PRON-":
                np_text += ' ' + token.text.lower()
                is_public = False
            else:
                np_text += ' ' + token.text
        return np_text.strip(), is_public

    def upper_capitalize(self, array):
        """Return capitalized and uppercased versions of the strings inside the given array.

        Args:
            array ((list) of (str)s):  List of strings.

        Returns:
            (list) of (str)s:  List of strings.
        """

        result = []
        for word in array:
            result.append(word)
            result.append(word.capitalize())
            result.append(word.upper())
        return result

    def clean(self, com):
        """Return a version of user's command that cleaned from punctuations, symbols, etc.

        Args:
            com (str):  User's command.

        Returns:
            str:  Cleaned version of user's command.
        """

        doc = self.nlp(com)
        for token in doc:
            if token.pos_ in ["PUNCT", "SYM"]:
                com = com.replace(token.tag_, '')

        return com
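A minimal usage sketch for the Learner class above, running in the default non-server (TinyDB) mode; the spaCy model name is an assumption and the exact replies depend on the parse:

# Hypothetical driver for the Learner above; assumes spaCy and a small English model are installed.
import spacy

learner = Learner(spacy.load('en_core_web_sm'))
print(learner.respond("My favorite color is blue"))   # stores the fact, replies along the lines of "OK, I get it. ..."
print(learner.respond("What is my favorite color"))   # "wh-" question, answered from ~/.dragonfire_db.json
print(learner.respond("Forget my favorite color"))    # removes everything stored about the subject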
    def insert_new_profile(self, profile_record):
        from tinydb import TinyDB, Query

        path_to_db = join(self.ARBIE_dir, 'database/databases/client_profiles.json')
        db = TinyDB(path_to_db)
        db.insert(profile_record)
    try:
        input_var = int(input("1: Store Valid AP \n2: Disregard and Continue\n:"))
        if input_var > 0 and input_var <= 2:
            loop = False
    except ValueError:
        pass

if input_var == 1:
    #db.purge()
    #db.insert(S[input_var - 1])
    #if db.search((where('ssid') == ap["ssid"]) & (where('address') == str(ap["address"]))) == []:
    #    db.insert(ap)
    #else:
    #    print("This is already Stored in the database")
    if db.search((where('ssid') == ap["ssid"]) & (where('address') == str(ap["address"]))) == []:
        db.insert(S[input_var - 1])
    else:
        print("already Stored in the database")

    # print the whole database
    print(db.all())

#ch = channel_hop()
#_thread = threading.Thread(target=ch.run(debug = True, iface = "wlan4"))
#_thread.start()


for ap in db.all():
Example #42
0
    r = requests.get(url, headers=headers)

    x_rte = '//*[@id="shop-all-list"]//li/*[@class="txt"]'
    x_name = './/h4/text()'
    x_type = './/*[@class="tag"]/text()'
    x_address = './/*[@class="addr"]/text()'
    x_taste = './/*[@class="comment-list"]/span[1]/b//text()'
    x_enviroment = './/*[@class="comment-list"]/span[2]/b//text()'
    x_service = './/*[@class="comment-list"]/span[3]/b//text()'
    x_stars = ".//*[contains(@class, 'sml-rank-stars')]/@class"
    x_comments ='.//*[@class="comment"]/a[1]/b/text()'
    x_price ='.//*[@class="comment"]/a[2]/b/text()'

    tree = etree.HTML(r.text)
    rtes = tree.xpath(x_rte)
    print(len(rtes))
    for rte in rtes:
        data = {}
        data['name'] = rte.xpath(x_name)[0]
        data['type'] = rte.xpath(x_type)[0]
        data['address']= rte.xpath(x_address)[0]
        data['taste'] = rte.xpath(x_taste)[0] if len(rte.xpath(x_taste)) >0 else ''
        data['enviro'] = rte.xpath(x_enviroment)[0] if len(rte.xpath(x_enviroment)) >0 else ''
        data['service'] = rte.xpath(x_service)[0] if len(rte.xpath(x_service)) >0 else ''
        data['stars'] = rte.xpath(x_stars)[0].replace('sml-rank-stars sml-str', '')[0]
        data['comments'] = rte.xpath(x_comments)[0]
        data['price'] = rte.xpath(x_price)[0].replace(u'\uffe5','') if len(rte.xpath(x_price))>0 else ''
        db.insert(data)

    n+=1
def createExam():
    db = TinyDB("Exam/Class " + clas.get() + ".json")
    db.insert({"Date":datee.get(),"Month":Month.get(),"Subject":subj.get()})
    messagebox.showinfo('Welcome', 'EXAM SCHEDULED')
from datetime import datetime, timezone
import pytz
from tinydb import TinyDB, where
from scrapers import extractPrice
from mailer import Mailer
from credentials import login_, password_, secret_key
import threading
import calendar
from history import get_history, build_plot

db = TinyDB('database.json')
db2 = TinyDB('history.json')
initial_time = 0
if not db2.contains(where('initial_time')):
    start_secs = calendar.timegm(time.gmtime())
    db2.insert({'initial_time': start_secs})
    initial_time = start_secs
else:
    initial_time = db2.search(where('initial_time'))[0]['initial_time']

app = Flask(__name__)
app.secret_key = secret_key


def background_jobs():
    mailer2 = Mailer(login_, password_)
    while True:
        db = TinyDB('database.json')
        print("background_jobs")
        tz = pytz.timezone('Asia/Kolkata')
        kolkata_now = datetime.now(tz)
Example #45
0
class ViberWebhook:
    def __init__(self, appConfig, dbUsers = 'vb_users.json') -> None:
        http_tunnel = ngrok.connect()
        self.public_url = http_tunnel.public_url.replace('http', 'https')
        print('Public URL acquired: ' + self.public_url)
        self.usersDb = TinyDB(dbUsers)
        self.app = Flask(__name__)
        self.config = appConfig
        self.viber = Api(BotConfiguration(
            name = self.config.getProperty('Publishers.Viber.Name'),
            avatar = self.config.getProperty('Publishers.Viber.Avatar'),
            auth_token = self.config.getProperty('Publishers.Viber.Token')
        ))
        self.query = Query()
        
        ## Delayed webhook setup
        scheduler = sched.scheduler(time.time, time.sleep)
        scheduler.enter(5, 1, self.set_webhook, (self.viber,))
        t = threading.Thread(target=scheduler.run)
        t.start()

        self.app.add_url_rule('/', 'incoming', self.incoming, methods=['POST'])
        self.app.add_url_rule('/ctrl', '', self.control, methods=['POST', 'GET'])
        self.t_webApp = threading.Thread(target=self.flaskThread)
        self.t_webApp.setDaemon(True)
        
        print("Viber worker created.")
    
    def __del__(self):
        self.usersDb.close()
        
    def flaskThread(self):
        self.app.run(host='0.0.0.0', port=80, debug=False)

    def Run(self):
        self.t_webApp.run()

    def GetAdmins(self):
        admins = self.usersDb.search(self.query.admin == '1')
        return admins

    def NotifyAdmins(self, admins, message):
        for admin in admins:
            self.viber.send_messages(admin['id'], [ TextMessage(text = message) ])   

    def IsAdmin(self, user_id, admins):
        return next((admin for admin in admins if admin['id'] == user_id), None) is not None

    def Reboot(self):
        command = "/usr/bin/sudo /sbin/shutdown -r now"
        import subprocess
        process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
        output = process.communicate()[0]
        print(output)

    def RestartViber(self):
        command = "service Viber restart"
        import subprocess
        process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
        output = process.communicate()[0]
        print(output)

    def incoming(self):

        admins = self.GetAdmins()
        print(request.path)
        viber_request = self.viber.parse_request(request.get_data().decode('utf8'))

        if isinstance(viber_request, ViberMessageRequest):
            message = viber_request.message
            if isinstance(message, TextMessage):

                is_admin = self.IsAdmin(viber_request.sender.id, admins)
                if is_admin:
                    print("IsAdmin: True")
                
                ## HANDLE ADMIN REQUESTS
                usersListStr = ''
                if(message.text.strip() == "/ListUsers" and is_admin):
                    for user in self.usersDb.all():
                        usersListStr += user['name'] + '\n'
                    self.NotifyAdmins(admins, 'Korisnici: \n' + usersListStr)
                    return Response(status=200)                
                if(message.text.strip() == "/ListAdmins" and is_admin):
                    for user in self.usersDb.search(self.query.admin == '1'):
                        usersListStr += user['name'] + '\n'
                    self.NotifyAdmins(admins, 'Administratori: \n' + usersListStr)
                    return Response(status=200)
                if(message.text.strip() == "/GetPublicURL" and is_admin):
                    self.NotifyAdmins(admins, 'Javna adresa: \n' + self.public_url)
                    return Response(status=200)
                if(message.text.strip() == "/GetLocalIP" and is_admin):
                    self.NotifyAdmins(admins, 'Lokalna adresa: \n' + socket.gethostbyname(socket.gethostname()))
                    return Response(status=200)
                if(message.text.strip() == "/XRebootMe" and is_admin):
                    self.NotifyAdmins(admins, 'Rebooting...')
                    self.Reboot()
                    return Response(status=200)
                if(message.text.strip() == "/XRestartViberService" and is_admin):
                    self.NotifyAdmins(admins, 'Restarting Viber service...')
                    self.RestartViber()
                    return Response(status=200)

                UserQ = Query()

                # Handle standard requests
                if message.text.strip().lower() == 'stop':
                    self.usersDb.update({'active': '0'}, UserQ.id == viber_request.sender.id)
                else:
                    if len(self.usersDb.search(UserQ.id == viber_request.sender.id)) == 0:
                        self.usersDb.insert({'id': viber_request.sender.id, 'name': viber_request.sender.name, 'active': '1', 'admin': '0'})
                    else:
                        self.usersDb.update({'active': '1'}, UserQ.id == viber_request.sender.id)
                    self.viber.send_messages(viber_request.sender.id, [ TextMessage(text = 'Uspešna prijava! Pošalji STOP za odjavu.') ])
                    #self.viber.send_messages("/qNmzm5H8vXHIuuJAmJZvw==", [ TextMessage(text = 'Novi korisnik: ' + viber_request.sender.name) ])
                    self.NotifyAdmins(admins, 'Novi korisnik: ' + viber_request.sender.name)
        elif isinstance(viber_request, ViberConversationStartedRequest):
            UserQ = Query()
            #self.viber.send_messages(viber_request.user.id, [ TextMessage(text='Za prijavu pošaljite bilo kakvu poruku.') ])
            if len(self.usersDb.search(UserQ.id == viber_request.user.id)) == 0:
                self.usersDb.insert({'id': viber_request.user.id, 'name': viber_request.user.name, 'active': '1', 'admin': '0'})
            else:
                self.usersDb.update({'active': '0'}, UserQ.id == viber_request.user.id)
        elif isinstance(viber_request, ViberSubscribedRequest):
            UserQ = Query()
            self.viber.send_messages(viber_request.user.id, [ TextMessage(text='Za prijavu pošaljite bilo kakvu poruku.') ])
            if len(self.usersDb.search(UserQ.id == viber_request.user.id)) == 0:
                self.usersDb.insert({'id': viber_request.user.id, 'name': viber_request.user.name, 'active': '1', 'admin': '0'})
            else:
                self.usersDb.update({'active': '1'}, UserQ.id == viber_request.user.id)
        elif isinstance(viber_request, ViberUnsubscribedRequest):
            UserQ = Query()
            self.usersDb.update({'active': '0'}, UserQ.id == viber_request.user_id)
        elif isinstance(viber_request, ViberFailedRequest):
            logger.warn("client failed receiving message. failure: {0}".format(viber_request))

        return Response(status=200)


    def control(self):
        admins = self.GetAdmins()
        #data = request.get_data().decode('utf8')
        if(request.args.get('command') == 'users'):
            if(request.args.get('a') == '0'):
                usersListStr = ""
                for user in self.usersDb.all():
                    usersListStr += user['name'] + ';'
                return Response(status=200, response=usersListStr)
            else:
                usersListStr = ""
                for user in self.usersDb.search(self.query.admin == '1'):
                    usersListStr += user['name'] + ';'
                return Response(status=200, response=usersListStr)

    def set_webhook(self, viber):
        self.viber.set_webhook(self.public_url)  
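The repeated `len(self.usersDb.search(...)) == 0` checks in `incoming` can also be written with TinyDB's `contains`, which expresses the existence test directly. A small sketch of that pattern, with hypothetical stand-ins for the handler's locals:

from tinydb import TinyDB, Query

users = TinyDB('vb_users.json')
UserQ = Query()

def register_or_reactivate(sender_id, sender_name):
    # Same insert-or-update logic as the handler, using contains() for the existence check
    if not users.contains(UserQ.id == sender_id):
        users.insert({'id': sender_id, 'name': sender_name, 'active': '1', 'admin': '0'})
    else:
        users.update({'active': '1'}, UserQ.id == sender_id)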
Example #46
0
class Learner:
    def __init__(self, nlp):
        self.pronouns = collections.OrderedDict()
        self.pronouns['I'] = 'YOU'
        self.pronouns['ME'] = 'YOU'
        self.pronouns['MY'] = 'YOUR'
        self.pronouns['MINE'] = 'YOURS'
        self.pronouns['MYSELF'] = 'YOURSELF'
        self.pronouns['OUR'] = 'YOUR'
        self.pronouns['OURS'] = 'YOURS'
        self.pronouns['OURSELVES'] = 'YOURSELVES'
        self.pronouns['WE'] = 'YOU'
        self.pronouns['US'] = 'YOU'

        self.inv_pronouns = collections.OrderedDict()
        self.inv_pronouns['YOU'] = 'I'
        self.inv_pronouns['YOUR'] = 'MY'
        self.inv_pronouns['YOURS'] = 'MINE'
        self.inv_pronouns['YOURSELF'] = 'MYSELF'
        self.inv_pronouns['YOURSELVES'] = 'OURSELVES'

        self.auxiliaries = collections.OrderedDict()
        self.auxiliaries['AM'] = 'ARE'
        self.auxiliaries['\'M'] = ' ARE'
        self.auxiliaries['WAS'] = 'WERE'

        self.inv_auxiliaries = collections.OrderedDict()
        self.inv_auxiliaries['ARE'] = 'AM'
        self.inv_auxiliaries['WERE'] = 'WAS'

        home = expanduser('~')
        self.db = TinyDB(home + '/.db.json')
        self.nlp = nlp
        self.db_session = None

    def response(self, com, user_id=None):
        """
        Method to respond to the user's input/command using the learning ability
        """

        is_public = True
        com = self.clean(com)
        doc = self.nlp(com)
        subject = []
        types = []
        types.append("")
        for np in doc.noun_chunks:
            types.append(np.root.dep_)
            np_text, is_public = self.detect_pronoun(np.text)
            if np.root.dep_ == 'pobj' and types[-2] == 'nsubj':
                subject.append(np.root.head.text)
                subject.append(np_text)
            if np.root.dep_ == 'nsubj' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np_text)
            if np.root.dep_ == 'attr' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np_text)
            if np.root.dep_ == 'dobj' and types[-2] not in [
                    'pobj', 'nsubj'
            ] and np.root.tag_ not in ['WDT', 'WP', 'WP$', 'WRB']:
                subject.append(np_text)
        subject = [x.strip() for x in subject]
        subject = ' '.join(subject)
        if subject:
            if subject.upper() in self.inv_pronouns:
                return ''
            wh_found = False
            for word in doc:
                if word.tag_ in ['WDT', 'WP', 'WP$', 'WRB']:
                    wh_found = True
            if wh_found:
                straight = self.db_get(subject,
                                       is_public=is_public,
                                       user_id=user_id)
                if straight is None:
                    return self.db_get(subject,
                                       is_public=is_public,
                                       user_id=user_id,
                                       invert=True)
                return straight
            else:
                verb_found = False
                verbtense = None
                clause = []
                verbs = []
                for word in doc:
                    if verb_found:
                        if word.pos_ != 'PUNCT':
                            clause.append(word.text)
                    if word.pos_ == 'VERB' and word.is_stop and not verb_found:
                        verb_found = True
                        verbtense = word.text
                    if word.pos_ == 'VERB':
                        verbs.append(word.text)
                clause = [x for x in clause]
                clause = ' '.join(clause).strip()

                if any(verb in verbs for verb in self.upper_capitalize(
                    ['forget', 'remove', 'delete', 'update'])):
                    return self.db_delete(subject,
                                          is_public=is_public,
                                          user_id=user_id)

                if any(verb in verbs for verb in self.upper_capitalize(
                    ['define', 'explain', 'tell', 'describe'])):
                    return self.db_get(subject,
                                       is_public=is_public,
                                       user_id=user_id)

                if verbtense:
                    return self.db_upsert(subject,
                                          verbtense,
                                          clause,
                                          com,
                                          is_public=is_public,
                                          user_id=user_id)

    def db_get(self, subject, invert=False, is_public=True, user_id=None):
        """
        Method to get a record from the database
        """

        if invert:
            result = self.db.search(Query().clause == subject)
        else:
            result = self.db.search(Query().subject == subject)
        if result:
            dictionary = {}
            for row in result:
                if row['verbtense'] not in dictionary:
                    dictionary[row['verbtense']] = []
                if row['clause'] not in dictionary[row['verbtense']]:
                    dictionary[row['verbtense']].append(row['clause'])
            if invert:
                answer = row['subject']
            else:
                answer = subject
            first_verbtense = False
            for key, value in dictionary.items():
                if not first_verbtense:
                    answer += ' ' + str(key)
                    first_verbtense = True
                else:
                    answer += ', ' + str(key)
                first_clause = False
                for clause in value:
                    if not first_clause:
                        answer += ' ' + clause
                        first_clause = True
                    else:
                        answer += ' and ' + clause
            return self.mirror(answer)
        else:
            return None

    def db_upsert(self,
                  subject,
                  verbtense,
                  clause,
                  com,
                  is_public=True,
                  user_id=None):
        """
        Method to insert(or update) a record to the database
        """

        if not self.db.search((Query().subject == subject)
                              & (Query().verbtense == verbtense)
                              & (Query().clause == clause)):
            self.db.insert({
                'subject': subject,
                'verbtense': verbtense,
                'clause': clause
            })
        return 'OK, I get it. ' + self.mirror(com)

    def db_delete(self, subject, is_public=True, user_id=None):
        """
        Method to delete a record from the database
        """

        if self.db.remove(Query().subject == self.fix_pronoun(subject)):
            return 'OK, I forgot everything I know about ' + self.mirror(
                subject)
        else:
            return 'I don\'t even know anything about ' + self.mirror(subject)

    def mirror(self, answer):
        """
        Method to mirror the answer (for example: I'M to YOU ARE).
        """

        result = []
        types = []
        types.append('')
        doc = self.nlp(answer)
        for token in doc:
            types.append(token.lemma_)
            if token.lemma_ == '-PRON-':
                if token.text.upper() in self.pronouns:
                    result.append(
                        self.pronouns[token.text.upper()].lower().strip())
                    continue
                if token.text.upper() in self.inv_pronouns:
                    result.append(self.inv_pronouns[
                        token.text.upper()].lower().strip())
                    continue
            if (token.lemma_ == 'be'
                    or token.dep_ == 'aux') and types[-2] == '-PRON-':
                if token.text.upper() in self.auxiliaries:
                    result.append(
                        self.auxiliaries[token.text.upper()].lower().strip())
                    continue
                if token.text.upper() in self.inv_auxiliaries:
                    result.append(self.inv_auxiliaries[
                        token.text.upper()].lower().strip())
                    continue
            result.append(token.text.strip())
        for i in range(len(result)):
            if result[i] == 'i':
                result[i] = 'I'
        result = ' '.join(result)
        return result.replace(" '", "'")

    def fix_pronoun(self, subject):
        """
        Pronoun fixer to handle situations like YOU and YOURSELF.
        """

        if subject == 'yourself':
            return 'you'
        elif subject == 'Yourself':
            return 'You'
        elif subject == 'YOURSELF':
            return 'YOU'
        else:
            return subject

    def detect_pronoun(self, noun_chunk):
        """
        Determine whether user is talking about himself/herself or some other entity.
        """

        np_text = ''
        is_public = True
        doc = self.nlp(noun_chunk)
        for token in doc:
            if token.lemma_ == '-PRON-':
                np_text += ' ' + token.text.lower()
                is_public = False
            else:
                np_text += ' ' + token.text
        return np_text.strip(), is_public

    def upper_capitalize(self, array):
        """
        Return capitalized and uppercased versions of the strings inside the given array
        """

        result = []
        for word in array:
            result.append(word)
            result.append(word.capitalize())
            result.append(word.upper())
        return result

    def clean(self, com):
        """
        Return a version of the user's command cleaned of punctuation, symbols, etc.
        """

        doc = self.nlp(com)
        for token in doc:
            if token.pos_ in ['PUNCT', 'SYM']:
                com = com.replace(token.tag_, '')
        return com
Example #47
0
option = st.sidebar.selectbox('Menu:', ('Text to summarize', 'Summarization'))

db = TinyDB('documents.json').table('documents')

if option == 'Text to summarize':
    st.header('Enter an URL or a document to summarize')
    st.markdown('*****')
    new_url = st.text_input('Enter your URL:', value='')
    new_text = st.text_area('Enter your document:', value='')
    if new_url != '':
        article = NewsPlease.from_url(new_url)
        new_text = article.maintext
        if new_text is not None and new_text != '':
            db.insert({
                'hash': hash(new_text),
                'text': new_text,
            })
            rerun()
    elif new_text != '':
        db.insert({
            'hash': hash(new_text),
            'text': new_text,
        })
        rerun()
    st.markdown('*****')
    for passage in db.all():
        st.write(passage['text'])
        if st.button('Remove', key='remove{}'.format(passage['hash'])):
            db.remove(doc_ids=[db.get(Query().hash == passage['hash']).doc_id])
            rerun()
        st.write('')
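The snippet above keeps passages in a named table (`documents`) rather than TinyDB's default table, so the same JSON file can hold other tables alongside it. A tiny sketch of that layout; the second table and its fields are purely illustrative:

from tinydb import TinyDB

store = TinyDB('documents.json')
docs = store.table('documents')        # the table used above
summaries = store.table('summaries')   # a second, independent table in the same file

doc_id = docs.insert({'hash': 123, 'text': 'example passage'})
summaries.insert({'source_doc': doc_id, 'summary': 'example summary'})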
Example #48
0
class FileLogSchemaModel:

    query = Query()

    def __init__(self, db_path):
        dbpath = os.path.join(db_path, 'filelogdb.json')
        self._db = TinyDB(dbpath)

    def req_fields_json(self, dict_result):
        req_items = ['flid', 'filename', 'filesize', 'filetype', 'bizdomain', 'uploadtm', 'ingesttm', 'filedesc', 'filedelim', 'fileencode', 'tags', 'state', 'nodename']
        return {key: value for key, value in dict_result.items()
                if key in req_items}

    def search_op(self, req_dict):
        ####exists = self.search_res(req_dict['db']) && self.search_res(req_dict['schema']) && self.search_res(req_dict['table']) && self.search_res(req_dict['fieldname'])
        exists = self._db.search((FileLogSchemaModel.query.filename == req_dict['filename']) & (FileLogSchemaModel.query.filetype == req_dict['filetype']))

        return bool(exists)

    def get_allrecs(self):
        return self._db.search(FileLogSchemaModel.query.flid > 0)

    
    def search_res(self, filename):
        return self._db.search(FileLogSchemaModel.query.filename == filename)

    
    def search_file(self, filename):
        ctentry = Query()
        #res_info = self._db.search(ctentry.fieldname == fieldname)
        res_info = self._db.search(ctentry.filename.search(filename, flags=re.IGNORECASE))
        return res_info

    def search_file_byid(self, fileid):
        ctentry = Query()
        res_info = self._db.search(ctentry.flid.search(fileid))
        return res_info

    def search_file_bynode(self, node):
        ctentry = Query()
        res_info = self._db.search(ctentry.nodename.search(node))
        return res_info
    
    def get_total_recs(self):
        ctqry = Query()
        dlen = self._db.count(ctqry.flid > 0)
        return dlen
            
    def insert_op(self, req_dict):
        if not self.search_op(req_dict):
            rec_id = len(self._db)
            req_dict['flid'] = rec_id + 1
            self._db.insert(req_dict)
            return self._db.all()
        return "None_Insert"

    def delete_op(self, req_dict):
        if self.search_op(req_dict):
            self._db.remove(where('fieldname') == req_dict['fieldname'])
            return self._db.all()
        return "None_Delete"

    def update_op(self, old_fieldname, req_dict):
        if not self.search_res(old_fieldname):
            return False
        self._db.update({'db': req_dict['db'],
                         'schema': req_dict['schema'],
                         'table': req_dict['table'],
                         'fieldname': req_dict['fieldname'],
                         'fielddtype': req_dict['fielddtype']
                         },
                        FileLogSchemaModel.query.fieldname == old_fieldname)

        return self._db.all()
     
    def update_state_bynode(self, nodename, state):
        if not self.search_file_bynode(nodename):
            return False
        self._db.update({'state': state
                         },
                        FileLogSchemaModel.query.nodename == nodename)

        return True

    def stop_db(self):
        self._db.close()
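A short, hypothetical driver for the model above; the path and the record fields are made up, and only the insert, regex-search and count paths are exercised:

model = FileLogSchemaModel('/tmp')
record = {'filename': 'sales_2020.csv', 'filetype': 'csv', 'bizdomain': 'finance',
          'filesize': 1024, 'uploadtm': '2020-01-01', 'nodename': 'node-1', 'state': 'NEW'}
model.insert_op(record)            # assigns an flid and inserts if no matching filename/filetype exists
print(model.search_file('sales'))  # case-insensitive regex match on the filename
print(model.get_total_recs())      # number of records with flid > 0
model.stop_db()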
Example #49
0
from tinydb import TinyDB, Query
from tinydb import where
import pandas as pd

table = pd.read_excel('Dados_Tetes.xlsx')

table

table.fillna(0, inplace=True)

db = TinyDB('db2.json')
Ft = Query()

header = table.columns
header

dados_DB = {}
for a in header:
  print(a)
  for b in table[a]:
    DB = {a:b}
    db.insert(DB)
    print(a,b)

db.all()

result_db = db.search(Ft.Real != 'null')
result_db


db.search(where('LAST_NAME') == 'Eller')
Example #50
0
cat db.json | python -m json.tool

Pretty to compact conversion:

  cat db.json.pretty | jq -c

'''

import os, time
from tinydb import TinyDB, Query

if os.path.exists('dbtest.json'):
    os.unlink('dbtest.json')

db = TinyDB('dbtest.json')
docid = db.insert({'type': 'startcnt', 'val': 0})  # insert document
docid = db.insert({'type': 'mandelay', 'val': 5})

for item in db:
    print("0>", item)

ablaeufe = db.table('Ablaufliste', cache_size=0)  # disable cache

n = 0
duration = 20
# while n < 20:
while True:
    s = input("{}> ".format(n))
    doc_id = ablaeufe.insert({
        'start': '20-05-11T22:00',
        'duration': duration,
def sendTQadmin(form):
    db = TinyDB("QueryAdminStaff.json")
    db.insert({'ID': q.ID(),'Query':txt_query.get()})
    form.destroy()
    teacherFrom()
# coding: utf-8

# In[1]:


from tinydb import TinyDB, Query
db = TinyDB('db.json')


# In[2]:


db.insert({'type': 'apple', 'count': 7})
db.insert({'type': 'peach', 'count': 3})
db.insert({'type': 'banana', 'count': 5})


# In[3]:


Fruit = Query()
db.search (Fruit.type == 'apple')


# In[4]:


db.update ({'count': 8}, Fruit.type == 'apple')
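For reference, TinyDB's write operations also return document IDs: `insert` returns the new document's `doc_id` and `update` returns the list of `doc_id`s it modified, which can be fed back into id-based lookups. A small continuation of the same session; the pear record is made up:

pear_id = db.insert({'type': 'pear', 'count': 2})         # insert() returns the new doc_id
updated = db.update({'count': 9}, Fruit.type == 'apple')  # update() returns the updated doc_ids
db.get(doc_id=pear_id)                                    # fetch a document back by its id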

Example #53
0
#!/usr/bin/env python3
import time
from tinydb import TinyDB
import os
import wikiquote
import random
from datetime import datetime

db = TinyDB('db.json')

def beep():
    print ('\a')
    time.sleep(1)
    print ('\a')
    time.sleep(1)
    print ('\a')

for i in range(5):
    os.system("clear")
    t = input("Time of the session: ")
    start = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    time.sleep(int(t)*60)
    end = datetime.now().strftime('%Y-%m-%d %H:%M:%S')   
    beep()
    print (random.choice(wikiquote.quotes('Work', lang='en'))) 
    log = input("What did you do in this session? ")
    db.insert({'start': str(start), 'end': str(end), 'log': log})

Example #54
0
class DatabaseConnection(object):

    db, user, username, result_dir, site_dirs = None, None, None, None, None

    profile_urls = {
        'twitter': 'https://mobile.twitter.com/',
        'facebook': 'https://www.facebook.com/',
        'instagram': 'https://www.instagram.com/',
        'reddit': 'https://www.reddit.com/user/'
    }

    def __init__(self, username):
        # Make a database connection and return it
        self.db = TinyDB(ROOT_DIR / "scripts" / "database" / "db.json", indent=2, storage=CachingMiddleware(JSONStorage))
        self.user = Query()
        self.username = username
        self.result_dir = ROOT_DIR / "scripts" / "results" / username

    def __enter__(self):
        # Return the database connection
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Make sure the database connection gets closed
        self.db.close()

    # Check if user exists in DB
    def check_user(self):
        if self.result_dir.exists():
            return self.db.contains(self.user.name == self.username)
        else:
            return False

    # Get user data from DB
    def get_data(self):
        if self.result_dir.exists():
            if self.check_user():
                return self.db.get(self.user.name == self.username)

    # Update user data, insert if does not exist
    def update_user(self):

        def set_nested(path, val):
            def transform(doc):
                current = doc
                for key in path[:-1]:
                    current = current[key]
                current[path[-1]] = val

            return transform

        def update_data():
            if self.check_user():
                doc_id = self.db.get(self.user.name == self.username).doc_id

                self.site_dirs = self.result_dir.glob('*/')

                for site in self.site_dirs:
                    site_name = str(site.parts[-1])

                    # Update data file paths
                    data_files = site.glob('*.json')
                    for file in data_files:
                        filename = str(file.parts[-1].split('.json')[0])

                        self.db.update(set_nested([
                            'sites_found',
                            site_name,
                            'profile_url'
                        ], self.profile_urls[site_name] + self.username), doc_ids=[doc_id])

                        self.db.update(set_nested([
                            'sites_found',
                            site_name,
                            filename
                        ], str(file)), doc_ids=[doc_id])

                    # Update image file paths for instagram
                    if site_name == 'instagram':
                        image_files = list(filter(lambda p: p.suffix in ['.jpg', '.png'], site.glob('*.*')))
                        image_files.sort(key=os.path.getmtime, reverse=True)

                        self.db.update(set_nested([
                            'sites_found',
                            site_name,
                            'photos',
                        ], {}), doc_ids=[doc_id])

                        for i, file in enumerate(image_files):
                            self.db.update(set_nested([
                                'sites_found',
                                site_name,
                                'photos',
                                i
                            ], str(file)), doc_ids=[doc_id])

        def insert_user():
            self.site_dirs = self.result_dir.glob('*/')

            self.db.insert({
                'name': self.username,
                'sites_found': {
                    str(site.parts[-1]): {
                        'dir_url': str(site),
                    } for site in self.site_dirs
                }
            })

        if self.result_dir.exists():
            if not self.check_user():
                insert_user()

            update_data()

    # Remove user
    def remove_user(self, flag='r'):

        if self.result_dir.exists():
            if self.check_user():
                self.site_dirs = self.result_dir.glob('*/')

                doc_id = self.db.get(self.user.name == self.username).doc_id

                if flag == 'r':
                    self.db.remove(doc_ids=[doc_id])

                elif flag == 'rd':
                    self.db.remove(doc_ids=[doc_id])

                    for site in self.site_dirs:
                        files = [file for file in Path(site).iterdir()]

                        for file in files:
                            if file.is_file():
                                file.unlink()

                        if not any(site.iterdir()):
                            site.rmdir()

                    if not any(self.result_dir.iterdir()):
                        self.result_dir.rmdir()

    # Re-index DB in case of any operation failure
    def reindex_db(self):
        data_dir = ROOT_DIR / "scripts" / "results"

        for user_dir in data_dir.iterdir():
            if user_dir.is_dir():
                uname = str(user_dir.parts[-1])

                self.username = uname
                self.result_dir = ROOT_DIR / "scripts" / "results" / uname

                if not self.check_user():
                    self.update_user()
                else:
                    self.remove_user()
                    self.update_user()
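The nested-update trick above, passing a callable to db.update(), also works outside the class; a minimal standalone sketch (file name and sample data assumed for illustration):

from tinydb import TinyDB, Query

def set_nested(path, val):
    # Return a transform that walks `path` inside a document and sets the leaf value.
    def transform(doc):
        current = doc
        for key in path[:-1]:
            current = current[key]
        current[path[-1]] = val
    return transform

db = TinyDB('demo.json')  # hypothetical path
db.insert({'name': 'alice', 'sites_found': {'github': {'dir_url': '/tmp/alice/github'}}})
db.update(set_nested(['sites_found', 'github', 'profile_url'],
                     'https://github.com/alice'),
          Query().name == 'alice')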
class Game:
    SWITCHES = None

    BLOCK_SIZE = 32

    COLUMNS = 8
    ROWS = 8

    FPS = None

    COLORS = [
        # 0 - Black
        (0, 0, 0),
        # 1 - Green
        (0, 255, 0),
        # 2 - Red
        (255, 0, 0),
        # 3 - Purple
        (128, 0, 128),
        # 4 - Blue
        (0, 0, 255),
        # 5 - Orange
        (255, 165, 0),
        # 6 - Cyan
        (0, 255, 255),
        # 7 - Yellow
        (255, 255, 0),
        # 8 - Dark Grey
        (35, 35, 35),
        # 9 - White
        (255, 255, 255)
    ]

    SHAPES = [
        # Bird
        [
            [7],
        ],

        # Pipe
        [
            [1],
        ],
    ]

    WIDTH = None
    HEIGHT = None

    COUNTDOWN = None

    SCORE_INCREMENT = None

    SCORE = 0
    PIPES = 0

    INTERVAL = None
    INTERVAL_INCREMENT = None

    LEVEL = 1
    LEVEL_INCREMENT = None

    PAUSED = False

    GAMEOVER = False

    BACKGROUND_GRID = None

    BIRD_MOVED = True

    pygame = None
    pygame_font = None
    pygame_screen = None
    unicornhat = None
    db = None

    board = None
    bird = None
    pipe = None

    def __init__(self,
                 switches,
                 columns,
                 rows,
                 fps,
                 countdown,
                 interval,
                 score_increment,
                 level_increment,
                 interval_increment,
                 pygame_instance=None):
        self.SWITCHES = switches
        self.COLUMNS = columns
        self.ROWS = rows
        self.FPS = fps
        self.COUNTDOWN = countdown
        self.INTERVAL = interval
        self.SCORE_INCREMENT = score_increment
        self.LEVEL_INCREMENT = level_increment
        self.INTERVAL_INCREMENT = interval_increment

        if pygame_instance is None:
            self.pygame = pygame
        else:
            self.pygame = pygame_instance

        self.gpio = Gpio

        self.unicornhat = UnicornHat
        self.db = TinyDB('data/database.json')

        try:
            self.WIDTH = self.BLOCK_SIZE * self.COLUMNS + 150
            self.HEIGHT = self.BLOCK_SIZE * self.ROWS

            self.BACKGROUND_GRID = [[
                8 if x % 2 == y % 2 else 0 for x in xrange(self.COLUMNS)
            ] for y in xrange(self.ROWS)]

            self.pygame.init()
            self.pygame.key.set_repeat(0, 0)
            self.pygame_font = self.pygame.font.Font(
                self.pygame.font.get_default_font(), 12)
            self.pygame_screen = self.pygame.display.set_mode(
                (self.WIDTH, self.HEIGHT), 0, 24)
            self.pygame.event.set_blocked(self.pygame.MOUSEMOTION)

            self.unicornhat.rotation(180)
            self.unicornhat.brightness(0.4)

            self.board = Board(self.COLUMNS, self.ROWS)
            self.__generate_bird()
            self.__generate_pipe()
        except AttributeError:
            print("[Game][error] An error occurred initialising game")

    def start(self, run_once=False):
        print("[Game][info] Starting game")

        try:
            pygame_wait = True

            while pygame_wait:
                for event in self.pygame.event.get():
                    if event.type == self.pygame.KEYDOWN:
                        if event.key == self.pygame.K_RETURN:
                            pygame_wait = False
                        elif event.key == self.pygame.K_ESCAPE:
                            self.quit()

                self.pygame_screen.fill(self.COLORS[0])
                self.__display_message("Press to start")
                self.pygame.display.update()

            self.unicornhat.clear()
        except AttributeError:
            print("[Game][error] An error occurred starting game")

        self.__countdown()
        self.__loop()
        self.finish()

        if run_once is not True:
            self.start()

    def __countdown(self):
        print("[Game][info] Starting game countdown")

        try:
            seconds = 0

            while True:
                self.pygame_screen.fill(self.COLORS[0])
                remaining = (self.COUNTDOWN - seconds)

                if seconds > self.COUNTDOWN:
                    break

                if seconds == self.COUNTDOWN:
                    self.__display_message("Go!")
                else:
                    self.__display_message("%d!" % remaining)

                seconds += 1
                self.unicornhat.clear()
                self.pygame.display.update()
                self.pygame.time.wait(1000)

            self.pygame_screen.fill(self.COLORS[0])
            self.unicornhat.clear()
        except AttributeError:
            print("[Game][error] An error occurred starting game countdown")

    def __loop(self):
        print("[Game][info] Starting game loop")

        try:
            self.pygame.time.set_timer(pygame.USEREVENT + 1, self.INTERVAL)

            key_actions = {
                'ESCAPE': lambda: self.quit(),
                'UP': lambda: self.__direction_up(),
                'p': lambda: self.toggle_pause(),
            }

            pygame_clock = self.pygame.time.Clock()

            while not self.GAMEOVER:
                self.unicornhat.clear()
                self.pygame_screen.fill(self.COLORS[0])

                if self.PAUSED:
                    self.__display_message("Paused")
                else:
                    self.__draw_line(((self.BLOCK_SIZE * self.COLUMNS) + 1, 0),
                                     ((self.BLOCK_SIZE * self.COLUMNS) + 1,
                                      (self.HEIGHT - 1)), self.COLORS[9])

                    self.__display_message(
                        "Score: %d\n\nLevel: %d\n\nPipes: %d" %
                        (self.SCORE, self.LEVEL, self.PIPES),
                        ((self.BLOCK_SIZE * self.COLUMNS) + self.BLOCK_SIZE,
                         2), self.COLORS[9], self.COLORS[0], False)

                    self.__draw_matrix(self.BACKGROUND_GRID, (0, 0), None,
                                       False)
                    self.__draw_matrix(self.board.coordinates(), (0, 0), None,
                                       False)
                    self.__draw_matrix(self.pipe.coordinates(),
                                       (self.pipe.x(), self.pipe.y()))
                    self.__draw_matrix(
                        self.bird.coordinates(self.COLUMNS, self.ROWS), (0, 0))

                self.pygame.display.update()

                for event in self.pygame.event.get():
                    if event.type == self.pygame.USEREVENT + 1:
                        self.__move()
                    elif event.type == self.pygame.QUIT:
                        self.quit()
                    elif event.type == self.pygame.KEYDOWN:
                        for key in key_actions:
                            if event.key == getattr(self.pygame, "K_" + key):
                                key_actions[key]()

                pygame_clock.tick(self.FPS)
        except AttributeError as e:
            print("[Game][error] An error occurred during game loop")
            print(e)

    def __generate_bird(self):
        print("[Game][info] Generating bird")

        self.bird = Bird(self.SHAPES[0], 1, int(math.floor(self.ROWS / 3)))

    def __generate_pipe(self):
        print("[Game][info] Generating pipe")

        offset_x, offset_y = (self.COLUMNS - 1, 0)
        gap = randint(0, 4)
        coordinates = []

        for x in range(1):
            for y in range(self.ROWS):
                rows = []

                if (y < gap) or (y > gap + 2):
                    rows.append(self.SHAPES[1][0][0])
                else:
                    rows.append(0)

                coordinates.append(rows)

        self.pipe = Pipe(coordinates, offset_x, offset_y)

    def __display_message(self,
                          message,
                          coordinates=None,
                          color=COLORS[9],
                          background_color=COLORS[0],
                          unicornhat=True):
        print("[Game][info] Displaying message")

        if unicornhat:
            self.__scroll_message(message,
                                  text_colour=color,
                                  scroll_speed=0.05)

        for i, line in enumerate(message.splitlines()):
            message_image = self.pygame_font.render(line, False, color,
                                                    background_color)

            if coordinates is not None:
                position_x, position_y = coordinates
            else:
                message_image_center_x, message_image_center_y = message_image.get_size()
                message_image_center_x //= 2
                message_image_center_y //= 2
                position_x = self.WIDTH // 2 - message_image_center_x
                position_y = self.HEIGHT // 2 - message_image_center_y

            self.pygame_screen.blit(message_image,
                                    (position_x, position_y + i * 22))

    def __scroll_message(self,
                         message,
                         text_colour=COLORS[9],
                         scroll_speed=0.05):
        r, g, b = text_colour
        scroll_rows = [[0] * 8] * 8

        for character in message:
            if character in font_dictionary:
                character_rows = font_dictionary[character]
            else:
                character_rows = font_dictionary['-']
            for i in range(8):
                scroll_rows[i] = scroll_rows[i] + character_rows[i]
                scroll_rows[i] += [0]

        for i in range(8):
            scroll_rows[i] += [0] * 8

        for scroll_position in range(len(scroll_rows[0]) - 8):
            for y in range(8):
                for x in range(8):
                    # Light the pixel only where the scrolled text bitmap is set.
                    if scroll_rows[y][x + scroll_position]:
                        self.unicornhat.set_pixel(x, y, r, g, b)
                    else:
                        self.unicornhat.set_pixel(x, y, 0, 0, 0)

            self.unicornhat.show()
            # time.sleep(scroll_speed)

    def __draw_line(self,
                    start_position,
                    end_position,
                    color=COLORS[9],
                    unicornhat=True):
        print("[Game][info] Drawing line")

        if unicornhat:
            start_x, start_y = start_position
            end_x, end_y = end_position

            for i in xrange(start_y, end_y):
                for j in xrange(start_x, end_x):
                    if start_x == end_x or start_y == end_y:
                        self.unicornhat.set_pixel(j, i, color)

        self.pygame.draw.line(self.pygame_screen, color, start_position,
                              end_position)

    def __draw_matrix(self, matrix, offset, color=None, unicornhat=True):
        print("[Game][info] Drawing matrix")

        offset_x, offset_y = offset

        for y, row in enumerate(matrix):
            for x, val in enumerate(row):
                if val:
                    if color is None:
                        shape_color = self.COLORS[val]
                    else:
                        shape_color = color

                    if unicornhat:
                        self.unicornhat.set_pixel((offset_x + x),
                                                  (offset_y + y), shape_color)

                    self.pygame.draw.rect(
                        self.pygame_screen, shape_color,
                        self.pygame.Rect((offset_x + x) * self.BLOCK_SIZE,
                                         (offset_y + y) * self.BLOCK_SIZE,
                                         self.BLOCK_SIZE, self.BLOCK_SIZE), 0)

    def __count_clear_pipes(self, pipes):
        print("[Game][info] Counting cleared pipes")

        if pipes > 0:
            self.PIPES += pipes
            self.SCORE += self.SCORE_INCREMENT * self.LEVEL

        if self.PIPES >= self.LEVEL * self.LEVEL_INCREMENT:
            self.LEVEL += 1
            delay = self.INTERVAL - self.INTERVAL_INCREMENT * (self.LEVEL - 1)
            delay = 100 if delay < 100 else delay
            self.pygame.time.set_timer(self.pygame.USEREVENT + 1, delay)

    def __move(self):
        print("[Game][info] Moving bird %s" % (self.bird.direction()))

        if not self.GAMEOVER and not self.PAUSED:
            new_x, new_y = self.bird.position()
            pipe_x, pipe_y = self.pipe.position()

            if self.bird.direction() == self.bird.DIRECTION_UP:
                new_y = self.bird.y() - 1
                self.bird.set_direction(self.bird.DIRECTION_DOWN)
            elif self.bird.direction() == self.bird.DIRECTION_DOWN:
                new_y = self.bird.y() + 1

            cleared_pipes = 0

            if pipe_x - 1 >= 0:
                self.pipe.set_position((pipe_x - 1, pipe_y))
            else:
                cleared_pipes += 1
                self.__generate_pipe()

            if self.board.check_collision(self.pipe, (new_x, new_y)):
                self.GAMEOVER = True

                return False

            self.bird.set_position((new_x, new_y))
            self.__count_clear_pipes(cleared_pipes)
            self.BIRD_MOVED = True

            return True

    def __direction_up(self):
        print("[Game][info] Event direction up")

        if self.BIRD_MOVED is True:
            self.bird.set_direction(self.bird.DIRECTION_UP)
            self.BIRD_MOVED = False

    def __direction_down(self):
        print("[Game][info] Event direction down")

        if self.BIRD_MOVED is True:
            self.bird.set_direction(self.bird.DIRECTION_DOWN)
            self.BIRD_MOVED = False

    def toggle_pause(self):
        print("[Game][info] Toggling paused state")

        self.PAUSED = not self.PAUSED

    def get_score(self):
        print("[Game][info] Calculating score")

        return self.SCORE

    def print_score(self, high_score=False):
        print("[Game][info] Printing score")

        score = self.get_score()

        try:
            self.unicornhat.clear()
            self.pygame_screen.fill(self.COLORS[0])

            if high_score:
                self.__display_message("Game Over!\n\nHigh score: %d" % score)
                self.pygame.display.update()
            else:
                self.__display_message("Game Over!\n\nYour score: %d!" %
                                       self.get_score())
                self.pygame.display.update()

            self.pygame.time.wait(3000)
        except AttributeError:
            print("[Game][error] An error occurred printing score")

    def finish(self):
        print("[Game][info] Finishing game")

        score = self.get_score()

        self.pygame.display.update()

        if self.db.contains(Query().score >= score):
            self.print_score()
        else:
            self.print_score(True)

        self.db.insert({'score': score})
        self.reset()

    def quit(self):
        print("[Game][info] Quitting game")

        self.pygame_screen.fill(self.COLORS[0])
        self.unicornhat.clear()
        self.__display_message("Exiting...")
        self.pygame.display.update()
        sys.exit()

    def reset(self):
        print("[Game][info] Resetting game")

        self.PAUSED = False
        self.GAMEOVER = False
        self.SCORE = 0
        self.PIPES = 0
        self.LEVEL = 1
        self.BIRD_MOVED = True

        self.board = Board(self.COLUMNS, self.ROWS)
        self.bird = None
        self.pipe = None
        self.__generate_bird()
        self.__generate_pipe()

        self.unicornhat.clear()
        self.pygame.time.set_timer(pygame.USEREVENT + 1, 0)
        self.pygame.display.update()

    def cleanup(self):
        print("[Game][info] Game clean up")

        try:
            self.unicornhat.clear()
            self.pygame_screen.fill(self.COLORS[0])
            self.pygame.display.update()
            self.pygame.quit()
        except AttributeError:
            print("[Game][error] An error occurred cleaning up")

    def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
        print("[Game][info] Game exit")

        self.cleanup()
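The finish() method above decides between a normal and a high-score message with a single TinyDB query; a minimal standalone sketch of that check (database path assumed):

from tinydb import TinyDB, Query

db = TinyDB('data/database.json')  # path assumed for illustration
score = 42

# contains() is True if any stored document already has an equal or higher score.
if db.contains(Query().score >= score):
    print('Game over, score: %d' % score)
else:
    print('New high score: %d!' % score)

db.insert({'score': score})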
Example #56
0
    def _serialize_player(cls, player):
        db_players = TinyDB('ChessTournaments/models/database/db_players.json')
        return db_players.insert(player.serialize())
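insert() returns the new document's doc_id, so the serialized player can be read back later with get(); a short sketch under that assumption (the player payload is invented for illustration):

from tinydb import TinyDB

db_players = TinyDB('ChessTournaments/models/database/db_players.json')
doc_id = db_players.insert({'first_name': 'Magnus', 'elo': 2830})  # hypothetical payload
player = db_players.get(doc_id=doc_id)
print(player)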
Example #57
0
    def addValidatedTransaction(self, _transaction, _senderAddress,
                                _recipientAddress, _balance, _totalAmount,
                                _amount):

        new_port = newPort

        db2 = TinyDB(f"./databaseTransaction/{new_port}walletBalance.db")
        db3 = TinyDB(f"./databaseTransaction/{new_port}blockChain.db")
        db = TinyDB(f"./databaseTransaction/{new_port}pendingTransaction.db")

        date = datetime.now()
        timeStamp = date.strftime("%c")
        transaction = Query()
        userBalance = db2.search(transaction.senderAddress == _senderAddress)

        receiverBalance = db2.search(
            transaction.senderAddress == _recipientAddress)

        if userBalance != [] or len(userBalance) > 1:
            userBalanceDict = userBalance[-1]
            if receiverBalance != [] or len(receiverBalance) > 1:
                receiverBalanceDict = receiverBalance[-1]
                userBalanceDict['balance'] = float(
                    userBalanceDict['balance']) - float(_totalAmount)

                receiverBalanceDict['balance'] = float(
                    receiverBalanceDict['balance']) + float(_amount)
                # print(receiverNewBalance)
                db2.write_back(userBalance)

                db.insert({
                    'timeStamp': timeStamp,
                    'transaction': _transaction
                })
                data2 = str("0")
                startingHash = sha256(data2.encode('utf8')).hexdigest()
                db3.insert({
                    'index': 0,
                    'previousHash': str(startingHash),
                    'currentBlock': ""
                })

                db2.write_back(receiverBalance)

                return jsonify({'status': 'pending transaction'})
            else:
                db2.insert({
                    'senderAddress': _senderAddress,
                    'balance': _balance,
                    'timeStamp': timeStamp
                })
                db2.insert({
                    'senderAddress': _recipientAddress,
                    'balance': _amount,
                    'timeStamp': timeStamp
                })

                db.insert({
                    'timeStamp': timeStamp,
                    'transaction': _transaction
                })

                return jsonify({'status': 'pending new transaction '})
        else:

            db2.insert({
                'senderAddress': _senderAddress,
                'balance': _balance,
                'timeStamp': timeStamp
            })
            db2.insert({
                'senderAddress': _recipientAddress,
                'balance': _amount,
                'timeStamp': timeStamp
            })

            db.insert({'timeStamp': timeStamp, 'transaction': _transaction})
            return jsonify({'status': 'invalid'})
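Note that write_back() as used above belongs to the TinyDB 3.x API and appears to have been removed in TinyDB 4, where the usual replacement is update() with doc_ids. A hedged sketch of that equivalent (path and values assumed):

from tinydb import TinyDB

db2 = TinyDB('walletBalance.json')  # hypothetical path
doc_id = db2.insert({'senderAddress': 'abc', 'balance': 10.0})

# Instead of mutating the fetched document and calling write_back(),
# update the stored document in place by its doc_id.
db2.update({'balance': 7.5}, doc_ids=[doc_id])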
Example #58
0
def testMemoryStorage():
    db = TinyDB(storage=MemoryStorage)
    db.insert({'data': 5})
    print(db.search(where('data') == 5))
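MemoryStorage keeps everything in RAM, so the data is gone once the TinyDB instance is discarded; when persistence with fewer disk writes is wanted, CachingMiddleware over JSONStorage (as used earlier on this page) is a common alternative. A minimal sketch (file name assumed):

from tinydb import TinyDB
from tinydb.middlewares import CachingMiddleware
from tinydb.storages import JSONStorage

db = TinyDB('cached.json', storage=CachingMiddleware(JSONStorage))
db.insert({'data': 5})
db.close()  # flushes the write cache to disk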
Example #59
0
from random import choice, randint
from string import ascii_uppercase, digits

from tinydb import TinyDB

DB_path = './data/dummydb.json'
timestamp = 1586005981

all_data = {}
for _ in range(0, randint(0, 10)):
    random_time = timestamp + randint(-1000, 1000)
    pos_users = []
    for _ in range(0, randint(0, 15)):
        user = ''.join([choice(ascii_uppercase + digits) for _ in range(16)])
        pos_users.append(user)
    all_data[random_time] = pos_users

db = TinyDB(DB_path)
db.insert(all_data)
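One thing to keep in mind with the snippet above: the dictionary keys are integers (timestamps), and TinyDB's default JSON storage serialises them as strings, so they come back as strings on the next read. A small sketch of the round trip (path assumed):

from tinydb import TinyDB

db = TinyDB('./data/dummydb.json')
doc_id = db.insert({1586005981: ['USER_A', 'USER_B']})

doc = db.get(doc_id=doc_id)
# json.dump wrote the integer key out as a string.
assert '1586005981' in doc and 1586005981 not in doc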
Example #60
0
class Game:
    SWITCHES = None

    BLOCK_SIZE = 16

    COLUMNS = None
    ROWS = None

    SHAPES_NEXT_COUNT = None

    FPS = None

    COLORS = [
        # 0 - Black
        (0, 0, 0),
        # 1 - Purple
        (128, 0, 128),
        # 2 - Green
        (0, 255, 0),
        # 3 - Red
        (255, 0, 0),
        # 4 - Blue
        (0, 0, 255),
        # 5 - Orange
        (255, 165, 0),
        # 6 - Cyan
        (0, 255, 255),
        # 7 - Yellow
        (255, 255, 0),
        # 8 - Dark Grey
        (35, 35, 35),
        # 9 - White
        (255, 255, 255)
    ]

    SHAPES = [
        # T
        [
            [1, 1, 1],
            [0, 1, 0],
        ],

        # S
        [
            [0, 2, 2],
            [2, 2, 0]
        ],

        # Z
        [
            [3, 3, 0],
            [0, 3, 3]
        ],

        # J
        [
            [4, 0, 0],
            [4, 4, 4]
        ],

        # L
        [
            [0, 0, 5],
            [5, 5, 5],
        ],

        # I
        [
            [6, 6, 6, 6],
        ],

        # O
        [
            [7, 7],
            [7, 7],
        ]
    ]

    WIDTH = None
    HEIGHT = None

    COUNTDOWN = None

    SCORE_INCREMENTS = None

    LINES = 0
    SCORE = 0

    LEVEL = 1
    LEVEL_INCREMENT = None

    INTERVAL = None
    INTERVAL_INCREMENT = None

    PAUSED = False

    GAMEOVER = False

    BACKGROUND_GRID = None
    BACKGROUND_BOX = None

    RGB_MATRIX_HARDWARE = None
    RGB_MATRIX_ROWS = None
    RGB_MATRIX_CHAIN_LENGTH = None
    RGB_MATRIX_PARALLEL = None
    RGB_MATRIX_PWM_BITS = None
    RGB_MATRIX_BRIGHTNESS = None
    RGB_MATRIX_LSB_NANOSECONDS = None
    RGB_MATRIX_GPIO_SLOWDOWN = None

    pygame = None
    pygame_font = None
    pygame_screen = None
    pygame_clock = None

    rgbmatrix = None
    rgbmatrix_options = None
    rgbmatrix_font = None

    gpio = None
    db = None

    board = None
    shape = None
    shapes_next = None

    def __init__(self, switches, columns, rows, shapes_next_count, fps, countdown, interval, score_increments, level_increment, interval_increment, rgb_matrix_hardware, rgb_matrix_rows, rgb_matrix_chain_length, rgb_matrix_parallel, rgb_matrix_pwm_bits, rgb_matrix_brightness, rgb_matrix_lsb_nanoseconds, rgb_matrix_gpio_slowdown, rgb_matrix_disable_hardware_pulsing, rgb_matrix_rgb_sequence, pygame_instance=None):
        self.SWITCHES = switches
        self.COLUMNS = columns
        self.ROWS = rows
        self.SHAPES_NEXT_COUNT = shapes_next_count
        self.FPS = fps
        self.COUNTDOWN = countdown
        self.INTERVAL = interval
        self.SCORE_INCREMENTS = score_increments
        self.LEVEL_INCREMENT = level_increment
        self.INTERVAL_INCREMENT = interval_increment
        self.RGB_MATRIX_HARDWARE = rgb_matrix_hardware
        self.RGB_MATRIX_ROWS = rgb_matrix_rows
        self.RGB_MATRIX_CHAIN_LENGTH = rgb_matrix_chain_length
        self.RGB_MATRIX_PARALLEL = rgb_matrix_parallel
        self.RGB_MATRIX_PWM_BITS = rgb_matrix_pwm_bits
        self.RGB_MATRIX_BRIGHTNESS = rgb_matrix_brightness
        self.RGB_MATRIX_LSB_NANOSECONDS = rgb_matrix_lsb_nanoseconds
        self.RGB_MATRIX_GPIO_SLOWDOWN = rgb_matrix_gpio_slowdown
        self.RGB_MATRIX_DISABLE_HARDWARE_PULSING = rgb_matrix_disable_hardware_pulsing
        self.RGB_MATRIX_RGB_SEQUENCE = rgb_matrix_rgb_sequence

        self.gpio = Gpio

        self.rgbmatrix_options = RgbMatrixOptions()
        self.rgbmatrix_options.hardware_mapping = self.RGB_MATRIX_HARDWARE
        self.rgbmatrix_options.rows = self.RGB_MATRIX_ROWS
        self.rgbmatrix_options.chain_length = self.RGB_MATRIX_CHAIN_LENGTH
        self.rgbmatrix_options.parallel = self.RGB_MATRIX_PARALLEL
        self.rgbmatrix_options.pwm_bits = self.RGB_MATRIX_PWM_BITS
        self.rgbmatrix_options.brightness = self.RGB_MATRIX_BRIGHTNESS
        self.rgbmatrix_options.pwm_lsb_nanoseconds = self.RGB_MATRIX_LSB_NANOSECONDS
        self.rgbmatrix_options.gpio_slowdown = self.RGB_MATRIX_GPIO_SLOWDOWN
        self.rgbmatrix_options.disable_hardware_pulsing = self.RGB_MATRIX_DISABLE_HARDWARE_PULSING
        self.rgbmatrix_options.led_rgb_sequence = self.RGB_MATRIX_RGB_SEQUENCE

        if pygame_instance is None:
            self.pygame = pygame
        else:
            self.pygame = pygame_instance

        self.db = TinyDB('data/database.json')

        try:
            self.gpio.setmode(self.gpio.BCM)

            for switch in self.SWITCHES.keys():
                self.gpio.setup(switch, self.gpio.IN)

            self.WIDTH = self.BLOCK_SIZE * (self.COLUMNS + 12)
            self.HEIGHT = self.BLOCK_SIZE * (self.ROWS + 24)

            self.BACKGROUND_GRID = [
                [8 if x % 2 == y % 2 else 0 for x in xrange(self.COLUMNS)]
                for y in xrange(self.ROWS)
            ]
            self.BACKGROUND_BOX = [
                [9 if (x == 0 or x == self.COLUMNS + 1 or y == 0 or y == self.ROWS + 1) else 0 for x in xrange(self.COLUMNS + 2)]
                for y in xrange(self.ROWS + 2)
            ]

            self.pygame.init()
            self.pygame.key.set_repeat(150, 50)
            self.pygame_font = self.pygame.font.Font(self.pygame.font.get_default_font(), 12)
            self.pygame_screen = self.pygame.display.set_mode((self.WIDTH, self.HEIGHT), 0, 24)
            self.pygame.display.set_caption('RGB Matrix Tetris')
            self.pygame.event.set_blocked(self.pygame.MOUSEMOTION)
            self.pygame.mouse.set_visible(0)
            self.pygame_clock = self.pygame.time.Clock()

            self.rgbmatrix = RgbMatrix(options=self.rgbmatrix_options)
            self.rgbmatrix_graphics = Graphics
            self.rgbmatrix_font = self.rgbmatrix_graphics.Font()
            self.rgbmatrix_font.LoadFont('./rgbmatrixtetris/fonts/4x6.bdf')

            self.board = Board(self.COLUMNS, self.ROWS)
            self.__generate_shapes()
        except AttributeError:
            print("[Game][error] An error occurred initialising game")

    def start(self, run_once=False):
        print("[Game][info] Starting game")

        try:
            self.rgbmatrix.Clear()
            self.pygame_screen.fill((0, 0, 0))
            self.__display_message("Press\n\nenter\n\nto\n\nstart")
            self.pygame.display.update()
            pygame_wait = True

            while pygame_wait:
                try:
                    for event in self.pygame.event.get():
                        if event.type == self.pygame.KEYDOWN:
                            if event.key == self.pygame.K_RETURN:
                                pygame_wait = False
                            elif event.key == self.pygame.K_ESCAPE:
                                self.quit()
                        elif event.type == self.pygame.QUIT:
                            self.quit()
                except KeyboardInterrupt:
                    self.quit()
        except AttributeError:
            print("[Game][error] An error occurred starting game")

        self.__countdown()
        self.__loop()
        self.finish()

        if run_once is not True:
            self.start()

    def __countdown(self):
        print("[Game][info] Starting game countdown")
        try:
            start_ticks = self.pygame.time.get_ticks()

            while True:
                seconds = (self.pygame.time.get_ticks() - start_ticks) / 1000
                self.rgbmatrix.Clear()
                self.pygame_screen.fill((0, 0, 0))
                remaining = (self.COUNTDOWN - seconds)

                if seconds > self.COUNTDOWN:
                    break

                if seconds == self.COUNTDOWN:
                    self.__display_message("Go!")
                elif seconds == 0:
                    self.__display_message("Starting\n\nin %d!" % remaining)
                else:
                    self.__display_message("%d!" % remaining)

                self.pygame.display.update()
                self.pygame.time.wait(1000)
        except AttributeError:
            print("[Game][error] An error occurred starting game countdown")

    def __loop(self):
        print("[Game][info] Starting game loop")

        try:
            self.pygame.time.set_timer(pygame.USEREVENT + 1, self.INTERVAL)

            key_actions = {
                'ESCAPE': lambda: self.quit(),
                'LEFT': lambda: self.__move(-1),
                'RIGHT': lambda: self.__move(+1),
                'DOWN': lambda: self.__drop(True),
                'UP': lambda: self.__rotate_shape(),
                'p': lambda: self.toggle_pause(),
                'RETURN': lambda: self.__instant_drop()
            }

            start_ticks = self.pygame.time.get_ticks()

            while not self.GAMEOVER:
                self.pygame_clock.tick(self.FPS)
                rgbmatrix_canvas = self.rgbmatrix.CreateFrameCanvas()
                self.pygame_screen.fill((0, 0, 0))

                if self.PAUSED:
                    self.__display_message("Paused", None, self.COLORS[9], self.COLORS[0], None)
                else:
                    self.__display_message("Next:", ((self.COLUMNS + 3), 1), self.COLORS[9], self.COLORS[0], None, False)
                    self.__display_message("Time: \n\n\n\n\nScore: \n\n\n\n\nLines: \n\n\n\n\nLevel:", (1, (self.ROWS + 3)), self.COLORS[9], self.COLORS[0], None, False)

                    self.__display_message("%s" % (str(datetime.timedelta(seconds=((self.pygame.time.get_ticks() - start_ticks) / 1000)))), (1, (self.ROWS + 8)), self.COLORS[3], self.COLORS[0], (60, './rgbmatrixtetris/fonts/4x6.bdf'), rgbmatrix_canvas)
                    self.__display_message("%d" % (self.SCORE), (1, (self.ROWS + 14)), self.COLORS[2], self.COLORS[0], (60, './rgbmatrixtetris/fonts/4x6.bdf'), rgbmatrix_canvas)
                    self.__display_message("%d" % (self.LINES), (1, (self.ROWS + 20)), self.COLORS[5], self.COLORS[0], (60, './rgbmatrixtetris/fonts/4x6.bdf'), rgbmatrix_canvas)
                    self.__display_message("%d" % (self.LEVEL), (1, (self.ROWS + 26)), self.COLORS[6], self.COLORS[0], (60, './rgbmatrixtetris/fonts/4x6.bdf'), rgbmatrix_canvas)

                    self.__draw_matrix(self.BACKGROUND_GRID, (1, 1), None, False)
                    self.__draw_matrix(self.board.coordinates(), (1, 1), None, rgbmatrix_canvas)
                    self.__draw_matrix(self.shape.coordinates(), self.shape.position((1, 1)), None, rgbmatrix_canvas)
                    self.__draw_matrix(self.BACKGROUND_BOX, (0, 0), None, rgbmatrix_canvas)

                    for i in range(0, (self.SHAPES_NEXT_COUNT - 1)):
                        self.__draw_matrix(self.shapes_next[i].coordinates(), ((self.COLUMNS + 3), (((i + 1) * 5) - 2)), None, rgbmatrix_canvas)

                rgbmatrix_canvas = self.rgbmatrix.SwapOnVSync(rgbmatrix_canvas)
                self.pygame.display.update()

                try:
                    for event in self.pygame.event.get():
                        if event.type == self.pygame.USEREVENT + 1 and not self.PAUSED:
                            self.__drop(False)
                        elif event.type == self.pygame.QUIT:
                            self.quit()
                        elif event.type == self.pygame.KEYDOWN:
                            for key in key_actions:
                                if event.key == getattr(self.pygame, "K_" + key):
                                    key_actions[key]()
                except KeyboardInterrupt:
                    self.quit()
        except AttributeError:
            print("[Game][error] An error occurred during game loop")

    def __button_press(self, channel):
        print("[Game][info] Button pressed: %d" % channel)

        event = self.pygame.event.Event(self.pygame.KEYDOWN, key=getattr(self.pygame, "K_" + self.SWITCHES[channel]), trigger='gpio')
        self.pygame.event.post(event)

    def __generate_shapes(self):
        print("[Game][info] Generating next shapes")

        self.shapes_next = [Shape(self.SHAPES[rand(len(self.SHAPES))]) for x in xrange(self.SHAPES_NEXT_COUNT)]
        self.__next_shape()

    def __next_shape(self):
        print("[Game][info] Getting next shape")

        self.shape = self.shapes_next.pop(0)
        self.shapes_next.append(Shape(self.SHAPES[rand(len(self.SHAPES))]))

        self.shape.set_x(int(self.COLUMNS / 2 - len(self.shape.coordinates()[0]) / 2))
        self.shape.set_y(0)

        if self.board.check_collision(self.shape, self.shape.position()):
            self.GAMEOVER = True

    def __display_message(self, message, coordinates=None, color=COLORS[9], background_color=COLORS[0], message_size=None, rgbmatrix=None):
        print("[Game][info] Displaying message")

        for i, line in enumerate(message.splitlines()):
            pygame_message_font = self.pygame_font
            rgbmatrix_message_font = self.rgbmatrix_font

            if message_size is not None:
                pygame_message_size, rgbmatrix_message_size = message_size
                pygame_message_font = self.pygame.font.Font(self.pygame.font.get_default_font(), pygame_message_size)
                rgbmatrix_message_font = self.rgbmatrix_graphics.Font()
                rgbmatrix_message_font.LoadFont(rgbmatrix_message_size)

            pygame_message_image = pygame_message_font.render(line, False, color, background_color)

            if coordinates is not None:
                pygame_position_x, pygame_position_y = coordinates
                pygame_position_x = (pygame_position_x * self.BLOCK_SIZE)
                pygame_position_y = (pygame_position_y * self.BLOCK_SIZE)
                rgbmatrix_position_x, rgbmatrix_position_y = coordinates
            else:
                pygame_message_image_center_x, pygame_message_image_center_y = pygame_message_image.get_size()
                pygame_message_image_center_x //= 2
                pygame_message_image_center_y //= 2
                pygame_position_x = (self.WIDTH // 2 - pygame_message_image_center_x)
                pygame_position_y = (self.HEIGHT // 2 - pygame_message_image_center_y + i)
                rgbmatrix_position_x = (self.WIDTH / self.BLOCK_SIZE - (4 * len(line))) // 2
                rgbmatrix_position_y = (((self.HEIGHT / self.BLOCK_SIZE - rgbmatrix_message_font.height) // 2) + (i * 3))

            if rgbmatrix is not False:
                if rgbmatrix is None:
                    rgbmatrix = self.rgbmatrix

                r, g, b = color
                self.rgbmatrix_graphics.DrawText(rgbmatrix, rgbmatrix_message_font, rgbmatrix_position_x, rgbmatrix_position_y, self.rgbmatrix_graphics.Color(r, g, b), line)

            self.pygame_screen.blit(
                pygame_message_image,
                (pygame_position_x, pygame_position_y)
            )

    def __draw_line(self, start_position, end_position, color=COLORS[9], rgbmatrix=True):
        print("[Game][info] Drawing line")

        if rgbmatrix:
            r, g, b = color
            start_x, start_y = start_position
            end_x, end_y = end_position
            self.rgbmatrix_graphics.DrawLine(self.rgbmatrix, start_x, start_y, end_x, end_y, self.rgbmatrix_graphics.Color(r, g, b))

        self.pygame.draw.line(self.pygame_screen, color, start_position, end_position)

    def __draw_matrix(self, matrix, offset, color=None, rgbmatrix=None):
        print("[Game][info] Drawing matrix")

        off_x, off_y = offset

        for y, row in enumerate(matrix):
            for x, val in enumerate(row):
                if val:
                    if color is None:
                        shape_color = self.COLORS[val]
                    else:
                        shape_color = color

                    if rgbmatrix is not False:
                        if rgbmatrix is None:
                            rgbmatrix = self.rgbmatrix

                        r, g, b = shape_color
                        rgbmatrix.SetPixel((off_x + x), (off_y + y), r, g, b)

                    self.pygame.draw.rect(
                        self.pygame_screen,
                        shape_color,
                        self.pygame.Rect((off_x + x) * self.BLOCK_SIZE, (off_y + y) * self.BLOCK_SIZE, self.BLOCK_SIZE, self.BLOCK_SIZE),
                        0
                    )

    def __count_clear_rows(self, rows):
        print("[Game][info] Counting cleared rows")

        self.LINES += rows
        self.SCORE += self.SCORE_INCREMENTS[rows] * self.LEVEL

        if self.LINES >= self.LEVEL * self.LEVEL_INCREMENT:
            self.LEVEL += 1
            delay = self.INTERVAL - self.INTERVAL_INCREMENT * (self.LEVEL - 1)
            delay = 100 if delay < 100 else delay
            self.pygame.time.set_timer(self.pygame.USEREVENT + 1, delay)

    def __move(self, delta_x):
        print("[Game][info] Moving shape")

        if not self.GAMEOVER and not self.PAUSED:
            new_x = self.shape.x() + delta_x

            if new_x < 0:
                new_x = 0

            if new_x > self.COLUMNS - len(self.shape.coordinates()[0]):
                new_x = self.COLUMNS - len(self.shape.coordinates()[0])

            if not self.board.check_collision(self.shape, (new_x, self.shape.y())):
                self.shape.set_x(new_x)

    def __drop(self, manual):
        print("[Game][info] Drop shape")

        if not self.GAMEOVER and not self.PAUSED:
            self.SCORE += 1 if manual else 0
            self.shape.set_y(self.shape.y() + 1)

            if self.board.check_collision(self.shape, self.shape.position()):
                self.board.join_matrixes(self.shape, self.shape.position())
                self.__next_shape()
                cleared_rows = 0

                while True:
                    for i, row in enumerate(self.board.coordinates()[:-1]):
                        if 0 not in row:
                            self.board.clear_row(i)
                            cleared_rows += 1
                            break
                    else:
                        break

                self.__count_clear_rows(cleared_rows)

                return True

        return False

    def __instant_drop(self):
        print("[Game][info] Instant drop shape")

        if not self.GAMEOVER and not self.PAUSED:
            while not self.__drop(True):
                pass

    def __rotate_shape(self, direction='clockwise'):
        print("[Game][info] Rotating shape %s" % (direction))

        if not self.GAMEOVER and not self.PAUSED:
            new_coordinates = (0, 0)

            if direction == 'clockwise':
                new_coordinates = self.shape.rotate_clockwise()
            elif direction == 'anti-clockwise':
                new_coordinates = self.shape.rotate_anti_clockwise()

            if not self.board.check_collision(Shape(new_coordinates), self.shape.position()):
                self.shape.set_coordinates(new_coordinates)

    def toggle_pause(self):
        print("[Game][info] Toggling paused state")

        self.PAUSED = not self.PAUSED

    def get_score(self):
        print("[Game][info] Calculating score")

        return self.SCORE

    def print_score(self):
        print("[Game][info] Printing score")

        score = self.get_score()

        try:
            self.__display_message("Game\n\nOver!\n\nYour\n\nscore:\n\n%d" % score)
            self.pygame.display.update()
            self.pygame.time.wait(3000)
        except AttributeError:
            print("[Game][error] An error occurred printing score")

    def print_high_score(self):
        print("[Game][info] Printing high score: %d" % self.get_score())

        try:
            self.__display_message("Game\n\nOver!\n\nHigh\n\nscore:\n\n%d!" % self.get_score())
            self.pygame.display.update()
            self.pygame.time.wait(3000)
        except AttributeError:
            print("[Game][error] An error occurred printing high score")

    def finish(self):
        print("[Game][info] Finishing game")

        score = self.get_score()

        self.rgbmatrix.Clear()
        self.pygame_screen.fill((0, 0, 0))

        if self.db.contains(Query().score >= score):
            self.print_score()
        else:
            self.print_high_score()

        self.db.insert({'score': score})
        self.reset()

    def quit(self):
        print("[Game][info] Quitting game")

        self.rgbmatrix.Clear()
        self.pygame_screen.fill((0, 0, 0))
        self.__display_message("Quit...")
        self.pygame.display.update()
        self.pygame.quit()
        sys.exit()

    def reset(self):
        print("[Game][info] Resetting game")

        self.PAUSED = False
        self.GAMEOVER = False
        self.SCORE = 0
        self.LINES = 0
        self.LEVEL = 1

        self.board = Board(self.COLUMNS, self.ROWS)
        self.shape = None
        self.shapes_next = None
        self.__generate_shapes()

        self.pygame.time.set_timer(pygame.USEREVENT + 1, 0)
        self.pygame.display.update()

    def cleanup(self):
        print("[Game][info] Game clean up")

        try:
            self.rgbmatrix.Clear()
        except AttributeError:
            print("[Game][error] An error occurred cleaning up")

    def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
        print("[Game][info] Game exit")

        self.cleanup()